├── .gitignore ├── hott ├── Bool_HoTT.thy ├── List_HoTT.thy ├── Propositions.thy ├── Univalence.thy ├── Nat.thy ├── Equivalence.thy └── Equivalence2.thy ├── .github ├── isabelle-action │ ├── entrypoint.sh │ ├── action.yml │ └── Dockerfile └── workflows │ └── build.yml ├── ROOT ├── mltt ├── core │ ├── cases.ML │ ├── elimination.ML │ ├── calc.ML │ ├── implicits.ML │ ├── context_facts.ML │ ├── elaboration.ML │ ├── types.ML │ ├── focus.ML │ ├── lib.ML │ ├── tactics.ML │ ├── goals.ML │ ├── context_tactical.ML │ ├── eqsubst.ML │ ├── comp.ML │ ├── elaborated_statement.ML │ └── MLTT.thy └── lib │ ├── Maybe.thy │ ├── Prelude.thy │ └── List.thy ├── README.md └── LICENSE /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | *.bak 3 | \#*.thy# 4 | \#*.ML# 5 | 6 | -------------------------------------------------------------------------------- /hott/Bool_HoTT.thy: -------------------------------------------------------------------------------- 1 | theory Bool_HoTT 2 | imports 3 | MLTT.Prelude 4 | Propositions 5 | 6 | begin 7 | 8 | end 9 | -------------------------------------------------------------------------------- /.github/isabelle-action/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | chmod +x /Isabelle/bin/isabelle 4 | sh -c "/Isabelle/bin/isabelle $INPUT_TOOL_ARGS" 5 | 6 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: build 2 | on: push 3 | 4 | jobs: 5 | build: 6 | name: build ROOT 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v1 10 | - uses: ./.github/isabelle-action 11 | with: 12 | TOOL_ARGS: "build -D $GITHUB_WORKSPACE" 13 | 14 | -------------------------------------------------------------------------------- /.github/isabelle-action/action.yml: -------------------------------------------------------------------------------- 1 | name: "Isabelle action" 2 | description: "Run Isabelle" 3 | author: "josh@joshchen.io" 4 | 5 | inputs: 6 | TOOL_ARGS: 7 | description: "Isabelle tool to invoke and arguments to pass" 8 | required: true 9 | 10 | runs: 11 | using: "docker" 12 | image: "Dockerfile" 13 | 14 | -------------------------------------------------------------------------------- /hott/List_HoTT.thy: -------------------------------------------------------------------------------- 1 | theory List_HoTT 2 | imports 3 | MLTT.List 4 | Nat 5 | 6 | begin 7 | 8 | section \Length\ 9 | 10 | definition [implicit]: "len \ ListRec {} Nat 0 (fn _ _ rec. suc rec)" 11 | 12 | experiment begin 13 | Lemma "len [] \ ?n" by (subst comp; typechk?)+ 14 | Lemma "len [0, suc 0, suc (suc 0)] \ ?n" by (subst comp; typechk?)+ 15 | end 16 | 17 | 18 | end 19 | -------------------------------------------------------------------------------- /hott/Propositions.thy: -------------------------------------------------------------------------------- 1 | theory Propositions 2 | imports 3 | MLTT.Prelude 4 | Equivalence 5 | 6 | begin 7 | 8 | Definition isProp: ":= \x y: A. x = y" 9 | where "A: U i" by typechk 10 | 11 | Definition isSet: ":=\x y: A. \p q: x = y. 
p = q" 12 | where "A: U i" by typechk 13 | 14 | Theorem isProp_Top: "isProp \" 15 | unfolding isProp_def 16 | by (intros, elims, refl) 17 | 18 | Theorem isProp_Bot: "isProp \" 19 | unfolding isProp_def 20 | by (intros, elim) 21 | 22 | 23 | end 24 | -------------------------------------------------------------------------------- /.github/isabelle-action/Dockerfile: -------------------------------------------------------------------------------- 1 | ## Isabelle2020 Dockerfile for GitHub Actions 2 | 3 | FROM ubuntu 4 | SHELL ["/bin/bash", "-c"] 5 | 6 | # Install packages 7 | ENV DEBIAN_FRONTEND=noninteractive 8 | RUN apt-get -y update &&\ 9 | apt-get install -y curl less libfontconfig1 libgomp1 libwww-perl rlwrap unzip &&\ 10 | apt-get clean 11 | 12 | # Set up Isabelle 13 | ADD https://isabelle.in.tum.de/dist/Isabelle2021_linux.tar.gz Isabelle.tar.gz 14 | RUN tar xzf Isabelle.tar.gz &&\ 15 | mv Isabelle2021 Isabelle &&\ 16 | perl -pi -e 's,ISABELLE_HOME_USER=.*,ISABELLE_HOME_USER="\$HOME/.isabelle",g;' Isabelle/etc/settings &&\ 17 | perl -pi -e 's,ISABELLE_LOGIC=.*,ISABELLE_LOGIC=Pure,g;' Isabelle/etc/settings &&\ 18 | Isabelle/bin/isabelle build -o system_heaps -b Pure &&\ 19 | rm Isabelle.tar.gz 20 | 21 | # Set up entrypoint 22 | COPY entrypoint.sh /entrypoint.sh 23 | RUN chmod +x /entrypoint.sh 24 | 25 | ENTRYPOINT ["/entrypoint.sh"] 26 | 27 | -------------------------------------------------------------------------------- /ROOT: -------------------------------------------------------------------------------- 1 | session MLTT_Core in "mltt/core" = Pure + 2 | description 3 | "Core MLTT: minimal dependent type theory based on intensional Martin-Löf 4 | type theory with cumulative Russell-style universes, Pi types and Sigma 5 | types." 6 | sessions 7 | "HOL-Eisbach" 8 | theories 9 | MLTT (global) 10 | 11 | session MLTT in mltt = MLTT_Core + 12 | description 13 | "Dependent type theory based on MLTT_Core." 14 | directories 15 | lib 16 | theories 17 | Prelude 18 | Maybe 19 | List 20 | 21 | session HoTT in hott = MLTT + 22 | description 23 | "Homotopy type theory, following the development in 24 | The Univalent Foundations Program, 25 | Homotopy Type Theory: Univalent Foundations of Mathematics, 26 | Institute for Advanced Study, (2013). 27 | Available online at https://homotopytypetheory.org/book." 28 | theories 29 | Identity 30 | Equivalence 31 | Nat 32 | Propositions 33 | Univalence 34 | Bool_HoTT 35 | List_HoTT 36 | -------------------------------------------------------------------------------- /hott/Univalence.thy: -------------------------------------------------------------------------------- 1 | theory Univalence 2 | imports Equivalence 3 | 4 | begin 5 | 6 | declare Ui_in_USi [form] 7 | 8 | Definition univalent_U: ":= \ A B: U i. \ p: A = B. is_biinv (idtoeqv p)" 9 | by (typechk; rule U_lift)+ 10 | 11 | axiomatization univalence where 12 | univalence: "\i. 
univalence i: univalent_U i" 13 | 14 | Lemma (def) idtoeqv_is_qinv: 15 | assumes "A: U i" "B: U i" "p: A = B" 16 | shows "is_qinv (idtoeqv p)" 17 | by (rule is_qinv_if_is_biinv) (rule univalence[unfolded univalent_U_def]) 18 | 19 | Definition ua: ":= fst (idtoeqv_is_qinv i A B p)" 20 | where "A: U i" "B: U i" "p: A = B" 21 | by typechk 22 | 23 | definition ua_i ("ua") 24 | where [implicit]: "ua p \ Univalence.ua {} {} {} p" 25 | 26 | Definition ua_idtoeqv [folded ua_def]: ":= fst (snd (idtoeqv_is_qinv i A B p))" 27 | where "A: U i" "B: U i" "p: A = B" 28 | by typechk 29 | 30 | Definition idtoeqv_ua [folded ua_def]: ":= snd (snd (idtoeqv_is_qinv i A B p))" 31 | where "A: U i" "B: U i" "p: A = B" 32 | by typechk 33 | 34 | 35 | end 36 | -------------------------------------------------------------------------------- /mltt/core/cases.ML: -------------------------------------------------------------------------------- 1 | (* Title: cases.ML 2 | Author: Joshua Chen 3 | 4 | Case reasoning. 5 | *) 6 | 7 | structure Case: sig 8 | 9 | val rules: Proof.context -> thm list 10 | val lookup_rule: Proof.context -> Termtab.key -> thm option 11 | val register_rule: thm -> Context.generic -> Context.generic 12 | 13 | end = struct 14 | 15 | (* Context data *) 16 | 17 | (*Stores elimination rules together with a list of the indexnames of the 18 | variables each rule eliminates. Keyed by head of the type being eliminated.*) 19 | structure Rules = Generic_Data ( 20 | type T = thm Termtab.table 21 | val empty = Termtab.empty 22 | val extend = I 23 | val merge = Termtab.merge Thm.eq_thm_prop 24 | ) 25 | 26 | val rules = map #2 o Termtab.dest o Rules.get o Context.Proof 27 | fun lookup_rule ctxt = Termtab.lookup (Rules.get (Context.Proof ctxt)) 28 | fun register_rule rl = 29 | let val hd = Term.head_of (Lib.type_of_typing (Thm.major_prem_of rl)) 30 | in Rules.map (Termtab.update (hd, rl)) end 31 | 32 | 33 | (* [cases] attribute *) 34 | val _ = Theory.setup ( 35 | Attrib.setup \<^binding>\cases\ 36 | (Scan.succeed (Thm.declaration_attribute register_rule)) 37 | "" 38 | #> Global_Theory.add_thms_dynamic (\<^binding>\cases\, rules o Context.proof_of) 39 | ) 40 | 41 | 42 | end 43 | -------------------------------------------------------------------------------- /mltt/core/elimination.ML: -------------------------------------------------------------------------------- 1 | (* Title: elimination.ML 2 | Author: Joshua Chen 3 | 4 | Type elimination setup. 5 | *) 6 | 7 | structure Elim: sig 8 | 9 | val Rules: Proof.context -> (thm * indexname list) Termtab.table 10 | val rules: Proof.context -> (thm * indexname list) list 11 | val lookup_rule: Proof.context -> Termtab.key -> (thm * indexname list) option 12 | val register_rule: term list -> thm -> Context.generic -> Context.generic 13 | 14 | end = struct 15 | 16 | (** Context data **) 17 | 18 | (* Elimination rule data *) 19 | 20 | (*Stores elimination rules together with a list of the indexnames of the 21 | variables each rule eliminates. 
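    For example, Nat.thy registers NatE with the attribute [elim "?n"],
    recording ?n as the variable that rule eliminates.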
Keyed by head of the type being eliminated.*) 22 | structure Rules = Generic_Data ( 23 | type T = (thm * indexname list) Termtab.table 24 | val empty = Termtab.empty 25 | val extend = I 26 | val merge = Termtab.merge (eq_fst Thm.eq_thm_prop) 27 | ) 28 | 29 | val Rules = Rules.get o Context.Proof 30 | fun rules ctxt = map (op #2) (Termtab.dest (Rules ctxt)) 31 | fun lookup_rule ctxt = Termtab.lookup (Rules ctxt) 32 | fun register_rule tms rl = 33 | let val hd = Term.head_of (Lib.type_of_typing (Thm.major_prem_of rl)) 34 | in Rules.map (Termtab.update (hd, (rl, map (#1 o dest_Var) tms))) end 35 | 36 | 37 | (* [elim] attribute *) 38 | val _ = Theory.setup ( 39 | Attrib.setup \<^binding>\elim\ 40 | (Scan.repeat Args.term_pattern >> 41 | (Thm.declaration_attribute o register_rule)) 42 | "" 43 | #> Global_Theory.add_thms_dynamic (\<^binding>\elim\, 44 | fn context => map #1 (rules (Context.proof_of context))) 45 | ) 46 | 47 | 48 | end 49 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Isabelle/HoTT [![build](https://github.com/jaycech3n/Isabelle-HoTT/workflows/build/badge.svg)](https://github.com/jaycech3n/Isabelle-HoTT/actions?query=workflow%3Abuild) 2 | 3 | Isabelle/HoTT is an experimental implementation of [homotopy type theory](https://en.wikipedia.org/wiki/Homotopy_type_theory) in the [Isabelle](https://isabelle.in.tum.de/) interactive theorem prover. 4 | It largely follows the development of the theory in the [Homotopy Type Theory book](https://homotopytypetheory.org/book/), but aims to be general enough to eventually support cubical and other kinds of homotopy type theories. 5 | 6 | Work is slowly ongoing to develop the logic into a fully-featured proof environment in which one can formulate and prove mathematical statements, in the style of the univalent foundations school. 7 | While Isabelle has provided support for object logics based on Martin-Löf type theory since the beginning, these have largely been ignored in favor of Isabelle/HOL. 8 | Thus this project is also an experiment in creating a viable framework, based on dependent type theory, inside the simple type theoretic foundations of Isabelle/Pure. 9 | 10 | **Caveat prover**: *This project is under active experimentation and is not yet guaranteed fit for any particular purpose.* 11 | 12 | ### References 13 | 14 | - *Homotopy Type Theory in Isabelle*. ITP 2021. DOI: [10.4230/LIPIcs.ITP.2021.12](https://drops.dagstuhl.de/opus/frontdoor.php?source_opus=13907) 15 | - *An Implementation of Homotopy Type Theory in Isabelle/Pure*. [arXiv:1911.00399 [cs.LO]](https://arxiv.org/abs/1911.00399) 16 | 17 | ### Usage 18 | 19 | Isabelle/HoTT is compatible with Isabelle2021. 20 | To use, add the Isabelle/HoTT folder path to `.isabelle/Isabelle2021/ROOTS` (on Mac/Linux/cygwin installations): 21 | 22 | ``` 23 | $ echo path/to/Isabelle/HoTT >> ~/.isabelle/Isabelle2021/ROOTS 24 | ``` 25 | 26 | ### To-do list 27 | 28 | In no particular order. 
29 | 30 | - [ ] Dedicated type context data 31 | - [ ] Definitional unfolding, better simplification in the typechecker 32 | - [ ] Proper handling of universes 33 | - [ ] Recursive function definitions 34 | - [ ] Inductive type definitions 35 | - [ ] Higher inductive type definitions 36 | -------------------------------------------------------------------------------- /mltt/lib/Maybe.thy: -------------------------------------------------------------------------------- 1 | chapter \Maybe type\ 2 | 3 | theory Maybe 4 | imports Prelude 5 | 6 | begin 7 | 8 | text \Defined as a sum.\ 9 | 10 | definition "Maybe A \ A \ \" 11 | definition "none A \ inr A \ tt" 12 | definition "some A a \ inl A \ a" 13 | 14 | lemma 15 | MaybeF: "A: U i \ Maybe A: U i" and 16 | Maybe_none: "A: U i \ none A: Maybe A" and 17 | Maybe_some: "a: A \ some A a: Maybe A" 18 | unfolding Maybe_def none_def some_def by typechk+ 19 | 20 | Lemma (def) MaybeInd: 21 | assumes 22 | "A: U i" 23 | "\m. m: Maybe A \ C m: U i" 24 | "c\<^sub>0: C (none A)" 25 | "\a. a: A \ f a: C (some A a)" 26 | "m: Maybe A" 27 | shows "C m" 28 | using assms[unfolded Maybe_def none_def some_def, type] 29 | apply (elim m) 30 | apply fact 31 | apply (elim, fact) 32 | done 33 | 34 | Lemma Maybe_comp_none: 35 | assumes 36 | "A: U i" 37 | "c\<^sub>0: C (none A)" 38 | "\a. a: A \ f a: C (some A a)" 39 | "\m. m: Maybe A \ C m: U i" 40 | shows "MaybeInd A C c\<^sub>0 f (none A) \ c\<^sub>0" 41 | using assms 42 | unfolding Maybe_def MaybeInd_def none_def some_def 43 | by compute 44 | 45 | Lemma Maybe_comp_some: 46 | assumes 47 | "A: U i" 48 | "a: A" 49 | "c\<^sub>0: C (none A)" 50 | "\a. a: A \ f a: C (some A a)" 51 | "\m. m: Maybe A \ C m: U i" 52 | shows "MaybeInd A C c\<^sub>0 f (some A a) \ f a" 53 | using assms 54 | unfolding Maybe_def MaybeInd_def none_def some_def 55 | by compute 56 | 57 | lemmas 58 | [form] = MaybeF and 59 | [intr, intro] = Maybe_none Maybe_some and 60 | [comp] = Maybe_comp_none Maybe_comp_some and 61 | MaybeE [elim "?m"] = MaybeInd[rotated 4] 62 | lemmas 63 | Maybe_cases [cases] = MaybeE 64 | 65 | abbreviation "MaybeRec A C \ MaybeInd A (K C)" 66 | 67 | definition none_i ("none") where [implicit]: "none \ Maybe.none {}" 68 | definition some_i ("some") where [implicit]: "some a \ Maybe.some {} a" 69 | 70 | translations 71 | "none" \ "CONST Maybe.none A" 72 | "some a" \ "CONST Maybe.some A a" 73 | 74 | 75 | end 76 | -------------------------------------------------------------------------------- /mltt/core/calc.ML: -------------------------------------------------------------------------------- 1 | structure Calc = struct 2 | 3 | (* Calculational type context data 4 | 5 | A "calculational" type is a type expressing some congruence relation. In 6 | particular, it has a notion of composition of terms that is often used to derive 7 | proofs equationally. 
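    (Hedged illustration, inferred from the proofs in hott/Nat.thy: for an equation
    x = y the declared right-hand side would be y, so that a calculational step such
    as `also have ".. = suc n + m"` can pick up the right-hand side of the preceding
    equation. The right-hand side is declared with the `calc ... rhs ...` command
    defined below; the declaration for the identity type itself is presumably made in
    Identity.thy, which is not shown here.)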
8 | *) 9 | 10 | structure RHS = Generic_Data ( 11 | type T = (term * indexname) Termtab.table 12 | val empty = Termtab.empty 13 | val extend = I 14 | val merge = Termtab.merge (Term.aconv o apply2 #1) 15 | ) 16 | 17 | fun register_rhs t var = 18 | let 19 | val key = Term.head_of t 20 | val idxname = #1 (dest_Var var) 21 | in 22 | RHS.map (Termtab.update (key, (t, idxname))) 23 | end 24 | 25 | fun lookup_calc ctxt t = 26 | Termtab.lookup (RHS.get (Context.Proof ctxt)) (Term.head_of t) 27 | 28 | 29 | (* Declaration *) 30 | 31 | local val Frees_to_Vars = 32 | map_aterms (fn tm => 33 | case tm of 34 | Free (name, T) => Var (("*!"^name, 0), T) (*FIXME: Hacky naming!*) 35 | | _ => tm) 36 | in 37 | 38 | (*Declare the "right-hand side" of calculational types. Does not handle bound 39 | variables, so no dependent RHS in declarations!*) 40 | val _ = Outer_Syntax.local_theory \<^command_keyword>\calc\ 41 | "declare right hand side of calculational type" 42 | (Parse.term -- (\<^keyword>\rhs\ |-- Parse.term) >> 43 | (fn (t_str, rhs_str) => fn lthy => 44 | let 45 | val (t, rhs) = apply2 (Frees_to_Vars o Syntax.read_term lthy) 46 | (t_str, rhs_str) 47 | in lthy |> 48 | Local_Theory.background_theory ( 49 | Context.theory_map (register_rhs t rhs)) 50 | end)) 51 | 52 | end 53 | 54 | 55 | (* Ditto "''" setup *) 56 | 57 | fun last_rhs ctxt = map_aterms (fn t => 58 | case t of 59 | Const (\<^const_name>\rhs\, _) => 60 | let 61 | val this_name = Name_Space.full_name (Proof_Context.naming_of ctxt) 62 | (Binding.name Auto_Bind.thisN) 63 | val this = #thms (the (Proof_Context.lookup_fact ctxt this_name)) 64 | handle Option => [] 65 | val rhs = 66 | (case map Thm.prop_of this of 67 | [prop] => 68 | (let 69 | val typ = Lib.type_of_typing (Logic.strip_assums_concl prop) 70 | val (cong_pttrn, varname) = the (lookup_calc ctxt typ) 71 | val unif_res = Pattern.unify (Context.Proof ctxt) 72 | (cong_pttrn, typ) Envir.init 73 | val rhs = #2 (the 74 | (Vartab.lookup (Envir.term_env unif_res) varname)) 75 | in 76 | rhs 77 | end handle Option => 78 | error (".. can't match right-hand side of calculational type")) 79 | | _ => Term.dummy) 80 | in rhs end 81 | | _ => t) 82 | 83 | val _ = Context.>> 84 | (Syntax_Phases.term_check 5 "" (fn ctxt => map (last_rhs ctxt))) 85 | 86 | 87 | end 88 | -------------------------------------------------------------------------------- /mltt/core/implicits.ML: -------------------------------------------------------------------------------- 1 | (* Title: implicits.ML 2 | Author: Joshua Chen 3 | 4 | Implicit arguments. 
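   Note (inferred from the [implicit] definitions in this repository, e.g. the
   definition of len as ListRec {} Nat 0 ... in List_HoTT.thy): each occurrence of
   the implicit-argument marker {} is turned into a hole and then into a fresh
   schematic variable, to be solved later by elaboration.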
5 | *) 6 | 7 | structure Implicits : 8 | sig 9 | 10 | val implicit_defs: Proof.context -> (term * term) Symtab.table 11 | val implicit_defs_attr: attribute 12 | val make_holes: Proof.context -> term list -> term list 13 | 14 | end = struct 15 | 16 | structure Defs = Generic_Data ( 17 | type T = (term * term) Symtab.table 18 | val empty = Symtab.empty 19 | val extend = I 20 | val merge = Symtab.merge (Term.aconv o apply2 #1) 21 | ) 22 | 23 | val implicit_defs = Defs.get o Context.Proof 24 | 25 | val implicit_defs_attr = Thm.declaration_attribute (fn th => 26 | let 27 | val (t, def) = Lib.dest_eq (Thm.prop_of th) 28 | val (head, args) = Term.strip_comb t 29 | val def' = fold_rev lambda args def 30 | in 31 | Defs.map (Symtab.update (Term.term_name head, (head, def'))) 32 | end) 33 | 34 | fun make_holes_single ctxt tm name_ctxt = 35 | let 36 | fun iarg_to_hole (Const (\<^const_name>\iarg\, T)) = 37 | Const (\<^const_name>\hole\, T) 38 | | iarg_to_hole t = t 39 | 40 | fun expand head args = 41 | let fun betapplys (head', args') = 42 | Term.betapplys (map_aterms iarg_to_hole head', args') 43 | in 44 | case head of 45 | Abs (x, T, t) => 46 | list_comb (Abs (x, T, Lib.traverse_term expand t), args) 47 | | _ => 48 | case Symtab.lookup (implicit_defs ctxt) (Term.term_name head) of 49 | SOME (t, def) => betapplys 50 | (Envir.expand_atom 51 | (Term.fastype_of head) 52 | (Term.fastype_of t, def), 53 | args) 54 | | NONE => list_comb (head, args) 55 | end 56 | 57 | fun holes_to_vars t = 58 | let 59 | val count = Lib.subterm_count (Const (\<^const_name>\hole\, dummyT)) 60 | 61 | fun subst (Const (\<^const_name>\hole\, T)) (Var (idx, _)::_) Ts = 62 | let 63 | val bounds = map Bound (0 upto (length Ts - 1)) 64 | val T' = foldr1 (op -->) (Ts @ [T]) 65 | in 66 | foldl1 (op $) (Var (idx, T')::bounds) 67 | end 68 | | subst (Abs (x, T, t)) vs Ts = Abs (x, T, subst t vs (T::Ts)) 69 | | subst (t $ u) vs Ts = 70 | let val n = count t 71 | in subst t (take n vs) Ts $ subst u (drop n vs) Ts end 72 | | subst t _ _ = t 73 | 74 | val names = Name.invent name_ctxt "*" (count t) 75 | val vars = map (fn n => Var ((n, 0), dummyT)) names 76 | in 77 | (subst t vars [], fold Name.declare names name_ctxt) 78 | end 79 | in 80 | holes_to_vars (Lib.traverse_term expand tm) 81 | end 82 | 83 | fun make_holes ctxt tms = #1 84 | (fold_map (make_holes_single ctxt) tms (Variable.names_of ctxt)) 85 | 86 | 87 | end 88 | -------------------------------------------------------------------------------- /mltt/core/context_facts.ML: -------------------------------------------------------------------------------- 1 | structure Context_Facts: sig 2 | 3 | val Known: Proof.context -> thm Item_Net.T 4 | val known: Proof.context -> thm list 5 | val known_of: Proof.context -> term -> thm list 6 | val register_known: thm -> Context.generic -> Context.generic 7 | val register_knowns: thm list -> Context.generic -> Context.generic 8 | 9 | val Cond: Proof.context -> thm Item_Net.T 10 | val cond: Proof.context -> thm list 11 | val cond_of: Proof.context -> term -> thm list 12 | val register_cond: thm -> Context.generic -> Context.generic 13 | val register_conds: thm list -> Context.generic -> Context.generic 14 | 15 | val Eq: Proof.context -> thm Item_Net.T 16 | val eq: Proof.context -> thm list 17 | val eq_of: Proof.context -> term -> thm list 18 | val register_eq: thm -> Context.generic -> Context.generic 19 | val register_eqs: thm list -> Context.generic -> Context.generic 20 | 21 | val register_facts: thm list -> Proof.context -> Proof.context 22 | 23 
| end = struct 24 | 25 | (* Known types *) 26 | 27 | structure Known = Generic_Data ( 28 | type T = thm Item_Net.T 29 | val empty = Item_Net.init Thm.eq_thm 30 | (single o Lib.term_of_typing o Thm.prop_of) 31 | val extend = I 32 | val merge = Item_Net.merge 33 | ) 34 | 35 | val Known = Known.get o Context.Proof 36 | val known = Item_Net.content o Known 37 | fun known_of ctxt tm = Item_Net.retrieve (Known ctxt) tm 38 | 39 | fun register_known typing = 40 | if Lib.is_typing (Thm.prop_of typing) then Known.map (Item_Net.update typing) 41 | else error "Not a type judgment" 42 | 43 | fun register_knowns typings = foldr1 (op o) (map register_known typings) 44 | 45 | 46 | (* Conditional type rules *) 47 | 48 | (*Two important cases: 1. general type inference rules and 2. type family 49 | judgments*) 50 | 51 | structure Cond = Generic_Data ( 52 | type T = thm Item_Net.T 53 | val empty = Item_Net.init Thm.eq_thm 54 | (single o Lib.term_of_typing o Thm.concl_of) 55 | val extend = I 56 | val merge = Item_Net.merge 57 | ) 58 | 59 | val Cond = Cond.get o Context.Proof 60 | val cond = Item_Net.content o Cond 61 | fun cond_of ctxt tm = Item_Net.retrieve (Cond ctxt) tm 62 | 63 | fun register_cond rule = 64 | if Lib.is_typing (Thm.concl_of rule) then Cond.map (Item_Net.update rule) 65 | else error "Not a conditional type judgment" 66 | 67 | fun register_conds rules = foldr1 (op o) (map register_cond rules) 68 | 69 | 70 | (* Equality statements *) 71 | 72 | structure Eq = Generic_Data ( 73 | type T = thm Item_Net.T 74 | val empty = Item_Net.init Thm.eq_thm 75 | (single o (#1 o Lib.dest_eq) o Thm.concl_of) 76 | val extend = I 77 | val merge = Item_Net.merge 78 | ) 79 | 80 | val Eq = Eq.get o Context.Proof 81 | val eq = Item_Net.content o Eq 82 | fun eq_of ctxt tm = Item_Net.retrieve (Eq ctxt) tm 83 | 84 | fun register_eq rule = 85 | if Lib.is_eq (Thm.concl_of rule) then Eq.map (Item_Net.update rule) 86 | else error "Not a definitional equality judgment" 87 | 88 | fun register_eqs rules = foldr1 (op o) (map register_eq rules) 89 | 90 | 91 | (* Context assumptions *) 92 | 93 | fun register_facts ths = 94 | let 95 | val (facts, conds, eqs) = Lib.partition_judgments ths 96 | val f = register_knowns facts handle Empty => I 97 | val c = register_conds conds handle Empty => I 98 | val e = register_eqs eqs handle Empty => I 99 | in Context.proof_map (e o c o f) end 100 | 101 | end 102 | -------------------------------------------------------------------------------- /mltt/core/elaboration.ML: -------------------------------------------------------------------------------- 1 | (* Title: elaboration.ML 2 | Author: Joshua Chen 3 | 4 | Basic term elaboration. 5 | *) 6 | 7 | structure Elab: sig 8 | 9 | val elab: Proof.context -> cterm list -> term -> Envir.env 10 | val elab_stmt: Proof.context -> cterm list -> term -> Envir.env * term 11 | val elaborate: Proof.context -> cterm list -> ('a * (term * term list) list) list -> ('a * (term * term list) list) list 12 | 13 | end = struct 14 | 15 | (*Elaborate `tm` by solving the inference problem `tm: {}`, knowing `assums`, 16 | which are fully elaborated, in `ctxt`. 
Return a substitution.*) 17 | fun elab ctxt assums tm = 18 | if Lib.no_vars tm 19 | then Envir.init 20 | else 21 | let 22 | val inf = Goal.init (Thm.cterm_of ctxt (Lib.typing_of_term tm)) 23 | val res = Types.check_infer (map Thm.assume assums) 1 (ctxt, inf) 24 | val tm' = 25 | Thm.prop_of (#2 (Seq.hd (Seq.filter_results res))) 26 | |> Lib.dest_prop |> Lib.term_of_typing 27 | handle TERM ("dest_typing", [t]) => 28 | let val typ = Logic.unprotect (Logic.strip_assums_concl t) 29 | |> Lib.term_of_typing 30 | in 31 | error ("Elaboration of " ^ Syntax.string_of_term ctxt typ ^ " failed") 32 | end 33 | in 34 | Seq.hd (Unify.matchers (Context.Proof ctxt) [(tm, tm')]) 35 | end 36 | handle Option => error 37 | ("Elaboration of " ^ Syntax.string_of_term ctxt tm ^ " failed") 38 | 39 | (*Recursively elaborate a statement \x ... y. \...\ \ P x ... y by elaborating 40 | only the types of typing judgments (in particular, does not look at judgmental 41 | equality statements). Could also elaborate the terms of typing judgments, but 42 | for now we assume that these are always free variables in all the cases we're 43 | interested in.*) 44 | fun elab_stmt ctxt assums stmt = 45 | let 46 | val stmt = Lib.dest_prop stmt 47 | fun subst_term env = Envir.subst_term (Envir.type_env env, Envir.term_env env) 48 | in 49 | if Lib.no_vars stmt orelse Lib.is_eq stmt then 50 | (Envir.init, stmt) 51 | else if Lib.is_typing stmt then 52 | let 53 | val typ = Lib.type_of_typing stmt 54 | val subst = elab ctxt assums typ 55 | in (subst, subst_term subst stmt) end 56 | else 57 | let 58 | fun elab' assums (x :: xs) = 59 | let 60 | val (env, x') = elab_stmt ctxt assums x 61 | val assums' = 62 | if Lib.no_vars x' then Thm.cterm_of ctxt x' :: assums else assums 63 | in env :: elab' assums' xs end 64 | | elab' _ [] = [] 65 | val (prems, concl) = Lib.decompose_goal ctxt stmt 66 | val subst = fold (curry Envir.merge) (elab' assums prems) Envir.init 67 | val prems' = map (Thm.cterm_of ctxt o subst_term subst) prems 68 | val subst' = 69 | if Lib.is_typing concl then 70 | let val typ = Lib.type_of_typing concl 71 | in Envir.merge (subst, elab ctxt (assums @ prems') typ) end 72 | else subst 73 | in (subst', subst_term subst' stmt) end 74 | end 75 | 76 | (*Apply elaboration to the list format that assumptions and goal statements are 77 | given in*) 78 | fun elaborate ctxt known assms = 79 | let 80 | fun subst_term env = Envir.subst_term (Envir.type_env env, Envir.term_env env) 81 | fun elab_fact (fact, xs) assums = 82 | let val (subst, fact') = elab_stmt ctxt assums fact in 83 | ((fact', map (subst_term subst) xs), Thm.cterm_of ctxt fact' :: assums) 84 | end 85 | fun elab (b, facts) assums = 86 | let val (facts', assums') = fold_map elab_fact facts assums 87 | in ((b, facts'), assums') end 88 | in #1 (fold_map elab assms known) end 89 | 90 | 91 | end 92 | -------------------------------------------------------------------------------- /mltt/core/types.ML: -------------------------------------------------------------------------------- 1 | (* Title: types.ML 2 | Author: Joshua Chen 3 | 4 | Type-checking infrastructure. 
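   Provides the [type] attribute for registering typing facts and the bidirectional
   check_infer tactic that discharges typing and type-inference goals; the typechk
   proof method used throughout the theories is presumably a wrapper around this
   tactic (its setup is not part of this file).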
5 | *) 6 | 7 | structure Types: sig 8 | 9 | val debug_typechk: bool Config.T 10 | 11 | val known_ctac: thm list -> int -> context_tactic 12 | val check_infer: thm list -> int -> context_tactic 13 | 14 | end = struct 15 | 16 | open Context_Facts 17 | 18 | (** [type] attribute **) 19 | 20 | val _ = Theory.setup ( 21 | Attrib.setup \<^binding>\type\ 22 | (Scan.succeed (Thm.declaration_attribute (fn th => 23 | if Thm.no_prems th then register_known th else register_cond th))) 24 | "" 25 | #> Global_Theory.add_thms_dynamic (\<^binding>\type\, 26 | fn context => let val ctxt = Context.proof_of context in 27 | known ctxt @ cond ctxt end) 28 | ) 29 | 30 | 31 | (** Context tactics for type-checking and elaboration **) 32 | 33 | val debug_typechk = Attrib.setup_config_bool \<^binding>\debug_typechk\ (K false) 34 | 35 | fun debug_tac ctxt s = 36 | if Config.get ctxt debug_typechk then print_tac ctxt s else all_tac 37 | 38 | (*Solves goals without metavariables and type inference problems by assumption 39 | from inline premises or resolution with facts*) 40 | fun known_ctac facts = CONTEXT_SUBGOAL (fn (goal, i) => fn (ctxt, st) => 41 | TACTIC_CONTEXT ctxt 42 | let val concl = Logic.strip_assums_concl goal in 43 | if Lib.no_vars concl orelse 44 | (Lib.is_typing concl andalso Lib.no_vars (Lib.term_of_typing concl)) 45 | then 46 | let val ths = known ctxt @ facts 47 | in st |> 48 | (assume_tac ctxt ORELSE' resolve_tac ctxt ths THEN_ALL_NEW K no_tac) i 49 | end 50 | else Seq.empty 51 | end) 52 | 53 | (*Simple bidirectional typing tactic with some backtracking search over input 54 | facts.*) 55 | fun check_infer_step facts i (ctxt, st) = 56 | let 57 | val refine_tac = SUBGOAL (fn (goal, i) => 58 | if Lib.rigid_typing_concl goal 59 | then 60 | let 61 | val net = Tactic.build_net ( 62 | map (Simplifier.norm_hhf ctxt) facts 63 | @(cond ctxt) 64 | @(Named_Theorems.get ctxt \<^named_theorems>\form\) 65 | @(Named_Theorems.get ctxt \<^named_theorems>\intr\) 66 | @(map #1 (Elim.rules ctxt))) 67 | in resolve_from_net_tac ctxt net i end 68 | else no_tac) 69 | 70 | val sub_tac = SUBGOAL (fn (goal, i) => 71 | let val concl = Logic.strip_assums_concl goal in 72 | if Lib.is_typing concl 73 | andalso Lib.is_rigid (Lib.term_of_typing concl) 74 | andalso Lib.no_vars (Lib.type_of_typing concl) 75 | then 76 | (resolve_tac ctxt @{thms sub} 77 | THEN' SUBGOAL (fn (_, i) => 78 | NO_CONTEXT_TACTIC ctxt (check_infer facts i)) 79 | THEN' compute_tac ctxt facts 80 | THEN_ALL_NEW K no_tac) i 81 | else no_tac end) 82 | 83 | val ctxt' = ctxt (*TODO: Use this to store already-derived typing judgments*) 84 | in 85 | TACTIC_CONTEXT ctxt' ( 86 | ((NO_CONTEXT_TACTIC ctxt' o known_ctac facts THEN' K (debug_tac ctxt' "after `known`")) 87 | ORELSE' (refine_tac THEN' K (debug_tac ctxt' "after `refine`")) 88 | ORELSE' (sub_tac THEN' K (debug_tac ctxt' "after `sub`"))) i st) 89 | end 90 | 91 | and check_infer facts i (cst as (_, st)) = 92 | let 93 | val ctac = check_infer_step facts 94 | in 95 | cst |> (ctac i CTHEN 96 | CREPEAT_IN_RANGE i (Thm.nprems_of st - i) (CTRY o CREPEAT_ALL_NEW_FWD ctac)) 97 | end 98 | 99 | and compute_tac ctxt facts = 100 | let 101 | val comps = Named_Theorems.get ctxt \<^named_theorems>\comp\ 102 | val ss = simpset_of ctxt 103 | val ss' = simpset_of (empty_simpset ctxt addsimps comps) 104 | val ctxt' = put_simpset (merge_ss (ss, ss')) ctxt 105 | in 106 | SUBGOAL (fn (_, i) => 107 | ((CHANGED o asm_simp_tac ctxt' ORELSE' EqSubst.eqsubst_tac ctxt [0] comps) 108 | THEN_ALL_NEW SUBGOAL (fn (_, i) => 109 | NO_CONTEXT_TACTIC 
ctxt (check_infer facts i))) i) 110 | end 111 | 112 | 113 | end 114 | -------------------------------------------------------------------------------- /mltt/lib/Prelude.thy: -------------------------------------------------------------------------------- 1 | theory Prelude 2 | imports MLTT 3 | 4 | begin 5 | 6 | section \Sum type\ 7 | 8 | axiomatization 9 | Sum :: \o \ o \ o\ and 10 | inl :: \o \ o \ o \ o\ and 11 | inr :: \o \ o \ o \ o\ and 12 | SumInd :: \o \ o \ (o \ o) \ (o \ o) \ (o \ o) \ o \ o\ 13 | 14 | notation Sum (infixl "\" 50) 15 | 16 | axiomatization where 17 | SumF: "\A: U i; B: U i\ \ A \ B: U i" and 18 | 19 | Sum_inl: "\B: U i; a: A\ \ inl A B a: A \ B" and 20 | 21 | Sum_inr: "\A: U i; b: B\ \ inr A B b: A \ B" and 22 | 23 | SumE: "\ 24 | s: A \ B; 25 | \s. s: A \ B \ C s: U i; 26 | \a. a: A \ c a: C (inl A B a); 27 | \b. b: B \ d b: C (inr A B b) 28 | \ \ SumInd A B (fn s. C s) (fn a. c a) (fn b. d b) s: C s" and 29 | 30 | Sum_comp_inl: "\ 31 | a: A; 32 | \s. s: A \ B \ C s: U i; 33 | \a. a: A \ c a: C (inl A B a); 34 | \b. b: B \ d b: C (inr A B b) 35 | \ \ SumInd A B (fn s. C s) (fn a. c a) (fn b. d b) (inl A B a) \ c a" and 36 | 37 | Sum_comp_inr: "\ 38 | b: B; 39 | \s. s: A \ B \ C s: U i; 40 | \a. a: A \ c a: C (inl A B a); 41 | \b. b: B \ d b: C (inr A B b) 42 | \ \ SumInd A B (fn s. C s) (fn a. c a) (fn b. d b) (inr A B b) \ d b" 43 | 44 | lemmas 45 | [form] = SumF and 46 | [intr] = Sum_inl Sum_inr and 47 | [intro] = Sum_inl[rotated] Sum_inr[rotated] and 48 | [elim ?s] = SumE and 49 | [comp] = Sum_comp_inl Sum_comp_inr 50 | 51 | method left = rule Sum_inl 52 | method right = rule Sum_inr 53 | 54 | 55 | section \Empty and unit types\ 56 | 57 | axiomatization 58 | Top :: \o\ and 59 | tt :: \o\ and 60 | TopInd :: \(o \ o) \ o \ o \ o\ 61 | and 62 | Bot :: \o\ and 63 | BotInd :: \(o \ o) \ o \ o\ 64 | 65 | notation Top ("\") and Bot ("\") 66 | 67 | axiomatization where 68 | TopF: "\: U i" and 69 | 70 | TopI: "tt: \" and 71 | 72 | TopE: "\a: \; \x. x: \ \ C x: U i; c: C tt\ \ TopInd (fn x. C x) c a: C a" and 73 | 74 | Top_comp: "\\x. x: \ \ C x: U i; c: C tt\ \ TopInd (fn x. C x) c tt \ c" 75 | and 76 | BotF: "\: U i" and 77 | 78 | BotE: "\x: \; \x. x: \ \ C x: U i\ \ BotInd (fn x. C x) x: C x" 79 | 80 | lemmas 81 | [form] = TopF BotF and 82 | [intr, intro] = TopI and 83 | [elim ?a] = TopE and 84 | [elim ?x] = BotE and 85 | [comp] = Top_comp 86 | 87 | abbreviation (input) Not ("\_" [1000] 1000) where "\A \ A \ \" 88 | 89 | 90 | section \Booleans\ 91 | 92 | definition "Bool \ \ \ \" 93 | definition "true \ inl \ \ tt" 94 | definition "false \ inr \ \ tt" 95 | 96 | Lemma 97 | BoolF: "Bool: U i" and 98 | Bool_true: "true: Bool" and 99 | Bool_false: "false: Bool" 100 | unfolding Bool_def true_def false_def by typechk+ 101 | 102 | \ \Definitions like these should be handled by a future function package\ 103 | Lemma (def) ifelse [rotated 1]: 104 | assumes *[unfolded Bool_def true_def false_def]: 105 | "\x. x: Bool \ C x: U i" 106 | "x: Bool" 107 | "a: C true" 108 | "b: C false" 109 | shows "C x" 110 | using assms[unfolded Bool_def true_def false_def, type] 111 | by (elim x) (elim, fact)+ 112 | 113 | Lemma if_true: 114 | assumes 115 | "\x. x: Bool \ C x: U i" 116 | "a: C true" 117 | "b: C false" 118 | shows "ifelse C true a b \ a" 119 | unfolding ifelse_def true_def 120 | using assms unfolding Bool_def true_def false_def 121 | by compute 122 | 123 | Lemma if_false: 124 | assumes 125 | "\x. 
x: Bool \ C x: U i" 126 | "a: C true" 127 | "b: C false" 128 | shows "ifelse C false a b \ b" 129 | unfolding ifelse_def false_def 130 | using assms unfolding Bool_def true_def false_def 131 | by compute 132 | 133 | lemmas 134 | [form] = BoolF and 135 | [intr, intro] = Bool_true Bool_false and 136 | [comp] = if_true if_false and 137 | [elim ?x] = ifelse 138 | lemmas 139 | BoolE = ifelse 140 | 141 | subsection \Notation\ 142 | 143 | definition ifelse_i ("if _ then _ else _") 144 | where [implicit]: "if x then a else b \ ifelse {} x a b" 145 | 146 | translations "if x then a else b" \ "CONST ifelse C x a b" 147 | 148 | subsection \Logical connectives\ 149 | 150 | definition not ("!_") where "!x \ ifelse (K Bool) x false true" 151 | 152 | 153 | end 154 | -------------------------------------------------------------------------------- /mltt/core/focus.ML: -------------------------------------------------------------------------------- 1 | (* Title: focus.ML 2 | Author: Joshua Chen 3 | 4 | Focus on head subgoal, with optional variable renaming. 5 | 6 | Modified from code contained in ~~/Pure/Isar/subgoal.ML. 7 | *) 8 | 9 | local 10 | 11 | fun reverse_prems imp = 12 | let val (prems, concl) = (Drule.strip_imp_prems imp, Drule.strip_imp_concl imp) 13 | in fold (curry mk_implies) prems concl end 14 | 15 | fun gen_focus ctxt i bindings raw_st = 16 | let 17 | val st = raw_st 18 | |> Thm.solve_constraints 19 | |> Thm.transfer' ctxt 20 | |> Raw_Simplifier.norm_hhf_protect ctxt 21 | 22 | val ((schematic_types, [st']), ctxt1) = Variable.importT [st] ctxt 23 | 24 | val ((params, goal), ctxt2) = 25 | Variable.focus_cterm bindings (Thm.cprem_of st' i) ctxt1 26 | 27 | val (asms, concl) = 28 | (Drule.strip_imp_prems goal, Drule.strip_imp_concl goal) 29 | 30 | fun intern_var_assms asm (asms, concl) = 31 | if Lib.no_vars (Thm.term_of asm) 32 | then (asm :: asms, concl) 33 | else (asms, Drule.mk_implies (asm, concl)) 34 | 35 | val (asms', concl') = fold intern_var_assms asms ([], concl) 36 | |> apfst rev |> apsnd reverse_prems 37 | 38 | val (inst, ctxt3) = Variable.import_inst true (map Thm.term_of (asms')) ctxt2 39 | val schematic_terms = map (apsnd (Thm.cterm_of ctxt3)) (#2 inst) 40 | val schematics = (schematic_types, schematic_terms) 41 | val asms' = map (Thm.instantiate_cterm schematics) asms' 42 | val concl' = Thm.instantiate_cterm schematics concl' 43 | val (prems, context) = Assumption.add_assumes asms' ctxt3 44 | in 45 | ({context = context, params = params, prems = prems, 46 | asms = asms', concl = concl', schematics = schematics}, Goal.init concl') 47 | end 48 | 49 | fun param_bindings ctxt (param_suffix, raw_param_specs) st = 50 | let 51 | val _ = if Thm.no_prems st then error "No subgoals!" else () 52 | val subgoal = #1 (Logic.dest_implies (Thm.prop_of st)) 53 | val subgoal_params = 54 | map (apfst (Name.internal o Name.clean)) (Term.strip_all_vars subgoal) 55 | |> Term.variant_frees subgoal |> map #1 56 | 57 | val n = length subgoal_params 58 | val m = length raw_param_specs 59 | val _ = 60 | m <= n orelse 61 | error ("Excessive subgoal parameter specification" ^ 62 | Position.here_list (map snd (drop n raw_param_specs))) 63 | 64 | val param_specs = raw_param_specs 65 | |> map 66 | (fn (NONE, _) => NONE 67 | | (SOME x, pos) => 68 | let 69 | val b = #1 (#1 (Proof_Context.cert_var (Binding.make (x, pos), NONE, NoSyn) ctxt)) 70 | val _ = Variable.check_name b 71 | in SOME b end) 72 | |> param_suffix ? 
append (replicate (n - m) NONE) 73 | 74 | fun bindings (SOME x :: xs) (_ :: ys) = x :: bindings xs ys 75 | | bindings (NONE :: xs) (y :: ys) = Binding.name y :: bindings xs ys 76 | | bindings _ ys = map Binding.name ys 77 | in bindings param_specs subgoal_params end 78 | 79 | fun gen_schematic_subgoal prep_atts raw_result_binding param_specs state = 80 | let 81 | val _ = Proof.assert_backward state 82 | 83 | val state1 = state 84 | |> Proof.map_context (Proof_Context.set_mode Proof_Context.mode_schematic) 85 | |> Proof.refine_insert [] 86 | 87 | val {context = ctxt, facts, goal = st} = Proof.raw_goal state1 88 | val result_binding = apsnd (map (prep_atts ctxt)) raw_result_binding 89 | 90 | val subgoal_focus = #1 91 | (gen_focus ctxt 1 (SOME (param_bindings ctxt param_specs st)) st) 92 | 93 | val prems = #prems subgoal_focus 94 | 95 | fun after_qed (ctxt'', [[result]]) = 96 | Proof.end_block #> (fn state' => 97 | let 98 | val ctxt' = Proof.context_of state' 99 | val results' = 100 | Proof_Context.export ctxt'' ctxt' (Conjunction.elim_conjunctions result) 101 | in 102 | state' 103 | |> Proof.refine_primitive (fn _ => fn _ => 104 | Subgoal.retrofit ctxt'' ctxt' (#params subgoal_focus) (#asms subgoal_focus) 1 105 | (Goal.protect 0 result) st 106 | |> Seq.hd) 107 | |> Proof.map_context 108 | (#2 o Proof_Context.note_thmss "" [(result_binding, [(results', [])])]) 109 | end) 110 | #> Proof.reset_facts 111 | #> Proof.enter_backward 112 | in 113 | state1 114 | |> Proof.enter_forward 115 | |> Proof.using_facts [] 116 | |> Proof.begin_block 117 | |> Proof.map_context (fn _ => 118 | #context subgoal_focus 119 | |> Proof_Context.note_thmss "" [((Binding.name "prems", []), [(prems, [])])] 120 | |> snd 121 | |> Context_Facts.register_facts prems) 122 | |> Proof.internal_goal (K (K ())) (Proof_Context.get_mode ctxt) true "subgoal" 123 | NONE after_qed [] [] [(Binding.empty_atts, [(Thm.term_of (#concl subgoal_focus), [])])] 124 | |> #2 125 | |> Proof.using_facts (facts @ prems) 126 | |> pair subgoal_focus 127 | end 128 | 129 | val opt_fact_binding = 130 | Scan.optional ((Parse.binding -- Parse.opt_attribs || Parse.attribs >> pair Binding.empty) --| Args.colon) 131 | Binding.empty_atts 132 | 133 | val for_params = Scan.optional 134 | (\<^keyword>\vars\ |-- 135 | Parse.!!! 
((Scan.option Parse.dots >> is_some) -- 136 | (Scan.repeat1 (Parse.maybe_position Parse.name_position)))) 137 | (false, []) 138 | 139 | val schematic_subgoal_cmd = gen_schematic_subgoal Attrib.attribute_cmd 140 | 141 | val parser = opt_fact_binding -- for_params >> (fn (fact, params) => 142 | Toplevel.proofs (Seq.make_results o Seq.single o #2 o schematic_subgoal_cmd fact params)) 143 | 144 | in 145 | 146 | (** Outer syntax commands **) 147 | 148 | val _ = Outer_Syntax.command \<^command_keyword>\focus\ 149 | "focus on first subgoal within backward refinement, without instantiating schematic vars" 150 | parser 151 | 152 | val _ = Outer_Syntax.command \<^command_keyword>\\<^item>\ "focus bullet" parser 153 | val _ = Outer_Syntax.command \<^command_keyword>\\<^enum>\ "focus bullet" parser 154 | val _ = Outer_Syntax.command \<^command_keyword>\\\ "focus bullet" parser 155 | val _ = Outer_Syntax.command \<^command_keyword>\\\ "focus bullet" parser 156 | val _ = Outer_Syntax.command \<^command_keyword>\~\ "focus bullet" parser 157 | 158 | end 159 | -------------------------------------------------------------------------------- /mltt/lib/List.thy: -------------------------------------------------------------------------------- 1 | chapter \Lists\ 2 | 3 | theory List 4 | imports Maybe 5 | 6 | begin 7 | 8 | (*TODO: Inductive type and recursive function definitions. The ad-hoc 9 | axiomatization below should be subsumed once general inductive types are 10 | properly implemented.*) 11 | 12 | axiomatization 13 | List :: \o \ o\ and 14 | nil :: \o \ o\ and 15 | cons :: \o \ o \ o \ o\ and 16 | ListInd :: \o \ (o \ o) \ o \ (o \ o \ o \ o) \ o \ o\ 17 | where 18 | ListF: "A: U i \ List A: U i" and 19 | 20 | List_nil: "A: U i \ nil A: List A" and 21 | 22 | List_cons: "\x: A; xs: List A\ \ cons A x xs: List A" and 23 | 24 | ListE: "\ 25 | xs: List A; 26 | c\<^sub>0: C (nil A); 27 | \x xs rec. \x: A; xs: List A; rec: C xs\ \ f x xs rec: C (cons A x xs); 28 | \xs. xs: List A \ C xs: U i 29 | \ \ ListInd A (fn xs. C xs) c\<^sub>0 (fn x xs rec. f x xs rec) xs: C xs" and 30 | 31 | List_comp_nil: "\ 32 | c\<^sub>0: C (nil A); 33 | \x xs rec. \x: A; xs: List A; rec: C xs\ \ f x xs rec: C (cons A x xs); 34 | \xs. xs: List A \ C xs: U i 35 | \ \ ListInd A (fn xs. C xs) c\<^sub>0 (fn x xs rec. f x xs rec) (nil A) \ c\<^sub>0" and 36 | 37 | List_comp_cons: "\ 38 | xs: List A; 39 | c\<^sub>0: C (nil A); 40 | \x xs rec. \x: A; xs: List A; rec: C xs\ \ f x xs rec: C (cons A x xs); 41 | \xs. xs: List A \ C xs: U i 42 | \ \ 43 | ListInd A (fn xs. C xs) c\<^sub>0 (fn x xs rec. f x xs rec) (cons A x xs) \ 44 | f x xs (ListInd A (fn xs. C xs) c\<^sub>0 (fn x xs rec. f x xs rec) xs)" 45 | 46 | lemmas 47 | [form] = ListF and 48 | [intr, intro] = List_nil List_cons and 49 | [elim "?xs"] = ListE and 50 | [comp] = List_comp_nil List_comp_cons 51 | 52 | abbreviation "ListRec A C \ ListInd A (fn _. C)" 53 | 54 | Lemma list_cases [cases]: 55 | assumes 56 | "xs: List A" and 57 | nil_case: "c\<^sub>0: C (nil A)" and 58 | cons_case: "\x xs. \x: A; xs: List A\ \ f x xs: C (cons A x xs)" and 59 | "\xs. 
xs: List A \ C xs: U i" 60 | shows "C xs" 61 | by (elim xs) (fact nil_case, rule cons_case) 62 | 63 | 64 | section \Notation\ 65 | 66 | definition nil_i ("[]") 67 | where [implicit]: "[] \ nil {}" 68 | 69 | definition cons_i (infixr "#" 120) 70 | where [implicit]: "x # xs \ cons {} x xs" 71 | 72 | translations 73 | "[]" \ "CONST List.nil A" 74 | "x # xs" \ "CONST List.cons A x xs" 75 | syntax 76 | "_list" :: \args \ o\ ("[_]") 77 | translations 78 | "[x, xs]" \ "x # [xs]" 79 | "[x]" \ "x # []" 80 | 81 | 82 | section \Standard functions\ 83 | 84 | subsection \Head and tail\ 85 | 86 | Lemma (def) head: 87 | assumes "A: U i" "xs: List A" 88 | shows "Maybe A" 89 | proof (cases xs) 90 | show "none: Maybe A" by intro 91 | show "\x. x: A \ some x: Maybe A" by intro 92 | qed 93 | 94 | Lemma (def) tail: 95 | assumes "A: U i" "xs: List A" 96 | shows "List A" 97 | proof (cases xs) 98 | show "[]: List A" by intro 99 | show "\xs. xs: List A \ xs: List A" . 100 | qed 101 | 102 | definition head_i ("head") where [implicit]: "head xs \ List.head {} xs" 103 | definition tail_i ("tail") where [implicit]: "tail xs \ List.tail {} xs" 104 | 105 | translations 106 | "head" \ "CONST List.head A" 107 | "tail" \ "CONST List.tail A" 108 | 109 | Lemma head_type [type]: 110 | assumes "A: U i" "xs: List A" 111 | shows "head xs: Maybe A" 112 | unfolding head_def by typechk 113 | 114 | Lemma head_of_cons [comp]: 115 | assumes "A: U i" "x: A" "xs: List A" 116 | shows "head (x # xs) \ some x" 117 | unfolding head_def by compute 118 | 119 | Lemma tail_type [type]: 120 | assumes "A: U i" "xs: List A" 121 | shows "tail xs: List A" 122 | unfolding tail_def by typechk 123 | 124 | Lemma tail_of_cons [comp]: 125 | assumes "A: U i" "x: A" "xs: List A" 126 | shows "tail (x # xs) \ xs" 127 | unfolding tail_def by compute 128 | 129 | subsection \Append\ 130 | 131 | Lemma (def) app: 132 | assumes "A: U i" "xs: List A" "ys: List A" 133 | shows "List A" 134 | apply (elim xs) 135 | \<^item> by (fact \ys:_\) 136 | \<^item> vars x _ rec 137 | proof - show "x # rec: List A" by typechk qed 138 | done 139 | 140 | definition app_i ("app") where [implicit]: "app xs ys \ List.app {} xs ys" 141 | 142 | translations "app" \ "CONST List.app A" 143 | 144 | subsection \Map\ 145 | 146 | Lemma (def) map: 147 | assumes "A: U i" "B: U i" "f: A \ B" "xs: List A" 148 | shows "List B" 149 | proof (elim xs) 150 | show "[]: List B" by intro 151 | next fix x ys 152 | assuming "x: A" "ys: List B" 153 | show "f x # ys: List B" by typechk 154 | qed 155 | 156 | definition map_i ("map") where [implicit]: "map \ List.map {} {}" 157 | 158 | translations "map" \ "CONST List.map A B" 159 | 160 | Lemma map_type [type]: 161 | assumes "A: U i" "B: U i" "f: A \ B" "xs: List A" 162 | shows "map f xs: List B" 163 | unfolding map_def by typechk 164 | 165 | 166 | subsection \Reverse\ 167 | 168 | Lemma (def) rev: 169 | assumes "A: U i" "xs: List A" 170 | shows "List A" 171 | apply (elim xs) 172 | \<^item> by (rule List_nil) 173 | \<^item> vars x _ rec proof - show "app rec [x]: List A" by typechk qed 174 | done 175 | 176 | definition rev_i ("rev") where [implicit]: "rev \ List.rev {}" 177 | 178 | translations "rev" \ "CONST List.rev A" 179 | 180 | Lemma rev_type [type]: 181 | assumes "A: U i" "xs: List A" 182 | shows "rev xs: List A" 183 | unfolding rev_def by typechk 184 | 185 | Lemma rev_nil [comp]: 186 | assumes "A: U i" 187 | shows "rev (nil A) \ nil A" 188 | unfolding rev_def by compute 189 | 190 | 191 | end 192 | 
-------------------------------------------------------------------------------- /mltt/core/lib.ML: -------------------------------------------------------------------------------- 1 | structure Lib : 2 | sig 3 | 4 | (*Lists*) 5 | val max: ('a * 'a -> bool) -> 'a list -> 'a 6 | val maxint: int list -> int 7 | 8 | (*Terms*) 9 | val no_vars: term -> bool 10 | val is_rigid: term -> bool 11 | val is_eq: term -> bool 12 | val dest_prop: term -> term 13 | val dest_eq: term -> term * term 14 | val mk_Var: string -> int -> typ -> term 15 | val lambda_var: term -> term -> term 16 | 17 | val is_lvl: term -> bool 18 | val is_typing: term -> bool 19 | val mk_typing: term -> term -> term 20 | val dest_typing: term -> term * term 21 | val term_of_typing: term -> term 22 | val type_of_typing: term -> term 23 | val mk_Pi: term -> term -> term -> term 24 | 25 | val typing_of_term: term -> term 26 | 27 | (*Goals*) 28 | val decompose_goal: Proof.context -> term -> term list * term 29 | val rigid_typing_concl: term -> bool 30 | 31 | (*Theorems*) 32 | val partition_judgments: thm list -> thm list * thm list * thm list 33 | 34 | (*Subterms*) 35 | val has_subterm: term list -> term -> bool 36 | val subterm_count: term -> term -> int 37 | val subterm_count_distinct: term list -> term -> int 38 | val traverse_term: (term -> term list -> term) -> term -> term 39 | val collect_subterms: (term -> bool) -> term -> term list 40 | 41 | (*Orderings*) 42 | val subterm_order: term -> term -> order 43 | val lvl_order: term -> term -> order 44 | val cond_order: order -> order -> order 45 | 46 | end = struct 47 | 48 | 49 | (** Lists **) 50 | 51 | fun max gt (x::xs) = fold (fn a => fn b => if gt (a, b) then a else b) xs x 52 | | max _ [] = error "max of empty list" 53 | 54 | val maxint = max (op >) 55 | 56 | 57 | (** Terms **) 58 | 59 | (* Meta *) 60 | 61 | val no_vars = not o exists_subterm is_Var 62 | 63 | val is_rigid = not o is_Var o head_of 64 | 65 | fun is_eq (Const (\<^const_name>\Pure.eq\, _) $ _ $ _) = true 66 | | is_eq _ = false 67 | 68 | fun dest_prop (Const (\<^const_name>\Pure.prop\, _) $ P) = P 69 | | dest_prop P = P 70 | 71 | fun dest_eq (Const (\<^const_name>\Pure.eq\, _) $ t $ def) = (t, def) 72 | | dest_eq _ = error "dest_eq" 73 | 74 | fun mk_Var name idx T = Var ((name, idx), T) 75 | 76 | fun lambda_var x tm = 77 | let 78 | fun var_args (Var (idx, T)) = Var (idx, \<^typ>\o\ --> T) $ x 79 | | var_args t = t 80 | in 81 | tm |> map_aterms var_args 82 | |> lambda x 83 | end 84 | 85 | (* Object *) 86 | 87 | fun is_lvl t = case fastype_of t of Type (\<^type_name>\lvl\, _) => true | _ => false 88 | 89 | fun is_typing (Const (\<^const_name>\has_type\, _) $ _ $ _) = true 90 | | is_typing _ = false 91 | 92 | fun mk_typing t T = \<^const>\has_type\ $ t $ T 93 | 94 | fun dest_typing (Const (\<^const_name>\has_type\, _) $ t $ T) = (t, T) 95 | | dest_typing t = raise TERM ("dest_typing", [t]) 96 | 97 | val term_of_typing = #1 o dest_typing 98 | val type_of_typing = #2 o dest_typing 99 | 100 | fun mk_Pi v typ body = Const (\<^const_name>\Pi\, dummyT) $ typ $ lambda v body 101 | 102 | fun typing_of_term tm = \<^const>\has_type\ $ tm $ Var (("*?", 0), \<^typ>\o\) 103 | (*The above is a bit hacky; basically we need to guarantee that the schematic 104 | var is fresh. This works for now because no other code in the Isabelle system 105 | or the current logic uses this identifier.*) 106 | 107 | 108 | (** Goals **) 109 | 110 | (*Breaks a goal \x ... y. \P1; ... 
Pn\ \ Q into ([P1, ..., Pn], Q), fixing 111 | \-quantified variables and keeping schematics.*) 112 | fun decompose_goal ctxt goal = 113 | let 114 | val focus = 115 | #1 (Subgoal.focus_prems ctxt 1 NONE (Thm.trivial (Thm.cterm_of ctxt goal))) 116 | 117 | val schematics = #2 (#schematics focus) 118 | |> map (fn (v, ctm) => (Thm.term_of ctm, Var v)) 119 | in 120 | map Thm.prop_of (#prems focus) @ [Thm.term_of (#concl focus)] 121 | |> map (subst_free schematics) 122 | |> (fn xs => chop (length xs - 1) xs) |> apsnd the_single 123 | end 124 | handle List.Empty => error "Lib.decompose_goal" 125 | 126 | fun rigid_typing_concl goal = 127 | let val concl = Logic.strip_assums_concl goal 128 | in is_typing concl andalso is_rigid (term_of_typing concl) end 129 | 130 | 131 | (** Theorems **) 132 | fun partition_judgments ths = 133 | let 134 | fun part [] facts conds eqs = (facts, conds, eqs) 135 | | part (th::ths) facts conds eqs = 136 | if is_typing (Thm.prop_of th) then 137 | part ths (th::facts) conds eqs 138 | else if is_typing (Thm.concl_of th) then 139 | part ths facts (th::conds) eqs 140 | else part ths facts conds (th::eqs) 141 | in part ths [] [] [] end 142 | 143 | 144 | (** Subterms **) 145 | 146 | fun has_subterm tms = 147 | Term.exists_subterm 148 | (foldl1 (op orf) (map (fn t => fn s => Term.aconv_untyped (s, t)) tms)) 149 | 150 | fun subterm_count s t = 151 | let 152 | fun count (t1 $ t2) i = i + count t1 0 + count t2 0 153 | | count (Abs (_, _, t)) i = i + count t 0 154 | | count t i = if Term.aconv_untyped (s, t) then i + 1 else i 155 | in 156 | count t 0 157 | end 158 | 159 | (*Number of distinct subterms in `tms` that appear in `tm`*) 160 | fun subterm_count_distinct tms tm = 161 | length (filter I (map (fn t => has_subterm [t] tm) tms)) 162 | 163 | (* 164 | "Folds" a function f over the term structure of t by traversing t from child 165 | nodes upwards through parents. At each node n in the term syntax tree, f is 166 | additionally passed a list of the results of f at all children of n. 
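  For example, `traverse_term (fn head => fn args => list_comb (head, args)) t`
  essentially rebuilds t unchanged, while Implicits.make_holes passes a
  head-dependent function here to expand implicit definitions at each
  application node.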
167 | *) 168 | fun traverse_term f t = 169 | let 170 | fun map_aux (Abs (x, T, t)) = Abs (x, T, map_aux t) 171 | | map_aux t = 172 | let 173 | val (head, args) = Term.strip_comb t 174 | val args' = map map_aux args 175 | in 176 | f head args' 177 | end 178 | in 179 | map_aux t 180 | end 181 | 182 | fun collect_subterms f (t $ u) = collect_subterms f t @ collect_subterms f u 183 | | collect_subterms f (Abs (_, _, t)) = collect_subterms f t 184 | | collect_subterms f t = if f t then [t] else [] 185 | 186 | 187 | (** Orderings **) 188 | 189 | fun subterm_order t1 t2 = 190 | if has_subterm [t1] t2 then LESS 191 | else if has_subterm [t2] t1 then GREATER 192 | else EQUAL 193 | 194 | fun lvl_order t1 t2 = 195 | case fastype_of t1 of 196 | Type (\<^type_name>\lvl\, _) => (case fastype_of t2 of 197 | Type (\<^type_name>\lvl\, _) => EQUAL 198 | | Type (_, _) => LESS 199 | | _ => EQUAL) 200 | | Type (_, _) => (case fastype_of t2 of 201 | Type (\<^type_name>\lvl\, _) => GREATER 202 | | _ => EQUAL) 203 | | _ => EQUAL 204 | 205 | fun cond_order o1 o2 = case o1 of EQUAL => o2 | _ => o1 206 | 207 | 208 | end 209 | -------------------------------------------------------------------------------- /hott/Nat.thy: -------------------------------------------------------------------------------- 1 | theory Nat 2 | imports Identity 3 | 4 | begin 5 | 6 | axiomatization 7 | Nat :: \o\ and 8 | zero :: \o\ ("0") and 9 | suc :: \o \ o\ and 10 | NatInd :: \(o \ o) \ o \ (o \ o \ o) \ o \ o\ 11 | where 12 | NatF: "Nat: U i" and 13 | 14 | Nat_zero: "0: Nat" and 15 | 16 | Nat_suc: "n: Nat \ suc n: Nat" and 17 | 18 | NatE: "\ 19 | n: Nat; 20 | c\<^sub>0: C 0; 21 | \k rec. \k: Nat; rec: C k\ \ f k rec: C (suc k); 22 | \n. n: Nat \ C n: U i 23 | \ \ NatInd (fn n. C n) c\<^sub>0 (fn k rec. f k rec) n: C n" and 24 | 25 | Nat_comp_zero: "\ 26 | c\<^sub>0: C 0; 27 | \k rec. \k: Nat; rec: C k\ \ f k rec: C (suc k); 28 | \n. n: Nat \ C n: U i 29 | \ \ NatInd (fn n. C n) c\<^sub>0 (fn k rec. f k rec) 0 \ c\<^sub>0" and 30 | 31 | Nat_comp_suc: "\ 32 | n: Nat; 33 | c\<^sub>0: C 0; 34 | \k rec. \k: Nat; rec: C k\ \ f k rec: C (suc k); 35 | \n. n: Nat \ C n: U i 36 | \ \ 37 | NatInd (fn n. C n) c\<^sub>0 (fn k rec. f k rec) (suc n) \ 38 | f n (NatInd (fn n. C n) c\<^sub>0 (fn k rec. f k rec) n)" 39 | 40 | lemmas 41 | [form] = NatF and 42 | [intr, intro] = Nat_zero Nat_suc and 43 | [elim "?n"] = NatE and 44 | [comp] = Nat_comp_zero Nat_comp_suc 45 | 46 | abbreviation "NatRec C \ NatInd (fn _. 
C)" 47 | 48 | abbreviation one ("1") where "1 \ suc 0" 49 | abbreviation two ("2") where "2 \ suc 1" 50 | abbreviation three ("3") where "3 \ suc 2" 51 | abbreviation four ("4") where "4 \ suc 3" 52 | abbreviation five ("5") where "5 \ suc 4" 53 | abbreviation six ("6") where "6 \ suc 5" 54 | abbreviation seven ("7") where "7 \ suc 6" 55 | abbreviation eight ("8") where "8 \ suc 7" 56 | abbreviation nine ("9") where "9 \ suc 8" 57 | 58 | 59 | section \Basic arithmetic\ 60 | 61 | subsection \Addition\ 62 | 63 | definition add (infixl "+" 120) where "m + n \ NatRec Nat m (K suc) n" 64 | 65 | Lemma add_type [type]: 66 | assumes "m: Nat" "n: Nat" 67 | shows "m + n: Nat" 68 | unfolding add_def by typechk 69 | 70 | Lemma add_zero [comp]: 71 | assumes "m: Nat" 72 | shows "m + 0 \ m" 73 | unfolding add_def by compute 74 | 75 | Lemma add_suc [comp]: 76 | assumes "m: Nat" "n: Nat" 77 | shows "m + suc n \ suc (m + n)" 78 | unfolding add_def by compute 79 | 80 | Lemma (def) zero_add: 81 | assumes "n: Nat" 82 | shows "0 + n = n" 83 | apply (elim n) 84 | \<^item> by (compute; intro) 85 | \<^item> vars _ ih by compute (eq ih; refl) 86 | done 87 | 88 | Lemma (def) suc_add: 89 | assumes "m: Nat" "n: Nat" 90 | shows "suc m + n = suc (m + n)" 91 | apply (elim n) 92 | \<^item> by compute refl 93 | \<^item> vars _ ih by compute (eq ih; refl) 94 | done 95 | 96 | Lemma (def) suc_eq: 97 | assumes "m: Nat" "n: Nat" 98 | shows "p: m = n \ suc m = suc n" 99 | by (eq p) intro 100 | 101 | Lemma (def) add_assoc: 102 | assumes "l: Nat" "m: Nat" "n: Nat" 103 | shows "l + (m + n) = l + m+ n" 104 | apply (elim n) 105 | \<^item> by compute intro 106 | \<^item> vars _ ih by compute (eq ih; refl) 107 | done 108 | 109 | Lemma (def) add_comm: 110 | assumes "m: Nat" "n: Nat" 111 | shows "m + n = n + m" 112 | apply (elim n) 113 | \<^item> by (compute; rule zero_add[symmetric]) 114 | \<^item> vars n ih 115 | proof compute 116 | have "suc (m + n) = suc (n + m)" by (eq ih) refl 117 | also have ".. = suc n + m" by (rewr eq: suc_add) refl 118 | finally show "?" by this 119 | qed 120 | done 121 | 122 | subsection \Multiplication\ 123 | 124 | definition mul (infixl "*" 121) where "m * n \ NatRec Nat 0 (K $ add m) n" 125 | 126 | Lemma mul_type [type]: 127 | assumes "m: Nat" "n: Nat" 128 | shows "m * n: Nat" 129 | unfolding mul_def by typechk 130 | 131 | Lemma mul_zero [comp]: 132 | assumes "n: Nat" 133 | shows "n * 0 \ 0" 134 | unfolding mul_def by compute 135 | 136 | Lemma mul_one [comp]: 137 | assumes "n: Nat" 138 | shows "n * 1 \ n" 139 | unfolding mul_def by compute 140 | 141 | Lemma mul_suc [comp]: 142 | assumes "m: Nat" "n: Nat" 143 | shows "m * suc n \ m + m * n" 144 | unfolding mul_def by compute 145 | 146 | Lemma (def) zero_mul: 147 | assumes "n: Nat" 148 | shows "0 * n = 0" 149 | apply (elim n) 150 | \<^item> by compute refl 151 | \<^item> vars n ih 152 | proof compute 153 | have "0 + 0 * n = 0 + 0 " by (eq ih) refl 154 | also have ".. = 0" by compute refl 155 | finally show "?" by this 156 | qed 157 | done 158 | 159 | Lemma (def) suc_mul: 160 | assumes "m: Nat" "n: Nat" 161 | shows "suc m * n = m * n + n" 162 | apply (elim n) 163 | \<^item> by compute refl 164 | \<^item> vars n ih 165 | proof (compute, rewr eq: \ih:_\) 166 | have "suc m + (m * n + n) = suc (m + ?)" by (rule suc_add) 167 | also have ".. = suc (m + m * n + n)" by (rewr eq: add_assoc) refl 168 | finally show "?" 
by this 169 | qed 170 | done 171 | 172 | Lemma (def) mul_dist_add: 173 | assumes "l: Nat" "m: Nat" "n: Nat" 174 | shows "l * (m + n) = l * m + l * n" 175 | apply (elim n) 176 | \<^item> by compute refl 177 | \<^item> vars n ih 178 | proof compute 179 | have "l + l * (m + n) = l + (l * m + l * n)" by (eq ih) refl 180 | also have ".. = l + l * m + l * n" by (rule add_assoc) 181 | also have ".. = l * m + l + l * n" by (rewr eq: add_comm) refl 182 | also have ".. = l * m + (l + l * n)" by (rewr eq: add_assoc) refl 183 | finally show "?" by this 184 | qed 185 | done 186 | 187 | Lemma (def) mul_assoc: 188 | assumes "l: Nat" "m: Nat" "n: Nat" 189 | shows "l * (m * n) = l * m * n" 190 | apply (elim n) 191 | \<^item> by compute refl 192 | \<^item> vars n ih 193 | proof compute 194 | have "l * (m + m * n) = l * m + l * (m * n)" by (rule mul_dist_add) 195 | also have ".. = l * m + l * m * n" by (rewr eq: \ih:_\) refl 196 | finally show "?" by this 197 | qed 198 | done 199 | 200 | Lemma (def) mul_comm: 201 | assumes "m: Nat" "n: Nat" 202 | shows "m * n = n * m" 203 | apply (elim n) 204 | \<^item> by compute (rewr eq: zero_mul, refl) 205 | \<^item> vars n ih 206 | proof (compute, rule pathinv) 207 | have "suc n * m = n * m + m" by (rule suc_mul) 208 | also have ".. = m + m * n" 209 | by (rewr eq: \ih:_\, rewr eq: add_comm) refl 210 | finally show "?" by this 211 | qed 212 | done 213 | 214 | 215 | end 216 | -------------------------------------------------------------------------------- /mltt/core/tactics.ML: -------------------------------------------------------------------------------- 1 | (* Title: tactics.ML 2 | Author: Joshua Chen 3 | 4 | General tactics for dependent type theory. 5 | *) 6 | 7 | structure Tactics: 8 | sig 9 | 10 | val solve_side_conds: int Config.T 11 | val SIDE_CONDS: int -> context_tactic' -> thm list -> context_tactic' 12 | val rule_ctac: thm list -> context_tactic' 13 | val dest_ctac: int option -> thm list -> context_tactic' 14 | val intro_ctac: context_tactic' 15 | val elim_ctac: term list -> context_tactic' 16 | val cases_ctac: term -> context_tactic' 17 | 18 | end = struct 19 | 20 | 21 | (* Side conditions *) 22 | val solve_side_conds = Attrib.setup_config_int \<^binding>\solve_side_conds\ (K 2) 23 | 24 | fun SIDE_CONDS j ctac facts i (cst as (ctxt, st)) = cst |> 25 | (case Config.get ctxt solve_side_conds of 26 | 1 => (ctac CTHEN_ALL_NEW (CTRY o Types.known_ctac facts)) i 27 | | 2 => ctac i CTHEN CREPEAT_IN_RANGE (i + j) (Thm.nprems_of st - i) 28 | (CTRY o CREPEAT_ALL_NEW_FWD (Types.check_infer facts)) 29 | | _ => ctac i) 30 | 31 | 32 | (* rule, dest, intro *) 33 | 34 | local 35 | fun mk_rules _ ths [] = ths 36 | | mk_rules n ths ths' = 37 | let val ths'' = foldr1 (op @) 38 | (map 39 | (fn th => [rotate_prems n (th RS @{thm PiE})] handle THM _ => []) 40 | ths') 41 | in 42 | mk_rules n (ths @ ths') ths'' 43 | end 44 | in 45 | 46 | (*Resolves with given rules*) 47 | fun rule_ctac ths i (ctxt, st) = 48 | TACTIC_CONTEXT ctxt (resolve_tac ctxt (mk_rules 0 [] ths) i st) 49 | 50 | (*Attempts destruct-resolution with the n-th premise of the given rules*) 51 | fun dest_ctac opt_n ths i (ctxt, st) = 52 | TACTIC_CONTEXT ctxt (dresolve_tac ctxt 53 | (mk_rules (case opt_n of NONE => 0 | SOME 0 => 0 | SOME n => n-1) [] ths) 54 | i st) 55 | 56 | end 57 | 58 | (*Applies an appropriate introduction rule*) 59 | val intro_ctac = CONTEXT_TACTIC' (fn ctxt => SUBGOAL (fn (goal, i) => 60 | let val concl = Logic.strip_assums_concl goal in 61 | if Lib.is_typing concl andalso Lib.is_rigid 
(Lib.type_of_typing concl) 62 | then resolve_tac ctxt (Named_Theorems.get ctxt \<^named_theorems>\intro\) i 63 | else no_tac 64 | end)) 65 | 66 | 67 | (* Induction/elimination *) 68 | 69 | (*Pushes a known typing t:T into a \-type. 70 | This tactic is well-behaved only when t is sufficiently well specified 71 | (otherwise there might be multiple possible judgments t:T that unify, and 72 | which is chosen is undefined).*) 73 | fun internalize_fact_tac t = 74 | Subgoal.FOCUS_PARAMS (fn {context = ctxt, concl = raw_concl, ...} => 75 | let 76 | val concl = Logic.strip_assums_concl (Thm.term_of raw_concl) 77 | val C = Lib.type_of_typing concl 78 | val B = Thm.cterm_of ctxt (Lib.lambda_var t C) 79 | val a = Thm.cterm_of ctxt t 80 | (*The resolvent is PiE[where ?B=B and ?a=a]*) 81 | val resolvent = 82 | Drule.infer_instantiate' ctxt [NONE, NONE, SOME B, SOME a] @{thm PiE} 83 | in 84 | HEADGOAL (resolve_tac ctxt [resolvent]) 85 | (*Unify with the correct type T*) 86 | THEN SOMEGOAL (NO_CONTEXT_TACTIC ctxt o Types.known_ctac []) 87 | end) 88 | 89 | fun elim_core_tac tms types ctxt = 90 | let 91 | val rule_insts = map ((Elim.lookup_rule ctxt) o Term.head_of) types 92 | val rules = flat (map 93 | (fn rule_inst => case rule_inst of 94 | NONE => [] 95 | | SOME (rl, idxnames) => [Drule.infer_instantiate ctxt 96 | (idxnames ~~ map (Thm.cterm_of ctxt) tms) rl]) 97 | rule_insts) 98 | in 99 | resolve_tac ctxt rules 100 | THEN' RANGE (replicate (length tms) (NO_CONTEXT_TACTIC ctxt o Types.check_infer [])) 101 | end handle Option => K no_tac 102 | 103 | (*Premises that have already been pushed into the \-type*) 104 | structure Inserts = Proof_Data ( 105 | type T = term Item_Net.T 106 | val init = K (Item_Net.init Term.aconv_untyped single) 107 | ) 108 | 109 | fun elim_ctac tms = 110 | case tms of 111 | [] => CONTEXT_TACTIC' (fn ctxt => eresolve_tac ctxt (map #1 (Elim.rules ctxt))) 112 | | major :: _ => CONTEXT_SUBGOAL (fn (goal, _) => fn cst as (ctxt, st) => 113 | let 114 | val facts = map Thm.prop_of (Context_Facts.known ctxt) 115 | val prems = Logic.strip_assums_hyp goal 116 | val template = Lib.typing_of_term major 117 | val types = filter (fn th => Term.could_unify (template, th)) (facts @ prems) 118 | |> map Lib.type_of_typing 119 | in case types of 120 | [] => no_ctac cst 121 | | _ => 122 | let 123 | val inserts = facts @ prems 124 | |> filter Lib.is_typing 125 | |> map Lib.dest_typing 126 | |> filter_out (fn (t, _) => 127 | Term.aconv (t, major) orelse Item_Net.member (Inserts.get ctxt) t) 128 | |> map (fn (t, T) => ((t, T), Lib.subterm_count_distinct tms T)) 129 | |> filter (fn (_, i) => i > 0) 130 | (*`t1: T1` comes before `t2: T2` if T1 contains t2 as subterm. 
131 | If they are incomparable, then order by decreasing 132 | `subterm_count_distinct tms T`*) 133 | |> sort (fn (((t1, _), i), ((_, T2), j)) => 134 | Lib.cond_order (Lib.subterm_order T2 t1) (int_ord (j, i))) 135 | |> map (#1 o #1) 136 | val record_inserts = Inserts.map (fold Item_Net.update inserts) 137 | val tac = 138 | (*Push premises having a subterm in `tms` into a \*) 139 | fold (fn t => fn tac => 140 | tac THEN HEADGOAL (internalize_fact_tac t ctxt)) 141 | inserts all_tac 142 | (*Apply elimination rule*) 143 | THEN HEADGOAL ( 144 | elim_core_tac tms types ctxt 145 | (*Pull pushed premises back out*) 146 | THEN_ALL_NEW (SUBGOAL (fn (_, i) => 147 | REPEAT_DETERM_N (length inserts) 148 | (resolve_tac ctxt @{thms PiI[rotated]} i)))) 149 | in 150 | TACTIC_CONTEXT (record_inserts ctxt) (tac st) 151 | end 152 | end) 153 | 154 | fun cases_ctac tm = 155 | let fun tac ctxt = 156 | SUBGOAL (fn (goal, i) => 157 | let 158 | val facts = Proof_Context.facts_of ctxt 159 | val prems = Logic.strip_assums_hyp goal 160 | val template = Lib.typing_of_term tm 161 | val types = 162 | map (Thm.prop_of o #1) (Facts.could_unify facts template) 163 | @ filter (fn prem => Term.could_unify (template, prem)) prems 164 | |> map Lib.type_of_typing 165 | val res = (case types of 166 | [typ] => Drule.infer_instantiate' ctxt [SOME (Thm.cterm_of ctxt tm)] 167 | (the (Case.lookup_rule ctxt (Term.head_of typ))) 168 | | [] => raise Option 169 | | _ => raise error (Syntax.string_of_term ctxt tm ^ "not uniquely typed")) 170 | handle Option => 171 | error ("No case rule known for " ^ (Syntax.string_of_term ctxt tm)) 172 | in 173 | resolve_tac ctxt [res] i 174 | end) 175 | in CONTEXT_TACTIC' tac end 176 | 177 | 178 | end 179 | 180 | open Tactics 181 | -------------------------------------------------------------------------------- /mltt/core/goals.ML: -------------------------------------------------------------------------------- 1 | (* Title: goals.ML 2 | Author: Joshua Chen 3 | 4 | Goal statements and proof term export. 5 | 6 | Modified from code contained in ~~/Pure/Isar/specification.ML. 7 | *) 8 | 9 | local 10 | 11 | val long_keyword = 12 | Parse_Spec.includes >> K "" || 13 | Parse_Spec.long_statement_keyword 14 | 15 | val long_statement = 16 | Scan.optional 17 | (Parse_Spec.opt_thm_name ":" --| Scan.ahead long_keyword) 18 | Binding.empty_atts 19 | -- Scan.optional Parse_Spec.includes [] 20 | -- Parse_Spec.long_statement >> 21 | (fn ((binding, includes), (elems, concl)) => 22 | (true, binding, includes, elems, concl)) 23 | 24 | val short_statement = 25 | Parse_Spec.statement -- Parse_Spec.if_statement -- Parse.for_fixes >> 26 | (fn ((shows, assumes), fixes) => 27 | (false, Binding.empty_atts, [], 28 | [Element.Fixes fixes, Element.Assumes assumes], Element.Shows shows) 29 | ) 30 | 31 | val where_statement = Scan.optional (Parse.$$$ "where" |-- Parse.!!! 
Parse_Spec.statement) [] 32 | 33 | val def_statement = 34 | Parse_Spec.statement -- where_statement >> 35 | (fn (shows, assumes) => 36 | (false, Binding.empty_atts, [], 37 | [Element.Fixes [], Element.Assumes assumes], Element.Shows shows) 38 | ) 39 | 40 | fun prep_statement prep_att prep_stmt raw_elems raw_stmt ctxt = 41 | let 42 | val (stmt, elems_ctxt) = prep_stmt raw_elems raw_stmt ctxt 43 | val prems = Assumption.local_prems_of elems_ctxt ctxt 44 | val stmt_ctxt = 45 | fold (fold (Proof_Context.augment o fst) o snd) stmt elems_ctxt 46 | in case raw_stmt 47 | of Element.Shows _ => 48 | let val stmt' = Attrib.map_specs (map prep_att) stmt 49 | in (([], prems, stmt', NONE), stmt_ctxt) end 50 | | Element.Obtains raw_obtains => 51 | let 52 | val asms_ctxt = stmt_ctxt 53 | |> fold (fn ((name, _), asm) => 54 | snd o Proof_Context.add_assms Assumption.assume_export 55 | [((name, [Context_Rules.intro_query NONE]), asm)]) stmt 56 | val that = Assumption.local_prems_of asms_ctxt stmt_ctxt 57 | val ([(_, that')], that_ctxt) = asms_ctxt 58 | |> Proof_Context.set_stmt true 59 | |> Proof_Context.note_thmss "" 60 | [((Binding.name Auto_Bind.thatN, []), [(that, [])])] 61 | ||> Proof_Context.restore_stmt asms_ctxt 62 | val stmt' = 63 | [(Binding.empty_atts, [(#2 (#1 (Obtain.obtain_thesis ctxt)), [])])] 64 | in 65 | ((Obtain.obtains_attribs raw_obtains, prems, stmt', SOME that'), 66 | that_ctxt) 67 | end 68 | end 69 | 70 | fun make_name_binding name suffix local_name = 71 | let val base_local_name = Long_Name.base_name local_name 72 | in Binding.qualified_name 73 | ((case base_local_name of "" => name | _ => base_local_name) ^ 74 | (case suffix 75 | of SOME s => "_" ^ s 76 | | NONE => "")) 77 | end 78 | 79 | fun define_proof_term name (local_name, [th]) lthy = 80 | let 81 | val (prems, concl) = 82 | (Logic.strip_assums_hyp (Thm.prop_of th), 83 | Logic.strip_assums_concl (Thm.prop_of th)) 84 | in 85 | if not (Lib.is_typing concl) then ([], lthy) 86 | else let 87 | val prems_vars = distinct Term.aconv ( 88 | flat (map (Lib.collect_subterms is_Var) prems)) 89 | 90 | val concl_vars = distinct Term.aconv ( 91 | Lib.collect_subterms is_Var (Lib.term_of_typing concl)) 92 | 93 | val params = sort (uncurry Lib.lvl_order) (inter Term.aconv concl_vars prems_vars) 94 | 95 | val prf_tm = fold_rev lambda params (Lib.term_of_typing concl) 96 | 97 | val levels = filter Lib.is_lvl (distinct Term.aconv ( 98 | Lib.collect_subterms is_Var prf_tm)) 99 | 100 | val prf_tm' = fold_rev lambda levels prf_tm 101 | 102 | val ((_, (_, raw_def)), lthy') = Local_Theory.define 103 | ((make_name_binding name NONE local_name, Mixfix.NoSyn), 104 | ((make_name_binding name (SOME "prf") local_name, []), prf_tm')) lthy 105 | 106 | val def = fold 107 | (fn th1 => fn th2 => Thm.combination th2 th1) 108 | (map (Thm.reflexive o Thm.cterm_of lthy) (params @ levels)) 109 | raw_def 110 | 111 | val ((_, def'), lthy'') = Local_Theory.note 112 | ((make_name_binding name (SOME "def") local_name, []), [def]) 113 | lthy' 114 | in 115 | (def', lthy'') 116 | end 117 | end 118 | | define_proof_term _ _ _ = error 119 | ("Can't generate proof terms for multiple facts in one statement") 120 | 121 | fun gen_schematic_theorem 122 | bundle_includes prep_att prep_stmt 123 | gen_prf_tm long kind defn 124 | before_qed after_qed 125 | (name, raw_atts) raw_includes raw_elems raw_concl 126 | do_print lthy 127 | = 128 | let 129 | val _ = Local_Theory.assert lthy 130 | val elems = raw_elems |> map (Element.map_ctxt_attrib (prep_att lthy)) 131 | val ((more_atts, prems, 
stmt, facts), goal_ctxt) = lthy 132 | |> bundle_includes raw_includes 133 | |> prep_statement (prep_att lthy) prep_stmt elems raw_concl 134 | val atts = more_atts @ map (prep_att lthy) raw_atts 135 | val pos = Position.thread_data () 136 | val prems_name = if long then Auto_Bind.assmsN else Auto_Bind.thatN 137 | 138 | fun gen_and_after_qed results goal_ctxt' = 139 | let 140 | val results' = burrow 141 | (map (Goal.norm_result lthy) o Proof_Context.export goal_ctxt' lthy) 142 | results 143 | 144 | val ((res, lthy'), substmts) = 145 | if forall (Binding.is_empty_atts o fst) stmt 146 | then ((map (pair "") results', lthy), false) 147 | else 148 | (Local_Theory.notes_kind kind 149 | (map2 (fn (b, _) => fn ths => (b, [(ths, [])])) stmt results') 150 | lthy, 151 | true) 152 | 153 | val ((name_def, defs), (res', lthy'')) = 154 | if gen_prf_tm 155 | then 156 | let 157 | val (prf_tm_defs, new_lthy) = fold 158 | (fn result => fn (defs, lthy) => 159 | apfst (fn new_defs => defs @ new_defs) 160 | (define_proof_term (Binding.name_of name) result lthy)) 161 | res 162 | ([], lthy') 163 | 164 | val res_folded = 165 | map (apsnd (map (Local_Defs.fold new_lthy prf_tm_defs))) res 166 | 167 | val name_def = 168 | make_name_binding (Binding.name_of name) (SOME "def") (#1 (hd res_folded)) 169 | 170 | val name_type = 171 | if defn then 172 | make_name_binding (Binding.name_of name) (SOME "type") (#1 (hd res_folded)) 173 | else name 174 | in 175 | ((name_def, prf_tm_defs), 176 | Local_Theory.notes_kind kind 177 | [((name_type, @{attributes [type]} @ atts), 178 | [(maps #2 res_folded, [])])] 179 | new_lthy) 180 | end 181 | else 182 | ((Binding.empty, []), 183 | Local_Theory.notes_kind kind 184 | [((name, atts), [(maps #2 res, [])])] 185 | lthy') 186 | 187 | (*Display theorems*) 188 | val _ = 189 | if defn then 190 | single (Proof_Display.print_results do_print pos lthy'' 191 | ((kind, Binding.name_of name_def), [("", defs)])) 192 | else if not long andalso not substmts then 193 | single (Proof_Display.print_results do_print pos lthy'' 194 | ((kind, Binding.name_of name), map (fn (_, ths) => ("", ths)) res')) 195 | else 196 | (if long then 197 | Proof_Display.print_results do_print pos lthy'' 198 | ((kind, Binding.name_of name), map (fn (_, ths) => ("", ths)) res') 199 | else (); 200 | if substmts then 201 | map (fn (name, ths) => 202 | Proof_Display.print_results do_print pos lthy'' 203 | ((kind, name), [("", ths)])) 204 | res 205 | else []) 206 | in 207 | after_qed results' lthy'' 208 | end 209 | in 210 | goal_ctxt 211 | |> not (null prems) ? 
212 | (Proof_Context.note_thmss "" [((Binding.name prems_name, []), [(prems, [])])] 213 | #> snd #> Context_Facts.register_facts prems) 214 | |> Proof.theorem before_qed gen_and_after_qed (map snd stmt) 215 | |> (case facts of NONE => I | SOME ths => Proof.refine_insert ths) 216 | end 217 | 218 | val schematic_theorem_cmd = 219 | gen_schematic_theorem 220 | Bundle.includes_cmd 221 | Attrib.check_src 222 | Elaborated_Statement.read_goal_statement 223 | 224 | fun theorem spec descr = 225 | Outer_Syntax.local_theory_to_proof' spec ("state " ^ descr) 226 | (Scan.option (Args.parens (Args.$$$ "def")) 227 | -- (long_statement || short_statement) >> 228 | (fn (opt_derive, (long, binding, includes, elems, concl)) => 229 | schematic_theorem_cmd 230 | (case opt_derive of SOME "def" => true | _ => false) 231 | long descr false NONE (K I) binding includes elems concl)) 232 | 233 | fun definition spec descr = 234 | Outer_Syntax.local_theory_to_proof' spec "definition with explicit type checking obligation" 235 | (def_statement >> 236 | (fn (long, binding, includes, elems, concl) => 237 | schematic_theorem_cmd 238 | true long descr true NONE (K I) binding includes elems concl)) 239 | 240 | in 241 | 242 | val _ = theorem \<^command_keyword>\Theorem\ "Theorem" 243 | val _ = theorem \<^command_keyword>\Lemma\ "Lemma" 244 | val _ = theorem \<^command_keyword>\Corollary\ "Corollary" 245 | val _ = theorem \<^command_keyword>\Proposition\ "Proposition" 246 | val _ = definition \<^command_keyword>\Definition\ "Definition" 247 | 248 | end 249 | -------------------------------------------------------------------------------- /mltt/core/context_tactical.ML: -------------------------------------------------------------------------------- 1 | (* Title: context_tactical.ML 2 | Author: Joshua Chen 3 | 4 | More context tactics, and context tactic combinators. 
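   For example, a common idiom built from these combinators (and used by
   CREPEAT_IN_RANGE further down in this file) is

     CREPEAT (CCHANGED ctac)

   which re-applies the context tactic ctac only for as long as it still
   changes the proof state.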
5 | 6 | Contains code modified from 7 | ~~/Pure/search.ML 8 | ~~/Pure/tactical.ML 9 | *) 10 | 11 | infix 1 CTHEN CTHEN' CTHEN_ALL_NEW CTHEN_ALL_NEW_FWD 12 | infix 0 CORELSE CAPPEND CORELSE' CAPPEND' 13 | 14 | structure Context_Tactical: 15 | sig 16 | 17 | type context_tactic' = int -> context_tactic 18 | val CONTEXT_TACTIC': (Proof.context -> int -> tactic) -> context_tactic' 19 | val all_ctac: context_tactic 20 | val no_ctac: context_tactic 21 | val print_ctac: (Proof.context -> string) -> context_tactic 22 | val CTHEN: context_tactic * context_tactic -> context_tactic 23 | val CORELSE: context_tactic * context_tactic -> context_tactic 24 | val CAPPEND: context_tactic * context_tactic -> context_tactic 25 | val CTHEN': context_tactic' * context_tactic' -> context_tactic' 26 | val CORELSE': context_tactic' * context_tactic' -> context_tactic' 27 | val CAPPEND': context_tactic' * context_tactic' -> context_tactic' 28 | val CTRY: context_tactic -> context_tactic 29 | val CREPEAT: context_tactic -> context_tactic 30 | val CREPEAT1: context_tactic -> context_tactic 31 | val CREPEAT_N: int -> context_tactic -> context_tactic 32 | val CFILTER: (context_state -> bool) -> context_tactic -> context_tactic 33 | val CCHANGED: context_tactic -> context_tactic 34 | val CTHEN_ALL_NEW: context_tactic' * context_tactic' -> context_tactic' 35 | val CREPEAT_IN_RANGE: int -> int -> context_tactic' -> context_tactic 36 | val CREPEAT_ALL_NEW: context_tactic' -> context_tactic' 37 | val CTHEN_ALL_NEW_FWD: context_tactic' * context_tactic' -> context_tactic' 38 | val CREPEAT_ALL_NEW_FWD: context_tactic' -> context_tactic' 39 | val CHEADGOAL: context_tactic' -> context_tactic 40 | val CALLGOALS: context_tactic' -> context_tactic 41 | val CSOMEGOAL: context_tactic' -> context_tactic 42 | val CRANGE: context_tactic' list -> context_tactic' 43 | val CFIRST: context_tactic list -> context_tactic 44 | val CFIRST': context_tactic' list -> context_tactic' 45 | val CTHEN_BEST_FIRST: context_tactic -> (context_state -> bool) -> 46 | (context_state -> int) -> context_tactic -> context_tactic 47 | val CBEST_FIRST: (context_state -> bool) -> (context_state -> int) -> 48 | context_tactic -> context_tactic 49 | val CTHEN_ASTAR: context_tactic -> (context_state -> bool) -> 50 | (int -> context_state -> int) -> context_tactic -> context_tactic 51 | val CASTAR: (context_state -> bool) -> (int -> context_state -> int) -> 52 | context_tactic -> context_tactic 53 | 54 | end = struct 55 | 56 | type context_tactic' = int -> context_tactic 57 | 58 | fun CONTEXT_TACTIC' tac i (ctxt, st) = TACTIC_CONTEXT ctxt ((tac ctxt i) st) 59 | 60 | val all_ctac = Seq.make_results o Seq.single 61 | val no_ctac = K Seq.empty 62 | fun print_ctac f (ctxt, st) = CONTEXT_TACTIC (print_tac ctxt (f ctxt)) (ctxt, st) 63 | 64 | fun (ctac1 CTHEN ctac2) cst = Seq.maps_results ctac2 (ctac1 cst) 65 | 66 | fun (ctac1 CORELSE ctac2) cst = 67 | (case Seq.pull (ctac1 cst) of 68 | NONE => ctac2 cst 69 | | some => Seq.make (fn () => some)) 70 | 71 | fun (ctac1 CAPPEND ctac2) cst = 72 | Seq.append (ctac1 cst) (Seq.make (fn () => Seq.pull (ctac2 cst))) 73 | 74 | fun (ctac1 CTHEN' ctac2) x = ctac1 x CTHEN ctac2 x 75 | fun (ctac1 CORELSE' ctac2) x = ctac1 x CORELSE ctac2 x 76 | fun (ctac1 CAPPEND' ctac2) x = ctac1 x CAPPEND ctac2 x 77 | 78 | fun CTRY ctac = ctac CORELSE all_ctac 79 | 80 | fun CREPEAT ctac = 81 | let 82 | fun rep qs cst = 83 | (case Seq.pull (Seq.filter_results (ctac cst)) of 84 | NONE => SOME (cst, Seq.make (fn () => repq qs)) 85 | | SOME (cst', q) => rep 
(q :: qs) cst') 86 | and repq [] = NONE 87 | | repq (q :: qs) = 88 | (case Seq.pull q of 89 | NONE => repq qs 90 | | SOME (cst, q) => rep (q :: qs) cst); 91 | in fn cst => Seq.make_results (Seq.make (fn () => rep [] cst)) end 92 | 93 | fun CREPEAT1 ctac = ctac CTHEN CREPEAT ctac 94 | 95 | fun CREPEAT_N 0 _ = no_ctac 96 | | CREPEAT_N n ctac = ctac CTHEN CREPEAT_N (n - 1) ctac 97 | 98 | fun CFILTER pred ctac cst = 99 | ctac cst 100 | |> Seq.filter_results 101 | |> Seq.filter pred 102 | |> Seq.make_results 103 | 104 | (*Only accept next states where the subgoals have changed*) 105 | fun CCHANGED ctac (cst as (_, st)) = 106 | CFILTER (fn (_, st') => not (Thm.eq_thm (st, st'))) ctac cst 107 | 108 | local 109 | fun op THEN (f, g) x = Seq.maps_results g (f x) 110 | 111 | fun INTERVAL f i j x = 112 | if i > j then Seq.make_results (Seq.single x) 113 | else op THEN (f j, INTERVAL f i (j - 1)) x 114 | 115 | (*By Peter Lammich: apply tactic to subgoals in interval in a forward manner, 116 | skipping over emerging subgoals*) 117 | fun INTERVAL_FWD ctac l u (cst as (_, st)) = cst |> 118 | (if l > u then all_ctac 119 | else (ctac l CTHEN (fn cst' as (_, st') => 120 | let val ofs = Thm.nprems_of st' - Thm.nprems_of st in 121 | if ofs < ~1 122 | then raise THM ( 123 | "INTERVAL_FWD: tactic solved more than one goal", ~1, [st, st']) 124 | else INTERVAL_FWD ctac (l + 1 + ofs) (u + ofs) cst' 125 | end))) 126 | in 127 | 128 | fun (ctac1 CTHEN_ALL_NEW ctac2) i (cst as (_, st)) = 129 | cst |> (ctac1 i CTHEN (fn cst' as (_, st') => 130 | INTERVAL ctac2 i (i + Thm.nprems_of st' - Thm.nprems_of st) cst')) 131 | 132 | (*By Peter Lammich: apply ctac2 to all subgoals emerging from ctac1, in forward 133 | manner*) 134 | fun (ctac1 CTHEN_ALL_NEW_FWD ctac2) i (cst as (_, st)) = 135 | cst |> (ctac1 i CTHEN (fn cst' as (_, st') => 136 | INTERVAL_FWD ctac2 i (i + Thm.nprems_of st' - Thm.nprems_of st) cst')) 137 | 138 | (*Repeatedly apply ctac to the i-th until the k-th-from-last subgoals 139 | (i.e. leave the last k subgoals alone), until no more changes appear in the 140 | goal state.*) 141 | fun CREPEAT_IN_RANGE i k ctac = 142 | let fun interval_ctac (cst as (_, st)) = 143 | INTERVAL_FWD ctac i (Thm.nprems_of st - k) cst 144 | in CREPEAT (CCHANGED interval_ctac) end 145 | 146 | end 147 | 148 | fun CREPEAT_ALL_NEW ctac = 149 | ctac CTHEN_ALL_NEW (CTRY o (fn i => CREPEAT_ALL_NEW ctac i)) 150 | 151 | fun CREPEAT_ALL_NEW_FWD ctac = 152 | ctac CTHEN_ALL_NEW_FWD (CTRY o (fn i => CREPEAT_ALL_NEW_FWD ctac i)) 153 | 154 | fun CHEADGOAL ctac = ctac 1 155 | 156 | fun CALLGOALS ctac (cst as (_, st)) = 157 | let 158 | fun doall 0 = all_ctac 159 | | doall n = ctac n CTHEN doall (n - 1); 160 | in doall (Thm.nprems_of st) cst end 161 | 162 | fun CSOMEGOAL ctac (cst as (_, st)) = 163 | let 164 | fun find 0 = no_ctac 165 | | find n = ctac n CORELSE find (n - 1); 166 | in find (Thm.nprems_of st) cst end 167 | 168 | fun CRANGE [] _ = all_ctac 169 | | CRANGE (ctac :: ctacs) i = CRANGE ctacs (i + 1) CTHEN ctac i 170 | 171 | fun CFIRST ctacs = fold_rev (curry op CORELSE) ctacs no_ctac 172 | 173 | (*FIRST' [tac1,...,tacn] i equals tac1 i ORELSE ... 
ORELSE tacn i*) 174 | fun CFIRST' ctacs = fold_rev (curry op CORELSE') ctacs (K no_ctac) 175 | 176 | 177 | (** Search tacticals **) 178 | 179 | (* Best-first search *) 180 | 181 | structure Thm_Heap = Heap ( 182 | type elem = int * thm; 183 | val ord = prod_ord int_ord (Term_Ord.term_ord o apply2 Thm.prop_of) 184 | ) 185 | 186 | structure Context_State_Heap = Heap ( 187 | type elem = int * context_state; 188 | val ord = prod_ord int_ord (Term_Ord.term_ord o apply2 (Thm.prop_of o #2)) 189 | ) 190 | 191 | fun some_of_list [] = NONE 192 | | some_of_list (x :: l) = SOME (x, Seq.make (fn () => some_of_list l)) 193 | 194 | (*Check for and delete duplicate proof states*) 195 | fun delete_all_min (cst as (_, st)) heap = 196 | if Context_State_Heap.is_empty heap then heap 197 | else if Thm.eq_thm (st, #2 (#2 (Context_State_Heap.min heap))) 198 | then delete_all_min cst (Context_State_Heap.delete_min heap) 199 | else heap 200 | 201 | (*Best-first search for a state that satisfies satp (incl initial state) 202 | Function sizef estimates size of problem remaining (smaller means better). 203 | tactic tac0 sets up the initial priority queue, while tac1 searches it. *) 204 | fun CTHEN_BEST_FIRST ctac0 satp sizef ctac = 205 | let 206 | fun pairsize cst = (sizef cst, cst); 207 | fun bfs (news, nst_heap) = 208 | (case List.partition satp news of 209 | ([], nonsats) => next (fold_rev Context_State_Heap.insert (map pairsize nonsats) nst_heap) 210 | | (sats, _) => some_of_list sats) 211 | and next nst_heap = 212 | if Context_State_Heap.is_empty nst_heap then NONE 213 | else 214 | let 215 | val (n, cst) = Context_State_Heap.min nst_heap; 216 | in 217 | bfs (Seq.list_of (Seq.filter_results (ctac cst)), delete_all_min cst (Context_State_Heap.delete_min nst_heap)) 218 | end; 219 | fun btac cst = bfs (Seq.list_of (Seq.filter_results (ctac0 cst)), Context_State_Heap.empty) 220 | in fn cst => Seq.make_results (Seq.make (fn () => btac cst)) end 221 | 222 | (*Ordinary best-first search, with no initial tactic*) 223 | val CBEST_FIRST = CTHEN_BEST_FIRST all_ctac 224 | 225 | 226 | (* A*-like search *) 227 | 228 | (*Insertion into priority queue of states, marked with level*) 229 | fun insert_with_level (lnth: int * int * context_state) [] = [lnth] 230 | | insert_with_level (l, m, cst) ((l', n, cst') :: csts) = 231 | if n < m then (l', n, cst') :: insert_with_level (l, m, cst) csts 232 | else if n = m andalso Thm.eq_thm (#2 cst, #2 cst') then (l', n, cst') :: csts 233 | else (l, m, cst) :: (l', n, cst') :: csts; 234 | 235 | fun CTHEN_ASTAR ctac0 satp costf ctac = 236 | let 237 | fun bfs (news, nst, level) = 238 | let fun cost cst = (level, costf level cst, cst) in 239 | (case List.partition satp news of 240 | ([], nonsats) => next (fold_rev (insert_with_level o cost) nonsats nst) 241 | | (sats, _) => some_of_list sats) 242 | end 243 | and next [] = NONE 244 | | next ((level, n, cst) :: nst) = 245 | bfs (Seq.list_of (Seq.filter_results (ctac cst)), nst, level + 1) 246 | in fn cst => Seq.make_results 247 | (Seq.make (fn () => bfs (Seq.list_of (Seq.filter_results (ctac0 cst)), [], 0))) 248 | end 249 | 250 | (*Ordinary ASTAR, with no initial tactic*) 251 | val CASTAR = CTHEN_ASTAR all_ctac; 252 | 253 | 254 | end 255 | 256 | open Context_Tactical 257 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Isabelle/HoTT 2 | Copyright (c) 2018–2021 Joshua Chen 3 | 4 | All files are licensed under the terms of 
the 5 | GNU Lesser General Public License v3.0 reproduced below, 6 | WITH THE EXCEPTION OF the following files: 7 | mltt/core/comp.ML 8 | mltt/core/context_tactical.ML 9 | mltt/core/elaborated_statement.ML 10 | mltt/core/eqsubst.ML 11 | mltt/core/focus.ML 12 | mltt/core/goals.ML 13 | 14 | These have been modified from source code which is part of the official 15 | Isabelle distribution (https://isabelle.in.tum.de/) and licensed under the 16 | Isabelle license, also reproduced below. 17 | 18 | ================================================================================ 19 | GNU LESSER GENERAL PUBLIC LICENSE 20 | Version 3, 29 June 2007 21 | 22 | Copyright (C) 2007 Free Software Foundation, Inc. 23 | Everyone is permitted to copy and distribute verbatim copies 24 | of this license document, but changing it is not allowed. 25 | 26 | 27 | This version of the GNU Lesser General Public License incorporates 28 | the terms and conditions of version 3 of the GNU General Public 29 | License, supplemented by the additional permissions listed below. 30 | 31 | 0. Additional Definitions. 32 | 33 | As used herein, "this License" refers to version 3 of the GNU Lesser 34 | General Public License, and the "GNU GPL" refers to version 3 of the GNU 35 | General Public License. 36 | 37 | "The Library" refers to a covered work governed by this License, 38 | other than an Application or a Combined Work as defined below. 39 | 40 | An "Application" is any work that makes use of an interface provided 41 | by the Library, but which is not otherwise based on the Library. 42 | Defining a subclass of a class defined by the Library is deemed a mode 43 | of using an interface provided by the Library. 44 | 45 | A "Combined Work" is a work produced by combining or linking an 46 | Application with the Library. The particular version of the Library 47 | with which the Combined Work was made is also called the "Linked 48 | Version". 49 | 50 | The "Minimal Corresponding Source" for a Combined Work means the 51 | Corresponding Source for the Combined Work, excluding any source code 52 | for portions of the Combined Work that, considered in isolation, are 53 | based on the Application, and not on the Linked Version. 54 | 55 | The "Corresponding Application Code" for a Combined Work means the 56 | object code and/or source code for the Application, including any data 57 | and utility programs needed for reproducing the Combined Work from the 58 | Application, but excluding the System Libraries of the Combined Work. 59 | 60 | 1. Exception to Section 3 of the GNU GPL. 61 | 62 | You may convey a covered work under sections 3 and 4 of this License 63 | without being bound by section 3 of the GNU GPL. 64 | 65 | 2. Conveying Modified Versions. 66 | 67 | If you modify a copy of the Library, and, in your modifications, a 68 | facility refers to a function or data to be supplied by an Application 69 | that uses the facility (other than as an argument passed when the 70 | facility is invoked), then you may convey a copy of the modified 71 | version: 72 | 73 | a) under this License, provided that you make a good faith effort to 74 | ensure that, in the event an Application does not supply the 75 | function or data, the facility still operates, and performs 76 | whatever part of its purpose remains meaningful, or 77 | 78 | b) under the GNU GPL, with none of the additional permissions of 79 | this License applicable to that copy. 80 | 81 | 3. Object Code Incorporating Material from Library Header Files. 
82 | 83 | The object code form of an Application may incorporate material from 84 | a header file that is part of the Library. You may convey such object 85 | code under terms of your choice, provided that, if the incorporated 86 | material is not limited to numerical parameters, data structure 87 | layouts and accessors, or small macros, inline functions and templates 88 | (ten or fewer lines in length), you do both of the following: 89 | 90 | a) Give prominent notice with each copy of the object code that the 91 | Library is used in it and that the Library and its use are 92 | covered by this License. 93 | 94 | b) Accompany the object code with a copy of the GNU GPL and this license 95 | document. 96 | 97 | 4. Combined Works. 98 | 99 | You may convey a Combined Work under terms of your choice that, 100 | taken together, effectively do not restrict modification of the 101 | portions of the Library contained in the Combined Work and reverse 102 | engineering for debugging such modifications, if you also do each of 103 | the following: 104 | 105 | a) Give prominent notice with each copy of the Combined Work that 106 | the Library is used in it and that the Library and its use are 107 | covered by this License. 108 | 109 | b) Accompany the Combined Work with a copy of the GNU GPL and this license 110 | document. 111 | 112 | c) For a Combined Work that displays copyright notices during 113 | execution, include the copyright notice for the Library among 114 | these notices, as well as a reference directing the user to the 115 | copies of the GNU GPL and this license document. 116 | 117 | d) Do one of the following: 118 | 119 | 0) Convey the Minimal Corresponding Source under the terms of this 120 | License, and the Corresponding Application Code in a form 121 | suitable for, and under terms that permit, the user to 122 | recombine or relink the Application with a modified version of 123 | the Linked Version to produce a modified Combined Work, in the 124 | manner specified by section 6 of the GNU GPL for conveying 125 | Corresponding Source. 126 | 127 | 1) Use a suitable shared library mechanism for linking with the 128 | Library. A suitable mechanism is one that (a) uses at run time 129 | a copy of the Library already present on the user's computer 130 | system, and (b) will operate properly with a modified version 131 | of the Library that is interface-compatible with the Linked 132 | Version. 133 | 134 | e) Provide Installation Information, but only if you would otherwise 135 | be required to provide such information under section 6 of the 136 | GNU GPL, and only to the extent that such information is 137 | necessary to install and execute a modified version of the 138 | Combined Work produced by recombining or relinking the 139 | Application with a modified version of the Linked Version. (If 140 | you use option 4d0, the Installation Information must accompany 141 | the Minimal Corresponding Source and Corresponding Application 142 | Code. If you use option 4d1, you must provide the Installation 143 | Information in the manner specified by section 6 of the GNU GPL 144 | for conveying Corresponding Source.) 145 | 146 | 5. Combined Libraries. 
147 | 148 | You may place library facilities that are a work based on the 149 | Library side by side in a single library together with other library 150 | facilities that are not Applications and are not covered by this 151 | License, and convey such a combined library under terms of your 152 | choice, if you do both of the following: 153 | 154 | a) Accompany the combined library with a copy of the same work based 155 | on the Library, uncombined with any other library facilities, 156 | conveyed under the terms of this License. 157 | 158 | b) Give prominent notice with the combined library that part of it 159 | is a work based on the Library, and explaining where to find the 160 | accompanying uncombined form of the same work. 161 | 162 | 6. Revised Versions of the GNU Lesser General Public License. 163 | 164 | The Free Software Foundation may publish revised and/or new versions 165 | of the GNU Lesser General Public License from time to time. Such new 166 | versions will be similar in spirit to the present version, but may 167 | differ in detail to address new problems or concerns. 168 | 169 | Each version is given a distinguishing version number. If the 170 | Library as you received it specifies that a certain numbered version 171 | of the GNU Lesser General Public License "or any later version" 172 | applies to it, you have the option of following the terms and 173 | conditions either of that published version or of any later version 174 | published by the Free Software Foundation. If the Library as you 175 | received it does not specify a version number of the GNU Lesser 176 | General Public License, you may choose any version of the GNU Lesser 177 | General Public License ever published by the Free Software Foundation. 178 | 179 | If the Library as you received it specifies that a proxy can decide 180 | whether future versions of the GNU Lesser General Public License shall 181 | apply, that proxy's public statement of acceptance of any version is 182 | permanent authorization for you to choose that version for the 183 | Library. 184 | 185 | ================================================================================ 186 | ISABELLE COPYRIGHT NOTICE, LICENCE AND DISCLAIMER. 187 | 188 | Copyright (c) 1986-2019, 189 | University of Cambridge, 190 | Technische Universitaet Muenchen, 191 | and contributors. 192 | 193 | All rights reserved. 194 | 195 | Redistribution and use in source and binary forms, with or without 196 | modification, are permitted provided that the following conditions are 197 | met: 198 | 199 | * Redistributions of source code must retain the above copyright 200 | notice, this list of conditions and the following disclaimer. 201 | 202 | * Redistributions in binary form must reproduce the above copyright 203 | notice, this list of conditions and the following disclaimer in the 204 | documentation and/or other materials provided with the distribution. 205 | 206 | * Neither the name of the University of Cambridge or the Technische 207 | Universitaet Muenchen nor the names of their contributors may be used 208 | to endorse or promote products derived from this software without 209 | specific prior written permission. 210 | 211 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS 212 | IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED 213 | TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A 214 | PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 215 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 216 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 217 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 218 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 219 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 220 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 221 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 222 | 223 | -------------------------------------------------------------------------------- /hott/Equivalence.thy: -------------------------------------------------------------------------------- 1 | theory Equivalence 2 | imports Identity 3 | 4 | begin 5 | 6 | section \Homotopy\ 7 | 8 | definition "homotopy A B f g \ \x: A. f `x =\<^bsub>B x\<^esub> g `x" 9 | 10 | definition homotopy_i (infix "~" 100) 11 | where [implicit]: "f ~ g \ homotopy {} {} f g" 12 | 13 | translations "f ~ g" \ "CONST homotopy A B f g" 14 | 15 | Lemma homotopy_type [type]: 16 | assumes 17 | "A: U i" 18 | "\x. x: A \ B x: U i" 19 | "f: \x: A. B x" 20 | "g: \x: A. B x" 21 | shows "f ~ g: U i" 22 | unfolding homotopy_def 23 | by typechk 24 | 25 | Lemma apply_homotopy: 26 | assumes 27 | "A: U i" "\x. x: A \ B x: U i" 28 | "f: \x: A. B x" "g: \x: A. B x" 29 | "H: f ~ g" 30 | "x: A" 31 | shows "H x: f x = g x" 32 | using \H:_\ unfolding homotopy_def 33 | by typechk 34 | 35 | method htpy for H::o = rule apply_homotopy[where ?H=H] 36 | 37 | Lemma (def) homotopy_refl [refl]: 38 | assumes 39 | "A: U i" 40 | "f: \x: A. B x" 41 | shows "f ~ f" 42 | unfolding homotopy_def 43 | by intros fact 44 | 45 | Lemma (def) hsym: 46 | assumes 47 | "A: U i" 48 | "\x. x: A \ B x: U i" 49 | "f: \x: A. B x" 50 | "g: \x: A. B x" 51 | "H: f ~ g" 52 | shows "g ~ f" 53 | unfolding homotopy_def 54 | proof intro 55 | fix x assuming "x: A" then have "f x = g x" 56 | by (htpy H) 57 | thus "g x = f x" 58 | by (rule pathinv) fact 59 | qed 60 | 61 | Lemma (def) htrans: 62 | assumes 63 | "A: U i" 64 | "\x. x: A \ B x: U i" 65 | "f: \x: A. B x" 66 | "g: \x: A. B x" 67 | "h: \x: A. B x" 68 | "H1: f ~ g" 69 | "H2: g ~ h" 70 | shows "f ~ h" 71 | unfolding homotopy_def 72 | proof intro 73 | fix x assuming "x: A" 74 | have *: "f x = g x" "g x = h x" 75 | by (htpy H1, htpy H2) 76 | show "f x = h x" 77 | by (rule pathcomp; (rule *)?) typechk 78 | qed 79 | 80 | section \Rewriting homotopies\ 81 | 82 | calc "f ~ g" rhs g 83 | 84 | lemmas 85 | homotopy_sym [sym] = hsym[rotated 4] and 86 | homotopy_trans [trans] = htrans[rotated 5] 87 | 88 | Lemma id_funcomp_htpy: 89 | assumes "A: U i" "B: U i" "f: A \ B" 90 | shows "homotopy_refl A f: (id B) \ f ~ f" 91 | by compute 92 | 93 | Lemma funcomp_id_htpy: 94 | assumes "A: U i" "B: U i" "f: A \ B" 95 | shows "homotopy_refl A f: f \ (id A) ~ f" 96 | by compute 97 | 98 | Lemma funcomp_left_htpy: 99 | assumes 100 | "A: U i" "B: U i" 101 | "\x. x: B \ C x: U i" 102 | "f: A \ B" 103 | "g: \x: B. C x" 104 | "g': \x: B. 
C x" 105 | "H: g ~ g'" 106 | shows "(g \ f) ~ (g' \ f)" 107 | unfolding homotopy_def 108 | apply (intro, compute) 109 | apply (htpy H) 110 | done 111 | 112 | Lemma funcomp_right_htpy: 113 | assumes 114 | "A: U i" "B: U i" "C: U i" 115 | "f: A \ B" 116 | "f': A \ B" 117 | "g: B \ C" 118 | "H: f ~ f'" 119 | shows "(g \ f) ~ (g \ f')" 120 | unfolding homotopy_def 121 | proof (intro, compute) 122 | fix x assuming "x: A" 123 | have *: "f x = f' x" 124 | by (htpy H) 125 | show "g (f x) = g (f' x)" 126 | by (rewr eq: *) refl 127 | qed 128 | 129 | method lhtpy = rule funcomp_left_htpy[rotated 6] 130 | method rhtpy = rule funcomp_right_htpy[rotated 6] 131 | 132 | Lemma (def) commute_homotopy: 133 | assumes 134 | "A: U i" "B: U i" 135 | "x: A" "y: A" 136 | "p: x = y" 137 | "f: A \ B" "g: A \ B" 138 | "H: f ~ g" 139 | shows "(H x) \ g[p] = f[p] \ (H y)" 140 | using \H:_\ 141 | unfolding homotopy_def 142 | apply (eq p, compute) 143 | apply (rewr eq: pathcomp_refl, rewr eq: refl_pathcomp) 144 | by refl 145 | 146 | Corollary (def) commute_homotopy': 147 | assumes 148 | "A: U i" 149 | "x: A" 150 | "f: A \ A" 151 | "H: f ~ (id A)" 152 | shows "H (f x) = f [H x]" 153 | proof - 154 | (*FUTURE: Because we don't have very good normalization integrated into 155 | things yet, we need to manually unfold the type of H.*) 156 | from \H: f ~ id A\ have [type]: "H: \x: A. f x = x" 157 | by (compute add: homotopy_def) 158 | 159 | have "H (f x) \ H x = H (f x) \ (id A)[H x]" 160 | by (rule left_whisker, rewr eq: ap_id, refl) 161 | also have [simplified id_comp]: "H (f x) \ (id A)[H x] = f[H x] \ H x" 162 | by (rule commute_homotopy) 163 | finally have "?" by this 164 | 165 | thus "H (f x) = f [H x]" by pathcomp_cancelr (fact, typechk+) 166 | qed 167 | 168 | 169 | section \Quasi-inverse and bi-invertibility\ 170 | 171 | subsection \Quasi-inverses\ 172 | 173 | definition "is_qinv A B f \ \g: B \ A. 174 | homotopy A (fn _. A) (g \\<^bsub>A\<^esub> f) (id A) \ homotopy B (fn _. 
B) (f \\<^bsub>B\<^esub> g) (id B)" 175 | 176 | Lemma is_qinv_type [type]: 177 | assumes "A: U i" "B: U i" "f: A \ B" 178 | shows "is_qinv A B f: U i" 179 | unfolding is_qinv_def 180 | by typechk 181 | 182 | definition is_qinv_i ("is'_qinv") 183 | where [implicit]: "is_qinv f \ Equivalence.is_qinv {} {} f" 184 | 185 | no_translations "is_qinv f" \ "CONST Equivalence.is_qinv A B f" 186 | 187 | Lemma (def) id_is_qinv: 188 | assumes "A: U i" 189 | shows "is_qinv (id A)" 190 | unfolding is_qinv_def 191 | proof intro 192 | show "id A: A \ A" by typechk 193 | qed (compute, intro; refl) 194 | 195 | Lemma is_qinvI: 196 | assumes 197 | "A: U i" "B: U i" "f: A \ B" 198 | "g: B \ A" 199 | "H1: g \ f ~ id A" 200 | "H2: f \ g ~ id B" 201 | shows "is_qinv f" 202 | unfolding is_qinv_def 203 | proof intro 204 | show "g: B \ A" by fact 205 | show "g \ f ~ id A \ f \ g ~ id B" by (intro; fact) 206 | qed 207 | 208 | Lemma is_qinv_components [type]: 209 | assumes "A: U i" "B: U i" "f: A \ B" "pf: is_qinv f" 210 | shows 211 | qinv_of_is_qinv: "fst pf: B \ A" and 212 | ret_of_is_qinv: "p\<^sub>2\<^sub>1 pf: fst pf \ f ~ id A" and 213 | sec_of_is_qinv: "p\<^sub>2\<^sub>2 pf: f \ fst pf ~ id B" 214 | using assms unfolding is_qinv_def 215 | by typechk+ 216 | 217 | Lemma (def) qinv_is_qinv: 218 | assumes "A: U i" "B: U i" "f: A \ B" "pf: is_qinv f" 219 | shows "is_qinv (fst pf)" 220 | using \pf:_\[unfolded is_qinv_def] \ \Should be unfolded by the typechecker\ 221 | apply (rule is_qinvI) 222 | \<^item> by (fact \f:_\) 223 | \<^item> by (rule sec_of_is_qinv) 224 | \<^item> by (rule ret_of_is_qinv) 225 | done 226 | 227 | Lemma (def) funcomp_is_qinv: 228 | assumes 229 | "A: U i" "B: U i" "C: U i" 230 | "f: A \ B" "g: B \ C" 231 | shows "is_qinv f \ is_qinv g \ is_qinv (g \ f)" 232 | apply intros 233 | unfolding is_qinv_def apply elims 234 | focus vars _ _ finv _ ginv 235 | apply intro 236 | \<^item> by (rule funcompI[where ?f=ginv and ?g=finv]) 237 | \<^item> proof intro 238 | show "(finv \ ginv) \ g \ f ~ id A" 239 | proof - 240 | have "(finv \ ginv) \ g \ f ~ finv \ (ginv \ g) \ f" by compute refl 241 | also have ".. ~ finv \ id B \ f" by (rhtpy, lhtpy) fact 242 | also have ".. ~ id A" by compute fact 243 | finally show "?" by this 244 | qed 245 | 246 | show "(g \ f) \ finv \ ginv ~ id C" 247 | proof - 248 | have "(g \ f) \ finv \ ginv ~ g \ (f \ finv) \ ginv" by compute refl 249 | also have ".. ~ g \ id B \ ginv" by (rhtpy, lhtpy) fact 250 | also have ".. ~ id C" by compute fact 251 | finally show "?" by this 252 | qed 253 | qed 254 | done 255 | done 256 | 257 | subsection \Bi-invertible maps\ 258 | 259 | definition "is_biinv A B f \ 260 | (\g: B \ A. homotopy A (fn _. A) (g \\<^bsub>A\<^esub> f) (id A)) 261 | \ (\g: B \ A. homotopy B (fn _. B) (f \\<^bsub>B\<^esub> g) (id B))" 262 | 263 | Lemma is_biinv_type [type]: 264 | assumes "A: U i" "B: U i" "f: A \ B" 265 | shows "is_biinv A B f: U i" 266 | unfolding is_biinv_def by typechk 267 | 268 | definition is_biinv_i ("is'_biinv") 269 | where [implicit]: "is_biinv f \ Equivalence.is_biinv {} {} f" 270 | 271 | translations "is_biinv f" \ "CONST Equivalence.is_biinv A B f" 272 | 273 | Lemma is_biinvI: 274 | assumes 275 | "A: U i" "B: U i" "f: A \ B" 276 | "g: B \ A" "h: B \ A" 277 | "H1: g \ f ~ id A" "H2: f \ h ~ id B" 278 | shows "is_biinv f" 279 | unfolding is_biinv_def 280 | proof intro 281 | show ": \g: B \ A. g \ f ~ id A" by typechk 282 | show ": \g: B \ A. 
f \ g ~ id B" by typechk 283 | qed 284 | 285 | Lemma is_biinv_components [type]: 286 | assumes "A: U i" "B: U i" "f: A \ B" "pf: is_biinv f" 287 | shows 288 | section_of_is_biinv: "p\<^sub>1\<^sub>1 pf: B \ A" and 289 | retraction_of_is_biinv: "p\<^sub>2\<^sub>1 pf: B \ A" and 290 | ret_of_is_biinv: "p\<^sub>1\<^sub>2 pf: p\<^sub>1\<^sub>1 pf \ f ~ id A" and 291 | sec_of_is_biinv: "p\<^sub>2\<^sub>2 pf: f \ p\<^sub>2\<^sub>1 pf ~ id B" 292 | using assms unfolding is_biinv_def 293 | by typechk+ 294 | 295 | Lemma (def) is_biinv_if_is_qinv: 296 | assumes "A: U i" "B: U i" "f: A \ B" 297 | shows "is_qinv f \ is_biinv f" 298 | apply intros 299 | unfolding is_qinv_def is_biinv_def 300 | by (rule distribute_Sig) 301 | 302 | Lemma (def) is_qinv_if_is_biinv: 303 | assumes "A: U i" "B: U i" "f: A \ B" 304 | shows "is_biinv f \ is_qinv f" 305 | apply intro 306 | unfolding is_biinv_def apply elims 307 | focus vars _ _ _ g H1 h H2 308 | apply (rule is_qinvI) 309 | \<^item> by (fact \g: _\) 310 | \<^item> by (fact \H1: _\) 311 | \<^item> proof - 312 | have "g ~ g \ (id B)" by compute refl 313 | also have ".. ~ g \ f \ h" by rhtpy (rule \H2:_\[symmetric]) 314 | also have ".. ~ (id A) \ h" by (comp funcomp_assoc[symmetric]) (lhtpy, fact) 315 | also have ".. ~ h" by compute refl 316 | finally have "g ~ h" by this 317 | then have "f \ g ~ f \ h" by (rhtpy, this) 318 | also note \H2:_\ 319 | finally show "f \ g ~ id B" by this 320 | qed 321 | done 322 | done 323 | 324 | Lemma (def) id_is_biinv: 325 | "A: U i \ is_biinv (id A)" 326 | by (rule is_biinv_if_is_qinv) (rule id_is_qinv) 327 | 328 | Lemma (def) funcomp_is_biinv: 329 | assumes 330 | "A: U i" "B: U i" "C: U i" 331 | "f: A \ B" "g: B \ C" 332 | shows "is_biinv f \ is_biinv g \ is_biinv (g \ f)" 333 | apply intros 334 | focus vars pf pg 335 | by (rule is_biinv_if_is_qinv) 336 | (rule funcomp_is_qinv; rule is_qinv_if_is_biinv, fact) 337 | done 338 | 339 | 340 | section \Equivalence\ 341 | 342 | text \ 343 | Following the HoTT book, we first define equivalence in terms of 344 | bi-invertibility. 345 | \ 346 | 347 | definition equivalence (infix "\" 110) 348 | where "A \ B \ \f: A \ B. 
Equivalence.is_biinv A B f" 349 | 350 | Lemma equivalence_type [type]: 351 | assumes "A: U i" "B: U i" 352 | shows "A \ B: U i" 353 | unfolding equivalence_def by typechk 354 | 355 | Lemma (def) equivalence_refl: 356 | assumes "A: U i" 357 | shows "A \ A" 358 | unfolding equivalence_def 359 | proof intro 360 | show "is_biinv (id A)" by (rule is_biinv_if_is_qinv) (rule id_is_qinv) 361 | qed typechk 362 | 363 | Lemma (def) equivalence_symmetric: 364 | assumes "A: U i" "B: U i" 365 | shows "A \ B \ B \ A" 366 | apply intros 367 | unfolding equivalence_def 368 | apply elim 369 | apply (dest (4) is_qinv_if_is_biinv) 370 | apply intro 371 | \<^item> by (rule qinv_of_is_qinv) facts 372 | \<^item> by (rule is_biinv_if_is_qinv) (rule qinv_is_qinv) 373 | done 374 | 375 | Lemma (def) equivalence_transitive: 376 | assumes "A: U i" "B: U i" "C: U i" 377 | shows "A \ B \ B \ C \ A \ C" 378 | proof intros 379 | fix AB BC assume *: "AB: A \ B" "BC: B \ C" 380 | then have 381 | "fst AB: A \ B" and 1: "snd AB: is_biinv (fst AB)" 382 | "fst BC: B \ C" and 2: "snd BC: is_biinv (fst BC)" 383 | unfolding equivalence_def by typechk+ 384 | then have "fst BC \ fst AB: A \ C" by typechk 385 | moreover have "is_biinv (fst BC \ fst AB)" 386 | using * unfolding equivalence_def by (rule funcomp_is_biinv 1 2) facts 387 | ultimately show "A \ C" 388 | unfolding equivalence_def by intro facts 389 | qed 390 | 391 | text \ 392 | Equal types are equivalent. We give two proofs: the first by induction, and 393 | the second by following the HoTT book and showing that transport is an 394 | equivalence. 395 | \ 396 | 397 | Lemma 398 | assumes "A: U i" "B: U i" "p: A =\<^bsub>U i\<^esub> B" 399 | shows "A \ B" 400 | by (eq p) (rule equivalence_refl) 401 | 402 | text \ 403 | The following proof is wordy because (1) typechecker normalization is still 404 | rudimentary, and (2) we don't yet have universe level inference. 
405 | \ 406 | 407 | Lemma (def) equiv_if_equal: 408 | notes Ui_in_USi [form] 409 | assumes 410 | "A: U i" "B: U i" "p: A = B" 411 | shows ": A \ B" 412 | unfolding equivalence_def 413 | apply intro defer 414 | \<^item> apply (eq p) 415 | \<^enum> vars A B 416 | apply (comp at A in "A \ B" id_comp[symmetric]) 417 | using [[solve_side_conds=1]] 418 | apply (comp at B in "_ \ B" id_comp[symmetric]) 419 | using Ui_in_USi by (rule transport, rule lift_universe_codomain) 420 | \<^enum> vars A 421 | apply (comp transport_comp) 422 | \ by (rule U_lift) 423 | \ by compute (rule id_is_biinv) 424 | done 425 | done 426 | 427 | \<^item> \ \Similar proof as in the first subgoal above\ 428 | apply (comp at A in "A \ B" id_comp[symmetric]) 429 | using [[solve_side_conds=1]] 430 | apply (comp at B in "_ \ B" id_comp[symmetric]) 431 | using Ui_in_USi by (rule transport, rule lift_universe_codomain) 432 | done 433 | 434 | Definition idtoeqv: ":= MLTT.fst (A \ B) is_biinv (equiv_if_equal i A B p)" 435 | where "A: U i" "B: U i" "p: A =\<^bsub>U i\<^esub> B" 436 | using equiv_if_equal unfolding equivalence_def 437 | by typechk 438 | 439 | definition idtoeqv_i ("idtoeqv") 440 | where [implicit]: "idtoeqv p \ Equivalence.idtoeqv {} {} {} p" 441 | 442 | 443 | end 444 | -------------------------------------------------------------------------------- /hott/Equivalence2.thy: -------------------------------------------------------------------------------- 1 | (*This is a rewrite of Equivalence.thy using the new Definition mechanism to abstract 2 | sections and retractions.*) 3 | 4 | theory Equivalence2 5 | imports Identity 6 | 7 | begin 8 | 9 | section \Homotopy\ 10 | 11 | Definition homotopy: ":= \x: A. f x = g x" 12 | where "A: U i" "\x. x: A \ B x: U i" "f: \x: A. B x" "g: \x: A. B x" 13 | by typechk 14 | 15 | definition homotopy_i (infix "~" 100) 16 | where [implicit]: "f ~ g \ homotopy {} {} f g" 17 | 18 | translations "f ~ g" \ "CONST homotopy A B f g" 19 | 20 | Lemma apply_homotopy: 21 | assumes 22 | "A: U i" "\x. x: A \ B x: U i" 23 | "f: \x: A. B x" "g: \x: A. B x" 24 | "H: f ~ g" 25 | "x: A" 26 | shows "H x: f x = g x" 27 | using \H:_\ unfolding homotopy_def 28 | by typechk 29 | 30 | method htpy for H::o = rule apply_homotopy[where ?H=H] 31 | 32 | Lemma (def) homotopy_refl [refl]: 33 | assumes 34 | "A: U i" 35 | "f: \x: A. B x" 36 | shows "f ~ f" 37 | unfolding homotopy_def 38 | by intros fact 39 | 40 | Lemma (def) hsym: 41 | assumes 42 | "A: U i" 43 | "\x. x: A \ B x: U i" 44 | "f: \x: A. B x" 45 | "g: \x: A. B x" 46 | "H: f ~ g" 47 | shows "g ~ f" 48 | unfolding homotopy_def 49 | proof intro 50 | fix x assuming "x: A" then have "f x = g x" 51 | by (htpy H) 52 | thus "g x = f x" 53 | by (rule pathinv) fact 54 | qed 55 | 56 | Lemma (def) htrans: 57 | assumes 58 | "A: U i" 59 | "\x. x: A \ B x: U i" 60 | "f: \x: A. B x" 61 | "g: \x: A. B x" 62 | "h: \x: A. B x" 63 | "H1: f ~ g" 64 | "H2: g ~ h" 65 | shows "f ~ h" 66 | unfolding homotopy_def 67 | proof intro 68 | fix x assuming "x: A" 69 | have *: "f x = g x" "g x = h x" 70 | by (htpy H1, htpy H2) 71 | show "f x = h x" 72 | by (rule pathcomp; (rule *)?) 
typechk 73 | qed 74 | 75 | section \Rewriting homotopies\ 76 | 77 | calc "f ~ g" rhs g 78 | 79 | lemmas 80 | homotopy_sym [sym] = hsym[rotated 4] and 81 | homotopy_trans [trans] = htrans[rotated 5] 82 | 83 | Lemma id_funcomp_htpy: 84 | assumes "A: U i" "B: U i" "f: A \ B" 85 | shows "homotopy_refl A f: (id B) \ f ~ f" 86 | by compute 87 | 88 | Lemma funcomp_id_htpy: 89 | assumes "A: U i" "B: U i" "f: A \ B" 90 | shows "homotopy_refl A f: f \ (id A) ~ f" 91 | by compute 92 | 93 | Lemma funcomp_left_htpy: 94 | assumes 95 | "A: U i" "B: U i" 96 | "\x. x: B \ C x: U i" 97 | "f: A \ B" 98 | "g: \x: B. C x" 99 | "g': \x: B. C x" 100 | "H: g ~ g'" 101 | shows "(g \ f) ~ (g' \ f)" 102 | unfolding homotopy_def 103 | apply (intro, compute) 104 | apply (htpy H) 105 | done 106 | 107 | Lemma funcomp_right_htpy: 108 | assumes 109 | "A: U i" "B: U i" "C: U i" 110 | "f: A \ B" 111 | "f': A \ B" 112 | "g: B \ C" 113 | "H: f ~ f'" 114 | shows "(g \ f) ~ (g \ f')" 115 | unfolding homotopy_def 116 | proof (intro, compute) 117 | fix x assuming "x: A" 118 | have *: "f x = f' x" 119 | by (htpy H) 120 | show "g (f x) = g (f' x)" 121 | by (rewr eq: *) refl 122 | qed 123 | 124 | method lhtpy = rule funcomp_left_htpy[rotated 6] 125 | method rhtpy = rule funcomp_right_htpy[rotated 6] 126 | 127 | Lemma (def) commute_homotopy: 128 | assumes 129 | "A: U i" "B: U i" 130 | "x: A" "y: A" 131 | "p: x = y" 132 | "f: A \ B" "g: A \ B" 133 | "H: f ~ g" 134 | shows "(H x) \ g[p] = f[p] \ (H y)" 135 | using \H:_\ 136 | unfolding homotopy_def 137 | apply (eq p, compute) 138 | apply (rewr eq: pathcomp_refl, rewr eq: refl_pathcomp) 139 | by refl 140 | 141 | Corollary (def) commute_homotopy': 142 | assumes 143 | "A: U i" 144 | "x: A" 145 | "f: A \ A" 146 | "H: f ~ (id A)" 147 | shows "H (f x) = f [H x]" 148 | proof - 149 | (*FUTURE: Because we don't have very good normalization integrated into 150 | things yet, we need to manually unfold the type of H.*) 151 | from \H: f ~ id A\ have [type]: "H: \x: A. f x = x" 152 | by (compute add: homotopy_def) 153 | 154 | have "H (f x) \ H x = H (f x) \ (id A)[H x]" 155 | by (rule left_whisker, rewr eq: ap_id, refl) 156 | also have [simplified id_comp]: "H (f x) \ (id A)[H x] = f[H x] \ H x" 157 | by (rule commute_homotopy) 158 | finally have "?" by this 159 | 160 | thus "H (f x) = f [H x]" by pathcomp_cancelr (fact, typechk+) 161 | qed 162 | 163 | 164 | section \Quasi-inverse and bi-invertibility\ 165 | 166 | subsection \Sections and retractions\ 167 | 168 | Definition retraction: ":= g \ f ~ id A" 169 | where "A: U i" "B: U i" "f: A \ B" "g: B \ A" 170 | by typechk 171 | 172 | Definition "section": ":= f \ g ~ id B" 173 | where "A: U i" "B: U i" "f: A \ B" "g: B \ A" 174 | by typechk 175 | 176 | definition retraction_i ("retraction") 177 | where [implicit]: "retraction f g \ Equivalence.retraction {} f g" 178 | 179 | definition section_i ("section") 180 | where [implicit]: "section f g \ Equivalence.section {} f g" 181 | 182 | Lemma (def) id_is_retraction: 183 | assumes "A: U i" 184 | shows "retraction (id A) (id A)" 185 | unfolding retraction_def 186 | by compute refl 187 | 188 | Lemma (def) id_is_section: 189 | assumes "A: U i" 190 | shows "section (id A) (id A)" 191 | unfolding section_def 192 | by compute refl 193 | 194 | Lemma 195 | assumes "A: U i" "B: U i" "f: A \ B" "g: B \ A" 196 | shows 197 | section_of_retraction: "h: retraction f g \ h: section g f" and 198 | retraction_of_section: "h: section f g \ h: retraction g f" 199 | unfolding section_def retraction_def . 
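(*Both conversion lemmas above hold by definition: `retraction f g` and
  `section g f` each unfold to `g \ f ~ id A`, while `section f g` and
  `retraction g f` each unfold to `f \ g ~ id B`, so definitional unfolding
  alone closes the proof. `qinv_is_qinv` below uses these two lemmas to swap
  the section and retraction data of a quasi-inverse.*)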
200 | 201 | subsection \Quasi-inverses\ 202 | 203 | Definition is_qinv: ":= \g: B \ A. section f g \ retraction f g" 204 | where "A: U i" "B: U i" "f: A \ B" 205 | by typechk 206 | 207 | definition is_qinv_i ("is'_qinv") 208 | where [implicit]: "is_qinv f \ Equivalence.is_qinv {} {} f" 209 | 210 | no_translations "is_qinv f" \ "CONST Equivalence.is_qinv A B f" 211 | 212 | Lemma (def) id_is_qinv: 213 | assumes "A: U i" 214 | shows "is_qinv (id A)" 215 | unfolding is_qinv_def 216 | proof intro 217 | show "id A: A \ A" by typechk 218 | qed (intro, rule id_is_section, rule id_is_retraction) 219 | 220 | Lemma is_qinvI: 221 | assumes 222 | "A: U i" "B: U i" "f: A \ B" 223 | "g: B \ A" 224 | "H1: section f g" 225 | "H2: retraction f g" 226 | shows "is_qinv f" 227 | unfolding is_qinv_def 228 | by (intro, fact, intro; fact) 229 | 230 | Lemma is_qinv_components [type]: 231 | assumes "A: U i" "B: U i" "f: A \ B" "fq: is_qinv f" 232 | shows 233 | qinv_of_is_qinv: "fst fq: B \ A" and 234 | sec_of_is_qinv: "p\<^sub>2\<^sub>1 fq: section f (fst fq)" and 235 | ret_of_is_qinv: "p\<^sub>2\<^sub>2 fq: retraction f (fst fq)" 236 | using assms unfolding is_qinv_def 237 | by typechk+ 238 | 239 | Lemma (def) qinv_is_qinv: 240 | assumes "A: U i" "B: U i" "f: A \ B" "pf: is_qinv f" 241 | shows "is_qinv (fst pf)" 242 | apply (rule is_qinvI) 243 | \<^item> by fact 244 | \<^item> by (rule section_of_retraction) (rule ret_of_is_qinv) 245 | \<^item> by (rule retraction_of_section) (rule sec_of_is_qinv) 246 | done 247 | 248 | (*Note the issues with definitional folding/unfolding in the following proof.*) 249 | Lemma (def) funcomp_of_qinv: 250 | assumes 251 | "A: U i" "B: U i" "C: U i" 252 | "f: A \ B" "g: B \ C" 253 | shows "is_qinv f \ is_qinv g \ is_qinv (g \ f)" 254 | apply intros 255 | apply (rule is_qinvI) 256 | \<^item> vars fq gq 257 | proof - 258 | show "fst fq \ fst gq: C \ A" by typechk 259 | qed 260 | \<^item> vars fq gq 261 | proof - 262 | have "(g \ f) \ fst fq \ fst gq ~ g \ (f \ fst fq) \ fst gq" by (compute, refl) 263 | also have ".. ~ g \ id B \ fst gq" by (rhtpy, lhtpy, fold section_def, rule sec_of_is_qinv) 264 | also have ".. ~ id C" by (compute, unfold section_def, rule sec_of_is_qinv[unfolded section_def]) 265 | finally have [folded section_def]: "?" by this 266 | then show "?" . 267 | qed 268 | \<^item> vars fq gq 269 | proof - 270 | 271 | (*An alternative proof to the above starts with 272 | apply intros 273 | unfolding is_qinv_def apply elims 274 | focus vars _ _ finv ginv 275 | *) 276 | 277 | subsection \Bi-invertible maps\ 278 | 279 | definition "is_biinv A B f \ 280 | (\g: B \ A. homotopy A (fn _. A) (g \\<^bsub>A\<^esub> f) (id A)) 281 | \ (\g: B \ A. homotopy B (fn _. B) (f \\<^bsub>B\<^esub> g) (id B))" 282 | 283 | Lemma is_biinv_type [type]: 284 | assumes "A: U i" "B: U i" "f: A \ B" 285 | shows "is_biinv A B f: U i" 286 | unfolding is_biinv_def by typechk 287 | 288 | definition is_biinv_i ("is'_biinv") 289 | where [implicit]: "is_biinv f \ Equivalence.is_biinv {} {} f" 290 | 291 | translations "is_biinv f" \ "CONST Equivalence.is_biinv A B f" 292 | 293 | Lemma is_biinvI: 294 | assumes 295 | "A: U i" "B: U i" "f: A \ B" 296 | "g: B \ A" "h: B \ A" 297 | "H1: g \ f ~ id A" "H2: f \ h ~ id B" 298 | shows "is_biinv f" 299 | unfolding is_biinv_def 300 | proof intro 301 | show ": \g: B \ A. g \ f ~ id A" by typechk 302 | show ": \g: B \ A. 
f \ g ~ id B" by typechk 303 | qed 304 | 305 | Lemma is_biinv_components [type]: 306 | assumes "A: U i" "B: U i" "f: A \ B" "pf: is_biinv f" 307 | shows 308 | section_of_is_biinv: "p\<^sub>1\<^sub>1 pf: B \ A" and 309 | retraction_of_is_biinv: "p\<^sub>2\<^sub>1 pf: B \ A" and 310 | ret_of_is_biinv: "p\<^sub>1\<^sub>2 pf: p\<^sub>1\<^sub>1 pf \ f ~ id A" and 311 | sec_of_is_biinv: "p\<^sub>2\<^sub>2 pf: f \ p\<^sub>2\<^sub>1 pf ~ id B" 312 | using assms unfolding is_biinv_def 313 | by typechk+ 314 | 315 | Lemma (def) is_biinv_if_is_qinv: 316 | assumes "A: U i" "B: U i" "f: A \ B" 317 | shows "is_qinv f \ is_biinv f" 318 | apply intros 319 | unfolding is_qinv_def is_biinv_def 320 | by (rule distribute_Sig) 321 | 322 | Lemma (def) is_qinv_if_is_biinv: 323 | assumes "A: U i" "B: U i" "f: A \ B" 324 | shows "is_biinv f \ is_qinv f" 325 | apply intro 326 | unfolding is_biinv_def apply elims 327 | focus vars _ _ _ g H1 h H2 328 | apply (rule is_qinvI) 329 | \<^item> by (fact \g: _\) 330 | \<^item> by (fact \H1: _\) 331 | \<^item> proof - 332 | have "g ~ g \ (id B)" by compute refl 333 | also have ".. ~ g \ f \ h" by rhtpy (rule \H2:_\[symmetric]) 334 | also have ".. ~ (id A) \ h" by (comp funcomp_assoc[symmetric]) (lhtpy, fact) 335 | also have ".. ~ h" by compute refl 336 | finally have "g ~ h" by this 337 | then have "f \ g ~ f \ h" by (rhtpy, this) 338 | also note \H2:_\ 339 | finally show "f \ g ~ id B" by this 340 | qed 341 | done 342 | done 343 | 344 | Lemma (def) id_is_biinv: 345 | "A: U i \ is_biinv (id A)" 346 | by (rule is_biinv_if_is_qinv) (rule id_is_qinv) 347 | 348 | Lemma (def) funcomp_is_biinv: 349 | assumes 350 | "A: U i" "B: U i" "C: U i" 351 | "f: A \ B" "g: B \ C" 352 | shows "is_biinv f \ is_biinv g \ is_biinv (g \ f)" 353 | apply intros 354 | focus vars pf pg 355 | by (rule is_biinv_if_is_qinv) 356 | (rule funcomp_is_qinv; rule is_qinv_if_is_biinv, fact) 357 | done 358 | 359 | 360 | section \Equivalence\ 361 | 362 | text \ 363 | Following the HoTT book, we first define equivalence in terms of 364 | bi-invertibility. 365 | \ 366 | 367 | definition equivalence (infix "\" 110) 368 | where "A \ B \ \f: A \ B. 
Equivalence.is_biinv A B f" 369 | 370 | Lemma equivalence_type [type]: 371 | assumes "A: U i" "B: U i" 372 | shows "A \ B: U i" 373 | unfolding equivalence_def by typechk 374 | 375 | Lemma (def) equivalence_refl: 376 | assumes "A: U i" 377 | shows "A \ A" 378 | unfolding equivalence_def 379 | proof intro 380 | show "is_biinv (id A)" by (rule is_biinv_if_is_qinv) (rule id_is_qinv) 381 | qed typechk 382 | 383 | Lemma (def) equivalence_symmetric: 384 | assumes "A: U i" "B: U i" 385 | shows "A \ B \ B \ A" 386 | apply intros 387 | unfolding equivalence_def 388 | apply elim 389 | apply (dest (4) is_qinv_if_is_biinv) 390 | apply intro 391 | \<^item> by (rule qinv_of_is_qinv) facts 392 | \<^item> by (rule is_biinv_if_is_qinv) (rule qinv_is_qinv) 393 | done 394 | 395 | Lemma (def) equivalence_transitive: 396 | assumes "A: U i" "B: U i" "C: U i" 397 | shows "A \ B \ B \ C \ A \ C" 398 | proof intros 399 | fix AB BC assume *: "AB: A \ B" "BC: B \ C" 400 | then have 401 | "fst AB: A \ B" and 1: "snd AB: is_biinv (fst AB)" 402 | "fst BC: B \ C" and 2: "snd BC: is_biinv (fst BC)" 403 | unfolding equivalence_def by typechk+ 404 | then have "fst BC \ fst AB: A \ C" by typechk 405 | moreover have "is_biinv (fst BC \ fst AB)" 406 | using * unfolding equivalence_def by (rule funcomp_is_biinv 1 2) facts 407 | ultimately show "A \ C" 408 | unfolding equivalence_def by intro facts 409 | qed 410 | 411 | text \ 412 | Equal types are equivalent. We give two proofs: the first by induction, and 413 | the second by following the HoTT book and showing that transport is an 414 | equivalence. 415 | \ 416 | 417 | Lemma 418 | assumes "A: U i" "B: U i" "p: A =\<^bsub>U i\<^esub> B" 419 | shows "A \ B" 420 | by (eq p) (rule equivalence_refl) 421 | 422 | text \ 423 | The following proof is wordy because (1) typechecker normalization is still 424 | rudimentary, and (2) we don't yet have universe level inference. 425 | \ 426 | 427 | Lemma (def) equiv_if_equal: 428 | assumes 429 | "A: U i" "B: U i" "p: A =\<^bsub>U i\<^esub> B" 430 | shows ": A \ B" 431 | unfolding equivalence_def 432 | apply intro defer 433 | \<^item> apply (eq p) 434 | \<^enum> vars A B 435 | apply (comp at A in "A \ B" id_comp[symmetric]) 436 | using [[solve_side_conds=1]] 437 | apply (comp at B in "_ \ B" id_comp[symmetric]) 438 | apply (rule transport, rule Ui_in_USi) 439 | by (rule lift_universe_codomain, rule Ui_in_USi) 440 | \<^enum> vars A 441 | using [[solve_side_conds=1]] 442 | apply (comp transport_comp) 443 | \ by (rule Ui_in_USi) 444 | \ by compute (rule U_lift) 445 | \ by compute (rule id_is_biinv) 446 | done 447 | done 448 | 449 | \<^item> \ \Similar proof as in the first subgoal above\ 450 | apply (comp at A in "A \ B" id_comp[symmetric]) 451 | using [[solve_side_conds=1]] 452 | apply (comp at B in "_ \ B" id_comp[symmetric]) 453 | apply (rule transport, rule Ui_in_USi) 454 | by (rule lift_universe_codomain, rule Ui_in_USi) 455 | done 456 | 457 | 458 | end 459 | -------------------------------------------------------------------------------- /mltt/core/eqsubst.ML: -------------------------------------------------------------------------------- 1 | (* Title: eqsubst.ML 2 | Author: Lucas Dixon, University of Edinburgh 3 | Modified: Joshua Chen, University of Innsbruck 4 | 5 | Perform a substitution using an equation. 6 | 7 | This code is slightly modified from the original at Tools/eqsubst..ML, 8 | to incorporate auto-typechecking for type theory. 
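   The tactic is exposed to Isar as the proof method "sub", set up at the end of
   this file (the "subst" variant with automatic discharge of side conditions is
   currently commented out there).  Going by the argument parser below, and as an
   illustrative sketch only, invocations would look something like
     apply (sub some_eq)          to rewrite the first occurrence found,
     apply (sub (2) some_eq)      to target the second occurrence, and
     apply (sub (asm) some_eq)    to rewrite inside an assumption,
   where "some_eq" is a placeholder for any (object- or meta-level) equation.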
9 | *) 10 | 11 | signature EQSUBST = 12 | sig 13 | type match = 14 | ((indexname * (sort * typ)) list (* type instantiations *) 15 | * (indexname * (typ * term)) list) (* term instantiations *) 16 | * (string * typ) list (* fake named type abs env *) 17 | * (string * typ) list (* type abs env *) 18 | * term (* outer term *) 19 | 20 | type searchinfo = 21 | Proof.context 22 | * int (* maxidx *) 23 | * Zipper.T (* focusterm to search under *) 24 | 25 | datatype 'a skipseq = SkipMore of int | SkipSeq of 'a Seq.seq Seq.seq 26 | 27 | val skip_first_asm_occs_search: ('a -> 'b -> 'c Seq.seq Seq.seq) -> 'a -> int -> 'b -> 'c skipseq 28 | val skip_first_occs_search: int -> ('a -> 'b -> 'c Seq.seq Seq.seq) -> 'a -> 'b -> 'c Seq.seq 29 | val skipto_skipseq: int -> 'a Seq.seq Seq.seq -> 'a skipseq 30 | 31 | (* tactics *) 32 | val eqsubst_asm_tac: Proof.context -> int list -> thm list -> int -> tactic 33 | val eqsubst_asm_tac': Proof.context -> 34 | (searchinfo -> int -> term -> match skipseq) -> int -> thm -> int -> tactic 35 | val eqsubst_tac: Proof.context -> 36 | int list -> (* list of occurrences to rewrite, use [0] for any *) 37 | thm list -> int -> tactic 38 | val eqsubst_tac': Proof.context -> 39 | (searchinfo -> term -> match Seq.seq) (* search function *) 40 | -> thm (* equation theorem to rewrite with *) 41 | -> int (* subgoal number in goal theorem *) 42 | -> thm (* goal theorem *) 43 | -> thm Seq.seq (* rewritten goal theorem *) 44 | 45 | (* search for substitutions *) 46 | val valid_match_start: Zipper.T -> bool 47 | val search_lr_all: Zipper.T -> Zipper.T Seq.seq 48 | val search_lr_valid: (Zipper.T -> bool) -> Zipper.T -> Zipper.T Seq.seq 49 | val searchf_lr_unify_all: searchinfo -> term -> match Seq.seq Seq.seq 50 | val searchf_lr_unify_valid: searchinfo -> term -> match Seq.seq Seq.seq 51 | val searchf_bt_unify_valid: searchinfo -> term -> match Seq.seq Seq.seq 52 | end; 53 | 54 | structure EqSubst: EQSUBST = 55 | struct 56 | 57 | (* changes object "=" to meta "==" which prepares a given rewrite rule *) 58 | fun prep_meta_eq ctxt = 59 | Simplifier.mksimps ctxt #> map Drule.zero_var_indexes; 60 | 61 | (* make free vars into schematic vars with index zero *) 62 | fun unfix_frees frees = 63 | fold (K (Thm.forall_elim_var 0)) frees o Drule.forall_intr_list frees; 64 | 65 | 66 | type match = 67 | ((indexname * (sort * typ)) list (* type instantiations *) 68 | * (indexname * (typ * term)) list) (* term instantiations *) 69 | * (string * typ) list (* fake named type abs env *) 70 | * (string * typ) list (* type abs env *) 71 | * term; (* outer term *) 72 | 73 | type searchinfo = 74 | Proof.context 75 | * int (* maxidx *) 76 | * Zipper.T; (* focusterm to search under *) 77 | 78 | 79 | (* skipping non-empty sub-sequences but when we reach the end 80 | of the seq, remembering how much we have left to skip. *) 81 | datatype 'a skipseq = 82 | SkipMore of int | 83 | SkipSeq of 'a Seq.seq Seq.seq; 84 | 85 | (* given a seqseq, skip the first m non-empty seq's, note deficit *) 86 | fun skipto_skipseq m s = 87 | let 88 | fun skip_occs n sq = 89 | (case Seq.pull sq of 90 | NONE => SkipMore n 91 | | SOME (h, t) => 92 | (case Seq.pull h of 93 | NONE => skip_occs n t 94 | | SOME _ => if n <= 1 then SkipSeq (Seq.cons h t) else skip_occs (n - 1) t)) 95 | in skip_occs m s end; 96 | 97 | (* note: outerterm is the taget with the match replaced by a bound 98 | variable : ie: "P lhs" beocmes "%x. 
P x" 99 | insts is the types of instantiations of vars in lhs 100 | and typinsts is the type instantiations of types in the lhs 101 | Note: Final rule is the rule lifted into the ontext of the 102 | taget thm. *) 103 | fun mk_foo_match mkuptermfunc Ts t = 104 | let 105 | val ty = Term.type_of t 106 | val bigtype = rev (map snd Ts) ---> ty 107 | fun mk_foo 0 t = t 108 | | mk_foo i t = mk_foo (i - 1) (t $ (Bound (i - 1))) 109 | val num_of_bnds = length Ts 110 | (* foo_term = "fooabs y0 ... yn" where y's are local bounds *) 111 | val foo_term = mk_foo num_of_bnds (Bound num_of_bnds) 112 | in Abs ("fooabs", bigtype, mkuptermfunc foo_term) end; 113 | 114 | (* T is outer bound vars, n is number of locally bound vars *) 115 | (* THINK: is order of Ts correct...? or reversed? *) 116 | fun mk_fake_bound_name n = ":b_" ^ n; 117 | fun fakefree_badbounds Ts t = 118 | let val (FakeTs, Ts, newnames) = 119 | fold_rev (fn (n, ty) => fn (FakeTs, Ts, usednames) => 120 | let 121 | val newname = singleton (Name.variant_list usednames) n 122 | in 123 | ((mk_fake_bound_name newname, ty) :: FakeTs, 124 | (newname, ty) :: Ts, 125 | newname :: usednames) 126 | end) Ts ([], [], []) 127 | in (FakeTs, Ts, Term.subst_bounds (map Free FakeTs, t)) end; 128 | 129 | (* before matching we need to fake the bound vars that are missing an 130 | abstraction. In this function we additionally construct the 131 | abstraction environment, and an outer context term (with the focus 132 | abstracted out) for use in rewriting with RW_Inst.rw *) 133 | fun prep_zipper_match z = 134 | let 135 | val t = Zipper.trm z 136 | val c = Zipper.ctxt z 137 | val Ts = Zipper.C.nty_ctxt c 138 | val (FakeTs', Ts', t') = fakefree_badbounds Ts t 139 | val absterm = mk_foo_match (Zipper.C.apply c) Ts' t' 140 | in 141 | (t', (FakeTs', Ts', absterm)) 142 | end; 143 | 144 | (* Unification with exception handled *) 145 | (* given context, max var index, pat, tgt; returns Seq of instantiations *) 146 | fun clean_unify ctxt ix (a as (pat, tgt)) = 147 | let 148 | (* type info will be re-derived, maybe this can be cached 149 | for efficiency? *) 150 | val pat_ty = Term.type_of pat; 151 | val tgt_ty = Term.type_of tgt; 152 | (* FIXME is it OK to ignore the type instantiation info? 153 | or should I be using it? *) 154 | val typs_unify = 155 | SOME (Sign.typ_unify (Proof_Context.theory_of ctxt) (pat_ty, tgt_ty) (Vartab.empty, ix)) 156 | handle Type.TUNIFY => NONE; 157 | in 158 | (case typs_unify of 159 | SOME (typinsttab, ix2) => 160 | let 161 | (* FIXME is it right to throw away the flexes? 162 | or should I be using them somehow? *) 163 | fun mk_insts env = 164 | (Vartab.dest (Envir.type_env env), 165 | Vartab.dest (Envir.term_env env)); 166 | val initenv = 167 | Envir.Envir {maxidx = ix2, tenv = Vartab.empty, tyenv = typinsttab}; 168 | val useq = Unify.smash_unifiers (Context.Proof ctxt) [a] initenv 169 | handle ListPair.UnequalLengths => Seq.empty 170 | | Term.TERM _ => Seq.empty; 171 | fun clean_unify' useq () = 172 | (case (Seq.pull useq) of 173 | NONE => NONE 174 | | SOME (h, t) => SOME (mk_insts h, Seq.make (clean_unify' t))) 175 | handle ListPair.UnequalLengths => NONE 176 | | Term.TERM _ => NONE; 177 | in 178 | (Seq.make (clean_unify' useq)) 179 | end 180 | | NONE => Seq.empty) 181 | end; 182 | 183 | (* Unification for zippers *) 184 | (* Note: Ts is a modified version of the original names of the outer 185 | bound variables. New names have been introduced to make sure they are 186 | unique w.r.t all names in the term and each other. 
usednames' is 187 | oldnames + new names. *) 188 | fun clean_unify_z ctxt maxidx pat z = 189 | let val (t, (FakeTs, Ts, absterm)) = prep_zipper_match z in 190 | Seq.map (fn insts => (insts, FakeTs, Ts, absterm)) 191 | (clean_unify ctxt maxidx (t, pat)) 192 | end; 193 | 194 | 195 | fun bot_left_leaf_of (l $ _) = bot_left_leaf_of l 196 | | bot_left_leaf_of (Abs (_, _, t)) = bot_left_leaf_of t 197 | | bot_left_leaf_of x = x; 198 | 199 | (* Avoid considering replacing terms which have a var at the head as 200 | they always succeed trivially, and uninterestingly. *) 201 | fun valid_match_start z = 202 | (case bot_left_leaf_of (Zipper.trm z) of 203 | Var _ => false 204 | | _ => true); 205 | 206 | (* search from top, left to right, then down *) 207 | val search_lr_all = ZipperSearch.all_bl_ur; 208 | 209 | (* search from top, left to right, then down *) 210 | fun search_lr_valid validf = 211 | let 212 | fun sf_valid_td_lr z = 213 | let val here = if validf z then [Zipper.Here z] else [] in 214 | (case Zipper.trm z of 215 | _ $ _ => 216 | [Zipper.LookIn (Zipper.move_down_left z)] @ here @ 217 | [Zipper.LookIn (Zipper.move_down_right z)] 218 | | Abs _ => here @ [Zipper.LookIn (Zipper.move_down_abs z)] 219 | | _ => here) 220 | end; 221 | in Zipper.lzy_search sf_valid_td_lr end; 222 | 223 | (* search from bottom to top, left to right *) 224 | fun search_bt_valid validf = 225 | let 226 | fun sf_valid_td_lr z = 227 | let val here = if validf z then [Zipper.Here z] else [] in 228 | (case Zipper.trm z of 229 | _ $ _ => 230 | [Zipper.LookIn (Zipper.move_down_left z), 231 | Zipper.LookIn (Zipper.move_down_right z)] @ here 232 | | Abs _ => [Zipper.LookIn (Zipper.move_down_abs z)] @ here 233 | | _ => here) 234 | end; 235 | in Zipper.lzy_search sf_valid_td_lr end; 236 | 237 | fun searchf_unify_gen f (ctxt, maxidx, z) lhs = 238 | Seq.map (clean_unify_z ctxt maxidx lhs) (Zipper.limit_apply f z); 239 | 240 | (* search all unifications *) 241 | val searchf_lr_unify_all = searchf_unify_gen search_lr_all; 242 | 243 | (* search only for 'valid' unifiers (non abs subterms and non vars) *) 244 | val searchf_lr_unify_valid = searchf_unify_gen (search_lr_valid valid_match_start); 245 | 246 | val searchf_bt_unify_valid = searchf_unify_gen (search_bt_valid valid_match_start); 247 | 248 | (* apply a substitution in the conclusion of the theorem *) 249 | (* cfvs are certified free var placeholders for goal params *) 250 | (* conclthm is a theorem of for just the conclusion *) 251 | (* m is instantiation/match information *) 252 | (* rule is the equation for substitution *) 253 | fun apply_subst_in_concl ctxt i st (cfvs, conclthm) rule m = 254 | RW_Inst.rw ctxt m rule conclthm 255 | |> unfix_frees cfvs 256 | |> Conv.fconv_rule Drule.beta_eta_conversion 257 | |> (fn r => resolve_tac ctxt [r] i st); 258 | 259 | (* substitute within the conclusion of goal i of gth, using a meta 260 | equation rule. 
Note that we assume rule has var indicies zero'd *) 261 | fun prep_concl_subst ctxt i gth = 262 | let 263 | val th = Thm.incr_indexes 1 gth; 264 | val tgt_term = Thm.prop_of th; 265 | 266 | val (fixedbody, fvs) = IsaND.fix_alls_term ctxt i tgt_term; 267 | val cfvs = rev (map (Thm.cterm_of ctxt) fvs); 268 | 269 | val conclterm = Logic.strip_imp_concl fixedbody; 270 | val conclthm = Thm.trivial (Thm.cterm_of ctxt conclterm); 271 | val maxidx = Thm.maxidx_of th; 272 | val ft = 273 | (Zipper.move_down_right (* ==> *) 274 | o Zipper.move_down_left (* Trueprop *) 275 | o Zipper.mktop 276 | o Thm.prop_of) conclthm 277 | in 278 | ((cfvs, conclthm), (ctxt, maxidx, ft)) 279 | end; 280 | 281 | (* substitute using an object or meta level equality *) 282 | fun eqsubst_tac' ctxt searchf instepthm i st = 283 | let 284 | val (cvfsconclthm, searchinfo) = prep_concl_subst ctxt i st; 285 | val stepthms = Seq.of_list (prep_meta_eq ctxt instepthm); 286 | fun rewrite_with_thm r = 287 | let val (lhs,_) = Logic.dest_equals (Thm.concl_of r) in 288 | searchf searchinfo lhs 289 | |> Seq.maps (apply_subst_in_concl ctxt i st cvfsconclthm r) 290 | end; 291 | in stepthms |> Seq.maps rewrite_with_thm end; 292 | 293 | 294 | (* General substitution of multiple occurrences using one of 295 | the given theorems *) 296 | 297 | fun skip_first_occs_search occ srchf sinfo lhs = 298 | (case skipto_skipseq occ (srchf sinfo lhs) of 299 | SkipMore _ => Seq.empty 300 | | SkipSeq ss => Seq.flat ss); 301 | 302 | (* The "occs" argument is a list of integers indicating which occurrence 303 | w.r.t. the search order, to rewrite. Backtracking will also find later 304 | occurrences, but all earlier ones are skipped. Thus you can use [0] to 305 | just find all rewrites. *) 306 | 307 | fun eqsubst_tac ctxt occs thms i st = 308 | let val nprems = Thm.nprems_of st in 309 | if nprems < i then Seq.empty else 310 | let 311 | val thmseq = Seq.of_list thms; 312 | fun apply_occ occ st = 313 | thmseq |> Seq.maps (fn r => 314 | eqsubst_tac' ctxt 315 | (skip_first_occs_search occ searchf_lr_unify_valid) r 316 | (i + (Thm.nprems_of st - nprems)) st); 317 | val sorted_occs = Library.sort (rev_order o int_ord) occs; 318 | in 319 | Seq.maps distinct_subgoals_tac (Seq.EVERY (map apply_occ sorted_occs) st) 320 | end 321 | end; 322 | 323 | 324 | (* apply a substitution inside assumption j, keeps asm in the same place *) 325 | fun apply_subst_in_asm ctxt i st rule ((cfvs, j, _, pth),m) = 326 | let 327 | val st2 = Thm.rotate_rule (j - 1) i st; (* put premice first *) 328 | val preelimrule = 329 | RW_Inst.rw ctxt m rule pth 330 | |> (Seq.hd o prune_params_tac ctxt) 331 | |> Thm.permute_prems 0 ~1 (* put old asm first *) 332 | |> unfix_frees cfvs (* unfix any global params *) 333 | |> Conv.fconv_rule Drule.beta_eta_conversion; (* normal form *) 334 | in 335 | (* ~j because new asm starts at back, thus we subtract 1 *) 336 | Seq.map (Thm.rotate_rule (~ j) (Thm.nprems_of rule + i)) 337 | (dresolve_tac ctxt [preelimrule] i st2) 338 | end; 339 | 340 | 341 | (* prepare to substitute within the j'th premise of subgoal i of gth, 342 | using a meta-level equation. Note that we assume rule has var indicies 343 | zero'd. Note that we also assume that premt is the j'th premice of 344 | subgoal i of gth. Note the repetition of work done for each 345 | assumption, i.e. this can be made more efficient for search over 346 | multiple assumptions. 
*) 347 | fun prep_subst_in_asm ctxt i gth j = 348 | let 349 | val th = Thm.incr_indexes 1 gth; 350 | val tgt_term = Thm.prop_of th; 351 | 352 | val (fixedbody, fvs) = IsaND.fix_alls_term ctxt i tgt_term; 353 | val cfvs = rev (map (Thm.cterm_of ctxt) fvs); 354 | 355 | val asmt = nth (Logic.strip_imp_prems fixedbody) (j - 1); 356 | val asm_nprems = length (Logic.strip_imp_prems asmt); 357 | 358 | val pth = Thm.trivial ((Thm.cterm_of ctxt) asmt); 359 | val maxidx = Thm.maxidx_of th; 360 | 361 | val ft = 362 | (Zipper.move_down_right (* trueprop *) 363 | o Zipper.mktop 364 | o Thm.prop_of) pth 365 | in ((cfvs, j, asm_nprems, pth), (ctxt, maxidx, ft)) end; 366 | 367 | (* prepare subst in every possible assumption *) 368 | fun prep_subst_in_asms ctxt i gth = 369 | map (prep_subst_in_asm ctxt i gth) 370 | ((fn l => Library.upto (1, length l)) 371 | (Logic.prems_of_goal (Thm.prop_of gth) i)); 372 | 373 | 374 | (* substitute in an assumption using an object or meta level equality *) 375 | fun eqsubst_asm_tac' ctxt searchf skipocc instepthm i st = 376 | let 377 | val asmpreps = prep_subst_in_asms ctxt i st; 378 | val stepthms = Seq.of_list (prep_meta_eq ctxt instepthm); 379 | fun rewrite_with_thm r = 380 | let 381 | val (lhs,_) = Logic.dest_equals (Thm.concl_of r); 382 | fun occ_search occ [] = Seq.empty 383 | | occ_search occ ((asminfo, searchinfo)::moreasms) = 384 | (case searchf searchinfo occ lhs of 385 | SkipMore i => occ_search i moreasms 386 | | SkipSeq ss => 387 | Seq.append (Seq.map (Library.pair asminfo) (Seq.flat ss)) 388 | (occ_search 1 moreasms)) (* find later substs also *) 389 | in 390 | occ_search skipocc asmpreps |> Seq.maps (apply_subst_in_asm ctxt i st r) 391 | end; 392 | in stepthms |> Seq.maps rewrite_with_thm end; 393 | 394 | 395 | fun skip_first_asm_occs_search searchf sinfo occ lhs = 396 | skipto_skipseq occ (searchf sinfo lhs); 397 | 398 | fun eqsubst_asm_tac ctxt occs thms i st = 399 | let val nprems = Thm.nprems_of st in 400 | if nprems < i then Seq.empty 401 | else 402 | let 403 | val thmseq = Seq.of_list thms; 404 | fun apply_occ occ st = 405 | thmseq |> Seq.maps (fn r => 406 | eqsubst_asm_tac' ctxt 407 | (skip_first_asm_occs_search searchf_lr_unify_valid) occ r 408 | (i + (Thm.nprems_of st - nprems)) st); 409 | val sorted_occs = Library.sort (rev_order o int_ord) occs; 410 | in 411 | Seq.maps distinct_subgoals_tac (Seq.EVERY (map apply_occ sorted_occs) st) 412 | end 413 | end; 414 | 415 | (* combination method that takes a flag (true indicates that subst 416 | should be done to an assumption, false = apply to the conclusion of 417 | the goal) as well as the theorems to use *) 418 | val _ = 419 | let 420 | val parser = 421 | Scan.lift (Args.mode "asm" 422 | -- Scan.optional (Args.parens (Scan.repeat Parse.nat)) [0]) 423 | -- Attrib.thms 424 | fun eqsubst_asm_ctac occs inthms = 425 | CONTEXT_TACTIC' (fn ctxt => eqsubst_asm_tac ctxt occs inthms) 426 | fun eqsubst_ctac occs inthms = 427 | CONTEXT_TACTIC' (fn ctxt => eqsubst_tac ctxt occs inthms) 428 | in 429 | Theory.setup ( 430 | Method.setup \<^binding>\sub\ 431 | (parser >> (fn ((asm, occs), inthms) => fn ctxt => SIMPLE_METHOD' ( 432 | (if asm then eqsubst_asm_tac else eqsubst_tac) ctxt occs inthms))) 433 | "single-step substitution" (* #> 434 | Method.setup \<^binding>\subst\ 435 | (parser >> (fn ((asm, occs), inthms) => K (CONTEXT_METHOD ( 436 | CHEADGOAL o SIDE_CONDS 0 437 | ((if asm then eqsubst_asm_ctac else eqsubst_ctac) occs inthms))))) 438 | "single-step substitution with automatic discharge of side conditions" 
*) 439 | ) 440 | end 441 | 442 | end; 443 | -------------------------------------------------------------------------------- /mltt/core/comp.ML: -------------------------------------------------------------------------------- 1 | (* Title: compute.ML 2 | Author: Christoph Traut, Lars Noschinski, TU Muenchen 3 | Modified: Joshua Chen, University of Innsbruck 4 | 5 | This is a method for rewriting computational equalities that supports subterm 6 | selection based on patterns. 7 | 8 | This code has been slightly modified from the original at HOL/Library/compute.ML 9 | to incorporate automatic discharge of type-theoretic side conditions. 10 | 11 | Comment from the original code follows: 12 | 13 | The patterns accepted by compute are of the following form: 14 | ::= | "concl" | "asm" | "for" "(" ")" 15 | ::= (in | at ) [] 16 | ::= [] ("to" ) 17 | 18 | This syntax was clearly inspired by Gonthier's and Tassi's language of 19 | patterns but has diverged significantly during its development. 20 | 21 | We also allow introduction of identifiers for bound variables, 22 | which can then be used to match arbitrary subterms inside abstractions. 23 | *) 24 | 25 | infix 1 then_pconv; 26 | infix 0 else_pconv; 27 | 28 | signature COMPUTE = 29 | sig 30 | type patconv = Proof.context -> Type.tyenv * (string * term) list -> cconv 31 | val then_pconv: patconv * patconv -> patconv 32 | val else_pconv: patconv * patconv -> patconv 33 | val abs_pconv: patconv -> string option * typ -> patconv (*XXX*) 34 | val fun_pconv: patconv -> patconv 35 | val arg_pconv: patconv -> patconv 36 | val imp_pconv: patconv -> patconv 37 | val params_pconv: patconv -> patconv 38 | val forall_pconv: patconv -> string option * typ option -> patconv 39 | val all_pconv: patconv 40 | val for_pconv: patconv -> (string option * typ option) list -> patconv 41 | val concl_pconv: patconv -> patconv 42 | val asm_pconv: patconv -> patconv 43 | val asms_pconv: patconv -> patconv 44 | val judgment_pconv: patconv -> patconv 45 | val in_pconv: patconv -> patconv 46 | val match_pconv: patconv -> term * (string option * typ) list -> patconv 47 | val comps_pconv: term option -> thm list -> patconv 48 | 49 | datatype ('a, 'b) pattern = At | In | Term of 'a | Concl | Asm | For of 'b list 50 | 51 | val mk_hole: int -> typ -> term 52 | 53 | val compute_conv: Proof.context 54 | -> (term * (string * typ) list, string * typ option) pattern list * term option 55 | -> thm list 56 | -> conv 57 | end 58 | 59 | structure Compute : COMPUTE = 60 | struct 61 | 62 | datatype ('a, 'b) pattern = At | In | Term of 'a | Concl | Asm | For of 'b list 63 | 64 | exception NO_TO_MATCH 65 | 66 | val holeN = Name.internal "_hole" 67 | 68 | fun prep_meta_eq ctxt = Simplifier.mksimps ctxt #> map Drule.zero_var_indexes 69 | 70 | 71 | (* holes *) 72 | 73 | fun mk_hole i T = Var ((holeN, i), T) 74 | 75 | fun is_hole (Var ((name, _), _)) = (name = holeN) 76 | | is_hole _ = false 77 | 78 | fun is_hole_const (Const (\<^const_name>\compute_hole\, _)) = true 79 | | is_hole_const _ = false 80 | 81 | val hole_syntax = 82 | let 83 | (* Modified variant of Term.replace_hole *) 84 | fun replace_hole Ts (Const (\<^const_name>\compute_hole\, T)) i = 85 | (list_comb (mk_hole i (Ts ---> T), map_range Bound (length Ts)), i + 1) 86 | | replace_hole Ts (Abs (x, T, t)) i = 87 | let val (t', i') = replace_hole (T :: Ts) t i 88 | in (Abs (x, T, t'), i') end 89 | | replace_hole Ts (t $ u) i = 90 | let 91 | val (t', i') = replace_hole Ts t i 92 | val (u', i'') = replace_hole Ts u i' 93 | in (t' $ u', i'') 
end 94 | | replace_hole _ a i = (a, i) 95 | fun prep_holes ts = #1 (fold_map (replace_hole []) ts 1) 96 | in 97 | Context.proof_map (Syntax_Phases.term_check 101 "hole_expansion" (K prep_holes)) 98 | #> Proof_Context.set_mode Proof_Context.mode_pattern 99 | end 100 | 101 | 102 | (* pattern conversions *) 103 | 104 | type patconv = Proof.context -> Type.tyenv * (string * term) list -> cterm -> thm 105 | 106 | fun (cv1 then_pconv cv2) ctxt tytenv ct = (cv1 ctxt tytenv then_conv cv2 ctxt tytenv) ct 107 | 108 | fun (cv1 else_pconv cv2) ctxt tytenv ct = (cv1 ctxt tytenv else_conv cv2 ctxt tytenv) ct 109 | 110 | fun raw_abs_pconv cv ctxt tytenv ct = 111 | case Thm.term_of ct of 112 | Abs _ => CConv.abs_cconv (fn (x, ctxt') => cv x ctxt' tytenv) ctxt ct 113 | | t => raise TERM ("raw_abs_pconv", [t]) 114 | 115 | fun raw_fun_pconv cv ctxt tytenv ct = 116 | case Thm.term_of ct of 117 | _ $ _ => CConv.fun_cconv (cv ctxt tytenv) ct 118 | | t => raise TERM ("raw_fun_pconv", [t]) 119 | 120 | fun raw_arg_pconv cv ctxt tytenv ct = 121 | case Thm.term_of ct of 122 | _ $ _ => CConv.arg_cconv (cv ctxt tytenv) ct 123 | | t => raise TERM ("raw_arg_pconv", [t]) 124 | 125 | fun abs_pconv cv (s,T) ctxt (tyenv, ts) ct = 126 | let val u = Thm.term_of ct 127 | in 128 | case try (fastype_of #> dest_funT) u of 129 | NONE => raise TERM ("abs_pconv: no function type", [u]) 130 | | SOME (U, _) => 131 | let 132 | val tyenv' = 133 | if T = dummyT then tyenv 134 | else Sign.typ_match (Proof_Context.theory_of ctxt) (T, U) tyenv 135 | val eta_expand_cconv = 136 | case u of 137 | Abs _=> Thm.reflexive 138 | | _ => CConv.rewr_cconv @{thm eta_expand} 139 | fun add_ident NONE _ l = l 140 | | add_ident (SOME name) ct l = (name, Thm.term_of ct) :: l 141 | val abs_cv = CConv.abs_cconv (fn (ct, ctxt) => cv ctxt (tyenv', add_ident s ct ts)) ctxt 142 | in (eta_expand_cconv then_conv abs_cv) ct end 143 | handle Pattern.MATCH => raise TYPE ("abs_pconv: types don't match", [T,U], [u]) 144 | end 145 | 146 | fun fun_pconv cv ctxt tytenv ct = 147 | case Thm.term_of ct of 148 | _ $ _ => CConv.fun_cconv (cv ctxt tytenv) ct 149 | | Abs (_, T, _ $ Bound 0) => abs_pconv (fun_pconv cv) (NONE, T) ctxt tytenv ct 150 | | t => raise TERM ("fun_pconv", [t]) 151 | 152 | local 153 | 154 | fun arg_pconv_gen cv0 cv ctxt tytenv ct = 155 | case Thm.term_of ct of 156 | _ $ _ => cv0 (cv ctxt tytenv) ct 157 | | Abs (_, T, _ $ Bound 0) => abs_pconv (arg_pconv_gen cv0 cv) (NONE, T) ctxt tytenv ct 158 | | t => raise TERM ("arg_pconv_gen", [t]) 159 | 160 | in 161 | 162 | fun arg_pconv ctxt = arg_pconv_gen CConv.arg_cconv ctxt 163 | fun imp_pconv ctxt = arg_pconv_gen (CConv.concl_cconv 1) ctxt 164 | 165 | end 166 | 167 | (* Move to B in !!x_1 ... x_n. B. 
Do not eta-expand *) 168 | fun params_pconv cv ctxt tytenv ct = 169 | let val pconv = 170 | case Thm.term_of ct of 171 | Const (\<^const_name>\Pure.all\, _) $ Abs _ => (raw_arg_pconv o raw_abs_pconv) (fn _ => params_pconv cv) 172 | | Const (\<^const_name>\Pure.all\, _) => raw_arg_pconv (params_pconv cv) 173 | | _ => cv 174 | in pconv ctxt tytenv ct end 175 | 176 | fun forall_pconv cv ident ctxt tytenv ct = 177 | case Thm.term_of ct of 178 | Const (\<^const_name>\Pure.all\, T) $ _ => 179 | let 180 | val def_U = T |> dest_funT |> fst |> dest_funT |> fst 181 | val ident' = apsnd (the_default (def_U)) ident 182 | in arg_pconv (abs_pconv cv ident') ctxt tytenv ct end 183 | | t => raise TERM ("forall_pconv", [t]) 184 | 185 | fun all_pconv _ _ = Thm.reflexive 186 | 187 | fun for_pconv cv idents ctxt tytenv ct = 188 | let 189 | fun f rev_idents (Const (\<^const_name>\Pure.all\, _) $ t) = 190 | let val (rev_idents', cv') = f rev_idents (case t of Abs (_,_,u) => u | _ => t) 191 | in 192 | case rev_idents' of 193 | [] => ([], forall_pconv cv' (NONE, NONE)) 194 | | (x :: xs) => (xs, forall_pconv cv' x) 195 | end 196 | | f rev_idents _ = (rev_idents, cv) 197 | in 198 | case f (rev idents) (Thm.term_of ct) of 199 | ([], cv') => cv' ctxt tytenv ct 200 | | _ => raise CTERM ("for_pconv", [ct]) 201 | end 202 | 203 | fun concl_pconv cv ctxt tytenv ct = 204 | case Thm.term_of ct of 205 | (Const (\<^const_name>\Pure.imp\, _) $ _) $ _ => imp_pconv (concl_pconv cv) ctxt tytenv ct 206 | | _ => cv ctxt tytenv ct 207 | 208 | fun asm_pconv cv ctxt tytenv ct = 209 | case Thm.term_of ct of 210 | (Const (\<^const_name>\Pure.imp\, _) $ _) $ _ => CConv.with_prems_cconv ~1 (cv ctxt tytenv) ct 211 | | t => raise TERM ("asm_pconv", [t]) 212 | 213 | fun asms_pconv cv ctxt tytenv ct = 214 | case Thm.term_of ct of 215 | (Const (\<^const_name>\Pure.imp\, _) $ _) $ _ => 216 | ((CConv.with_prems_cconv ~1 oo cv) else_pconv imp_pconv (asms_pconv cv)) ctxt tytenv ct 217 | | t => raise TERM ("asms_pconv", [t]) 218 | 219 | fun judgment_pconv cv ctxt tytenv ct = 220 | if Object_Logic.is_judgment ctxt (Thm.term_of ct) 221 | then arg_pconv cv ctxt tytenv ct 222 | else cv ctxt tytenv ct 223 | 224 | fun in_pconv cv ctxt tytenv ct = 225 | (cv else_pconv 226 | raw_fun_pconv (in_pconv cv) else_pconv 227 | raw_arg_pconv (in_pconv cv) else_pconv 228 | raw_abs_pconv (fn _ => in_pconv cv)) 229 | ctxt tytenv ct 230 | 231 | fun replace_idents idents t = 232 | let 233 | fun subst ((n1, s)::ss) (t as Free (n2, _)) = if n1 = n2 then s else subst ss t 234 | | subst _ t = t 235 | in Term.map_aterms (subst idents) t end 236 | 237 | fun match_pconv cv (t,fixes) ctxt (tyenv, env_ts) ct = 238 | let 239 | val t' = replace_idents env_ts t 240 | val thy = Proof_Context.theory_of ctxt 241 | val u = Thm.term_of ct 242 | 243 | fun descend_hole fixes (Abs (_, _, t)) = 244 | (case descend_hole fixes t of 245 | NONE => NONE 246 | | SOME (fix :: fixes', pos) => SOME (fixes', abs_pconv pos fix) 247 | | SOME ([], _) => raise Match (* less fixes than abstractions on path to hole *)) 248 | | descend_hole fixes (t as l $ r) = 249 | let val (f, _) = strip_comb t 250 | in 251 | if is_hole f 252 | then SOME (fixes, cv) 253 | else 254 | (case descend_hole fixes l of 255 | SOME (fixes', pos) => SOME (fixes', fun_pconv pos) 256 | | NONE => 257 | (case descend_hole fixes r of 258 | SOME (fixes', pos) => SOME (fixes', arg_pconv pos) 259 | | NONE => NONE)) 260 | end 261 | | descend_hole fixes t = 262 | if is_hole t then SOME (fixes, cv) else NONE 263 | 264 | val to_hole = 
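      (*Walk the parsed pattern down to the hole, attaching the supplied
        identifiers to the abstractions passed along the way; if the pattern
        contains no hole, fall back to applying the conversion cv to the
        whole matched subterm.*)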
descend_hole (rev fixes) #> the_default ([], cv) #> snd 265 | in 266 | case try (Pattern.match thy (apply2 Logic.mk_term (t',u))) (tyenv, Vartab.empty) of 267 | NONE => raise TERM ("match_pconv: Does not match pattern", [t, t',u]) 268 | | SOME (tyenv', _) => to_hole t ctxt (tyenv', env_ts) ct 269 | end 270 | 271 | fun comps_pconv to thms ctxt (tyenv, env_ts) = 272 | let 273 | fun instantiate_normalize_env ctxt env thm = 274 | let 275 | val prop = Thm.prop_of thm 276 | val norm_type = Envir.norm_type o Envir.type_env 277 | val insts = Term.add_vars prop [] 278 | |> map (fn x as (s, T) => 279 | ((s, norm_type env T), Thm.cterm_of ctxt (Envir.norm_term env (Var x)))) 280 | val tyinsts = Term.add_tvars prop [] 281 | |> map (fn x => (x, Thm.ctyp_of ctxt (norm_type env (TVar x)))) 282 | in Drule.instantiate_normalize (tyinsts, insts) thm end 283 | 284 | fun unify_with_rhs context to env thm = 285 | let 286 | val (_, rhs) = thm |> Thm.concl_of |> Logic.dest_equals 287 | val env' = Pattern.unify context (Logic.mk_term to, Logic.mk_term rhs) env 288 | handle Pattern.Unif => raise NO_TO_MATCH 289 | in env' end 290 | 291 | fun inst_thm_to _ (NONE, _) thm = thm 292 | | inst_thm_to (ctxt : Proof.context) (SOME to, env) thm = 293 | instantiate_normalize_env ctxt (unify_with_rhs (Context.Proof ctxt) to env thm) thm 294 | 295 | fun inst_thm ctxt idents (to, tyenv) thm = 296 | let 297 | (* Replace any identifiers with their corresponding bound variables. *) 298 | val maxidx = Term.maxidx_typs (map (snd o snd) (Vartab.dest tyenv)) 0 299 | val env = Envir.Envir {maxidx = maxidx, tenv = Vartab.empty, tyenv = tyenv} 300 | val maxidx = Envir.maxidx_of env |> fold Term.maxidx_term (the_list to) 301 | val thm' = Thm.incr_indexes (maxidx + 1) thm 302 | in SOME (inst_thm_to ctxt (Option.map (replace_idents idents) to, env) thm') end 303 | handle NO_TO_MATCH => NONE 304 | 305 | in CConv.rewrs_cconv (map_filter (inst_thm ctxt env_ts (to, tyenv)) thms) end 306 | 307 | fun compute_conv ctxt (pattern, to) thms ct = 308 | let 309 | fun apply_pat At = judgment_pconv 310 | | apply_pat In = in_pconv 311 | | apply_pat Asm = params_pconv o asms_pconv 312 | | apply_pat Concl = params_pconv o concl_pconv 313 | | apply_pat (For idents) = (fn cv => for_pconv cv (map (apfst SOME) idents)) 314 | | apply_pat (Term x) = (fn cv => match_pconv cv (apsnd (map (apfst SOME)) x)) 315 | 316 | val cv = fold_rev apply_pat pattern 317 | 318 | fun distinct_prems th = 319 | case Seq.pull (distinct_subgoals_tac th) of 320 | NONE => th 321 | | SOME (th', _) => th' 322 | 323 | val compute = comps_pconv to (maps (prep_meta_eq ctxt) thms) 324 | in cv compute ctxt (Vartab.empty, []) ct |> distinct_prems end 325 | 326 | fun compute_export_tac ctxt (pat, pat_ctxt) thms = 327 | let 328 | val export = case pat_ctxt of 329 | NONE => I 330 | | SOME inner => singleton (Proof_Context.export inner ctxt) 331 | in CCONVERSION (export o compute_conv ctxt pat thms) end 332 | 333 | val _ = 334 | Theory.setup 335 | let 336 | fun mk_fix s = (Binding.name s, NONE, NoSyn) 337 | 338 | val raw_pattern : (string, binding * string option * mixfix) pattern list parser = 339 | let 340 | val sep = (Args.$$$ "at" >> K At) || (Args.$$$ "in" >> K In) 341 | val atom = (Args.$$$ "asm" >> K Asm) || 342 | (Args.$$$ "concl" >> K Concl) || 343 | (Args.$$$ "for" |-- Args.parens (Scan.optional Parse.vars []) >> For) || 344 | (Parse.term >> Term) 345 | val sep_atom = sep -- atom >> (fn (s,a) => [s,a]) 346 | 347 | fun append_default [] = [Concl, In] 348 | | append_default (ps as Term _ :: 
_) = Concl :: In :: ps 349 | | append_default [For x, In] = [For x, Concl, In] 350 | | append_default (For x :: (ps as In :: Term _:: _)) = For x :: Concl :: ps 351 | | append_default ps = ps 352 | 353 | in Scan.repeats sep_atom >> (rev #> append_default) end 354 | 355 | fun context_lift (scan : 'a parser) f = fn (context : Context.generic, toks) => 356 | let 357 | val (r, toks') = scan toks 358 | val (r', context') = Context.map_proof_result (fn ctxt => f ctxt r) context 359 | in (r', (context', toks' : Token.T list)) end 360 | 361 | fun read_fixes fixes ctxt = 362 | let fun read_typ (b, rawT, mx) = (b, Option.map (Syntax.read_typ ctxt) rawT, mx) 363 | in Proof_Context.add_fixes (map read_typ fixes) ctxt end 364 | 365 | fun prep_pats ctxt (ps : (string, binding * string option * mixfix) pattern list) = 366 | let 367 | fun add_constrs ctxt n (Abs (x, T, t)) = 368 | let 369 | val (x', ctxt') = yield_singleton Proof_Context.add_fixes (mk_fix x) ctxt 370 | in 371 | (case add_constrs ctxt' (n+1) t of 372 | NONE => NONE 373 | | SOME ((ctxt'', n', xs), t') => 374 | let 375 | val U = Type_Infer.mk_param n [] 376 | val u = Type.constraint (U --> dummyT) (Abs (x, T, t')) 377 | in SOME ((ctxt'', n', (x', U) :: xs), u) end) 378 | end 379 | | add_constrs ctxt n (l $ r) = 380 | (case add_constrs ctxt n l of 381 | SOME (c, l') => SOME (c, l' $ r) 382 | | NONE => 383 | (case add_constrs ctxt n r of 384 | SOME (c, r') => SOME (c, l $ r') 385 | | NONE => NONE)) 386 | | add_constrs ctxt n t = 387 | if is_hole_const t then SOME ((ctxt, n, []), t) else NONE 388 | 389 | fun prep (Term s) (n, ctxt) = 390 | let 391 | val t = Syntax.parse_term ctxt s 392 | val ((ctxt', n', bs), t') = 393 | the_default ((ctxt, n, []), t) (add_constrs ctxt (n+1) t) 394 | in (Term (t', bs), (n', ctxt')) end 395 | | prep (For ss) (n, ctxt) = 396 | let val (ns, ctxt') = read_fixes ss ctxt 397 | in (For ns, (n, ctxt')) end 398 | | prep At (n,ctxt) = (At, (n, ctxt)) 399 | | prep In (n,ctxt) = (In, (n, ctxt)) 400 | | prep Concl (n,ctxt) = (Concl, (n, ctxt)) 401 | | prep Asm (n,ctxt) = (Asm, (n, ctxt)) 402 | 403 | val (xs, (_, ctxt')) = fold_map prep ps (0, ctxt) 404 | 405 | in (xs, ctxt') end 406 | 407 | fun prep_args ctxt (((raw_pats, raw_to), raw_ths)) = 408 | let 409 | 410 | fun check_terms ctxt ps to = 411 | let 412 | fun safe_chop (0: int) xs = ([], xs) 413 | | safe_chop n (x :: xs) = chop (n - 1) xs |>> cons x 414 | | safe_chop _ _ = raise Match 415 | 416 | fun reinsert_pat _ (Term (_, cs)) (t :: ts) = 417 | let val (cs', ts') = safe_chop (length cs) ts 418 | in (Term (t, map dest_Free cs'), ts') end 419 | | reinsert_pat _ (Term _) [] = raise Match 420 | | reinsert_pat ctxt (For ss) ts = 421 | let val fixes = map (fn s => (s, Variable.default_type ctxt s)) ss 422 | in (For fixes, ts) end 423 | | reinsert_pat _ At ts = (At, ts) 424 | | reinsert_pat _ In ts = (In, ts) 425 | | reinsert_pat _ Concl ts = (Concl, ts) 426 | | reinsert_pat _ Asm ts = (Asm, ts) 427 | 428 | fun free_constr (s,T) = Type.constraint T (Free (s, dummyT)) 429 | fun mk_free_constrs (Term (t, cs)) = t :: map free_constr cs 430 | | mk_free_constrs _ = [] 431 | 432 | val ts = maps mk_free_constrs ps @ the_list to 433 | |> Syntax.check_terms (hole_syntax ctxt) 434 | val ctxt' = fold Variable.declare_term ts ctxt 435 | val (ps', (to', ts')) = fold_map (reinsert_pat ctxt') ps ts 436 | ||> (fn xs => case to of NONE => (NONE, xs) | SOME _ => (SOME (hd xs), tl xs)) 437 | val _ = case ts' of (_ :: _) => raise Match | [] => () 438 | in ((ps', to'), ctxt') end 439 | 440 | val 
(pats, ctxt') = prep_pats ctxt raw_pats 441 | 442 | val ths = Attrib.eval_thms ctxt' raw_ths 443 | val to = Option.map (Syntax.parse_term ctxt') raw_to 444 | 445 | val ((pats', to'), ctxt'') = check_terms ctxt' pats to 446 | 447 | in ((pats', ths, (to', ctxt)), ctxt'') end 448 | 449 | val to_parser = Scan.option ((Args.$$$ "to") |-- Parse.term) 450 | 451 | val subst_parser = 452 | let val scan = raw_pattern -- to_parser -- Parse.thms1 453 | in context_lift scan prep_args end 454 | 455 | fun compute_export_ctac inputs inthms = 456 | CONTEXT_TACTIC' (fn ctxt => compute_export_tac ctxt inputs inthms) 457 | in 458 | Method.setup \<^binding>\cmp\ (subst_parser >> 459 | (fn (pattern, inthms, (to, pat_ctxt)) => fn orig_ctxt => SIMPLE_METHOD' 460 | (compute_export_tac orig_ctxt ((pattern, to), SOME pat_ctxt) inthms))) 461 | "single-step rewriting, allowing subterm selection via patterns" #> 462 | Method.setup \<^binding>\comp\ (subst_parser >> 463 | (fn (pattern, inthms, (to, pat_ctxt)) => K (CONTEXT_METHOD ( 464 | CHEADGOAL o SIDE_CONDS 0 465 | (compute_export_ctac ((pattern, to), SOME pat_ctxt) inthms))))) 466 | "single-step rewriting with auto-typechecking" 467 | end 468 | end 469 | -------------------------------------------------------------------------------- /mltt/core/elaborated_statement.ML: -------------------------------------------------------------------------------- 1 | (* Title: elaborated_statement.ML 2 | Author: Joshua Chen 3 | 4 | Term elaboration for goal statements and proof commands. 5 | 6 | Contains code from parts of 7 | ~~/Pure/Isar/element.ML and 8 | ~~/Pure/Isar/expression.ML 9 | in both verbatim and modified forms. 10 | *) 11 | 12 | structure Elaborated_Statement: sig 13 | 14 | val read_goal_statement: 15 | (string, string, Facts.ref) Element.ctxt list -> 16 | (string, string) Element.stmt -> 17 | Proof.context -> 18 | (Attrib.binding * (term * term list) list) list * Proof.context 19 | 20 | end = struct 21 | 22 | 23 | (* Elaborated goal statements *) 24 | 25 | local 26 | 27 | fun mk_type T = (Logic.mk_type T, []) 28 | fun mk_term t = (t, []) 29 | fun mk_propp (p, pats) = (Type.constraint propT p, pats) 30 | 31 | fun dest_type (T, []) = Logic.dest_type T 32 | fun dest_term (t, []) = t 33 | fun dest_propp (p, pats) = (p, pats) 34 | 35 | fun extract_inst (_, (_, ts)) = map mk_term ts 36 | fun restore_inst ((l, (p, _)), cs) = (l, (p, map dest_term cs)) 37 | 38 | fun extract_eqns es = map (mk_term o snd) es 39 | fun restore_eqns (es, cs) = map2 (fn (b, _) => fn c => (b, dest_term c)) es cs 40 | 41 | fun extract_elem (Element.Fixes fixes) = map (#2 #> the_list #> map mk_type) fixes 42 | | extract_elem (Element.Constrains csts) = map (#2 #> single #> map mk_type) csts 43 | | extract_elem (Element.Assumes asms) = map (#2 #> map mk_propp) asms 44 | | extract_elem (Element.Defines defs) = map (fn (_, (t, ps)) => [mk_propp (t, ps)]) defs 45 | | extract_elem (Element.Notes _) = [] 46 | | extract_elem (Element.Lazy_Notes _) = [] 47 | 48 | fun restore_elem (Element.Fixes fixes, css) = 49 | (fixes ~~ css) |> map (fn ((x, _, mx), cs) => 50 | (x, cs |> map dest_type |> try hd, mx)) |> Element.Fixes 51 | | restore_elem (Element.Constrains csts, css) = 52 | (csts ~~ css) |> map (fn ((x, _), cs) => 53 | (x, cs |> map dest_type |> hd)) |> Element.Constrains 54 | | restore_elem (Element.Assumes asms, css) = 55 | (asms ~~ css) |> map (fn ((b, _), cs) => (b, map dest_propp cs)) |> Element.Assumes 56 | | restore_elem (Element.Defines defs, css) = 57 | (defs ~~ css) |> map (fn ((b, _), [c]) => 
(b, dest_propp c)) |> Element.Defines 58 | | restore_elem (elem as Element.Notes _, _) = elem 59 | | restore_elem (elem as Element.Lazy_Notes _, _) = elem 60 | 61 | fun prep (_, pats) (ctxt, t :: ts) = 62 | let val ctxt' = Proof_Context.augment t ctxt 63 | in 64 | ((t, Syntax.check_props 65 | (Proof_Context.set_mode Proof_Context.mode_pattern ctxt') pats), 66 | (ctxt', ts)) 67 | end 68 | 69 | fun check cs ctxt = 70 | let 71 | val (cs', (ctxt', _)) = fold_map prep cs 72 | (ctxt, Syntax.check_terms 73 | (Proof_Context.set_mode Proof_Context.mode_schematic ctxt) (map fst cs)) 74 | in (cs', ctxt') end 75 | 76 | fun inst_morphism params ((prfx, mandatory), insts') ctxt = 77 | let 78 | (*parameters*) 79 | val parm_types = map #2 params; 80 | val type_parms = fold Term.add_tfreesT parm_types []; 81 | 82 | (*type inference*) 83 | val parm_types' = map (Type_Infer.paramify_vars o Logic.varifyT_global) parm_types; 84 | val type_parms' = fold Term.add_tvarsT parm_types' []; 85 | val checked = 86 | (map (Logic.mk_type o TVar) type_parms' @ map2 Type.constraint parm_types' insts') 87 | |> Syntax.check_terms (Config.put Type_Infer.object_logic false ctxt) 88 | val (type_parms'', insts'') = chop (length type_parms') checked; 89 | 90 | (*context*) 91 | val ctxt' = fold Proof_Context.augment checked ctxt; 92 | val certT = Thm.trim_context_ctyp o Thm.ctyp_of ctxt'; 93 | val cert = Thm.trim_context_cterm o Thm.cterm_of ctxt'; 94 | 95 | (*instantiation*) 96 | val instT = 97 | (type_parms ~~ map Logic.dest_type type_parms'') 98 | |> map_filter (fn (v, T) => if TFree v = T then NONE else SOME (v, T)); 99 | val cert_inst = 100 | ((map #1 params ~~ map (Term_Subst.instantiateT_frees instT) parm_types) ~~ insts'') 101 | |> map_filter (fn (v, t) => if Free v = t then NONE else SOME (v, cert t)); 102 | in 103 | (Element.instantiate_normalize_morphism (map (apsnd certT) instT, cert_inst) $> 104 | Morphism.binding_morphism "Expression.inst" (Binding.prefix mandatory prfx), ctxt') 105 | end; 106 | 107 | fun abs_def ctxt = 108 | Thm.cterm_of ctxt #> Assumption.assume ctxt #> Local_Defs.abs_def_rule ctxt #> Thm.prop_of; 109 | 110 | fun declare_elem prep_var (Element.Fixes fixes) ctxt = 111 | let val (vars, _) = fold_map prep_var fixes ctxt 112 | in ctxt |> Proof_Context.add_fixes vars |> snd end 113 | | declare_elem prep_var (Element.Constrains csts) ctxt = 114 | ctxt |> fold_map (fn (x, T) => prep_var (Binding.name x, SOME T, NoSyn)) csts |> snd 115 | | declare_elem _ (Element.Assumes _) ctxt = ctxt 116 | | declare_elem _ (Element.Defines _) ctxt = ctxt 117 | | declare_elem _ (Element.Notes _) ctxt = ctxt 118 | | declare_elem _ (Element.Lazy_Notes _) ctxt = ctxt; 119 | 120 | fun parameters_of thy strict (expr, fixed) = 121 | let 122 | val ctxt = Proof_Context.init_global thy; 123 | 124 | fun reject_dups message xs = 125 | (case duplicates (op =) xs of 126 | [] => () 127 | | dups => error (message ^ commas dups)); 128 | 129 | fun parm_eq ((p1, mx1), (p2, mx2)) = 130 | p1 = p2 andalso 131 | (Mixfix.equal (mx1, mx2) orelse 132 | error ("Conflicting syntax for parameter " ^ quote p1 ^ " in expression" ^ 133 | Position.here_list [Mixfix.pos_of mx1, Mixfix.pos_of mx2])); 134 | 135 | fun params_loc loc = Locale.params_of thy loc |> map (apfst #1); 136 | fun params_inst (loc, (prfx, (Expression.Positional insts, eqns))) = 137 | let 138 | val ps = params_loc loc; 139 | val d = length ps - length insts; 140 | val insts' = 141 | if d < 0 then 142 | error ("More arguments than parameters in instantiation of locale " ^ 143 | quote 
(Locale.markup_name ctxt loc)) 144 | else insts @ replicate d NONE; 145 | val ps' = (ps ~~ insts') |> 146 | map_filter (fn (p, NONE) => SOME p | (_, SOME _) => NONE); 147 | in (ps', (loc, (prfx, (Expression.Positional insts', eqns)))) end 148 | | params_inst (loc, (prfx, (Expression.Named insts, eqns))) = 149 | let 150 | val _ = 151 | reject_dups "Duplicate instantiation of the following parameter(s): " 152 | (map fst insts); 153 | val ps' = (insts, params_loc loc) |-> fold (fn (p, _) => fn ps => 154 | if AList.defined (op =) ps p then AList.delete (op =) p ps 155 | else error (quote p ^ " not a parameter of instantiated expression")); 156 | in (ps', (loc, (prfx, (Expression.Named insts, eqns)))) end; 157 | fun params_expr is = 158 | let 159 | val (is', ps') = fold_map (fn i => fn ps => 160 | let 161 | val (ps', i') = params_inst i; 162 | val ps'' = distinct parm_eq (ps @ ps'); 163 | in (i', ps'') end) is [] 164 | in (ps', is') end; 165 | 166 | val (implicit, expr') = params_expr expr; 167 | 168 | val implicit' = map #1 implicit; 169 | val fixed' = map (Variable.check_name o #1) fixed; 170 | val _ = reject_dups "Duplicate fixed parameter(s): " fixed'; 171 | val implicit'' = 172 | if strict then [] 173 | else 174 | let 175 | val _ = 176 | reject_dups 177 | "Parameter(s) declared simultaneously in expression and for clause: " 178 | (implicit' @ fixed'); 179 | in map (fn (x, mx) => (Binding.name x, NONE, mx)) implicit end; 180 | in (expr', implicit'' @ fixed) end; 181 | 182 | fun parse_elem prep_typ prep_term ctxt = 183 | Element.map_ctxt 184 | {binding = I, 185 | typ = prep_typ ctxt, 186 | term = prep_term (Proof_Context.set_mode Proof_Context.mode_schematic ctxt), 187 | pattern = prep_term (Proof_Context.set_mode Proof_Context.mode_pattern ctxt), 188 | fact = I, 189 | attrib = I}; 190 | 191 | fun prepare_stmt prep_prop prep_obtains ctxt stmt = 192 | (case stmt of 193 | Element.Shows raw_shows => 194 | raw_shows |> (map o apsnd o map) (fn (t, ps) => 195 | (prep_prop (Proof_Context.set_mode Proof_Context.mode_schematic ctxt) t, 196 | map (prep_prop (Proof_Context.set_mode Proof_Context.mode_pattern ctxt)) ps)) 197 | | Element.Obtains raw_obtains => 198 | let 199 | val ((_, thesis), thesis_ctxt) = Obtain.obtain_thesis ctxt; 200 | val obtains = prep_obtains thesis_ctxt thesis raw_obtains; 201 | in map (fn (b, t) => ((b, []), [(t, [])])) obtains end); 202 | 203 | fun finish_fixes (parms: (string * typ) list) = map (fn (binding, _, mx) => 204 | let val x = Binding.name_of binding 205 | in (binding, AList.lookup (op =) parms x, mx) end) 206 | 207 | fun finish_inst ctxt (loc, (prfx, inst)) = 208 | let 209 | val thy = Proof_Context.theory_of ctxt; 210 | val (morph, _) = inst_morphism (map #1 (Locale.params_of thy loc)) (prfx, inst) ctxt; 211 | in (loc, morph) end 212 | 213 | fun closeup _ _ false elem = elem 214 | | closeup (outer_ctxt, ctxt) parms true elem = 215 | let 216 | (*FIXME consider closing in syntactic phase -- before type checking*) 217 | fun close_frees t = 218 | let 219 | val rev_frees = 220 | Term.fold_aterms (fn Free (x, T) => 221 | if Variable.is_fixed outer_ctxt x orelse AList.defined (op =) parms x then I 222 | else insert (op =) (x, T) | _ => I) t []; 223 | in fold (Logic.all o Free) rev_frees t end; 224 | 225 | fun no_binds [] = [] 226 | | no_binds _ = error "Illegal term bindings in context element"; 227 | in 228 | (case elem of 229 | Element.Assumes asms => Element.Assumes (asms |> map (fn (a, propps) => 230 | (a, map (fn (t, ps) => (close_frees t, no_binds ps)) propps))) 231 
| | Element.Defines defs => Element.Defines (defs |> map (fn ((name, atts), (t, ps)) => 232 | let val ((c, _), t') = Local_Defs.cert_def ctxt (K []) (close_frees t) 233 | in ((Thm.def_binding_optional (Binding.name c) name, atts), (t', no_binds ps)) end)) 234 | | e => e) 235 | end 236 | 237 | fun finish_elem _ parms _ (Element.Fixes fixes) = Element.Fixes (finish_fixes parms fixes) 238 | | finish_elem _ _ _ (Element.Constrains _) = Element.Constrains [] 239 | | finish_elem ctxts parms do_close (Element.Assumes asms) = closeup ctxts parms do_close (Element.Assumes asms) 240 | | finish_elem ctxts parms do_close (Element.Defines defs) = closeup ctxts parms do_close (Element.Defines defs) 241 | | finish_elem _ _ _ (elem as Element.Notes _) = elem 242 | | finish_elem _ _ _ (elem as Element.Lazy_Notes _) = elem 243 | 244 | fun check_autofix insts eqnss elems concl ctxt = 245 | let 246 | val inst_cs = map extract_inst insts; 247 | val eqns_cs = map extract_eqns eqnss; 248 | val elem_css = map extract_elem elems; 249 | val concl_cs = (map o map) mk_propp (map snd concl); 250 | (*Type inference*) 251 | val (inst_cs' :: eqns_cs' :: css', ctxt') = 252 | (fold_burrow o fold_burrow) check (inst_cs :: eqns_cs :: elem_css @ [concl_cs]) ctxt; 253 | val (elem_css', [concl_cs']) = chop (length elem_css) css'; 254 | in 255 | ((map restore_inst (insts ~~ inst_cs'), 256 | map restore_eqns (eqnss ~~ eqns_cs'), 257 | map restore_elem (elems ~~ elem_css'), 258 | map fst concl ~~ concl_cs'), ctxt') 259 | end 260 | 261 | fun prep_full_context_statement 262 | parse_typ parse_prop 263 | prep_obtains prep_var_elem prep_inst prep_eqns prep_attr prep_var_inst prep_expr 264 | {strict, do_close, fixed_frees} raw_import init_body raw_elems raw_stmt 265 | ctxt1 266 | = 267 | let 268 | val thy = Proof_Context.theory_of ctxt1 269 | val (raw_insts, fixed) = parameters_of thy strict (apfst (prep_expr thy) raw_import) 270 | fun prep_insts_cumulative (loc, (prfx, (inst, eqns))) (i, insts, eqnss, ctxt) = 271 | let 272 | val params = map #1 (Locale.params_of thy loc) 273 | val inst' = prep_inst ctxt (map #1 params) inst 274 | val parm_types' = 275 | params |> map (#2 #> Logic.varifyT_global #> 276 | Term.map_type_tvar (fn ((x, _), S) => TVar ((x, i), S)) #> 277 | Type_Infer.paramify_vars) 278 | val inst'' = map2 Type.constraint parm_types' inst' 279 | val insts' = insts @ [(loc, (prfx, inst''))] 280 | val ((insts'', _, _, _), ctxt2) = check_autofix insts' [] [] [] ctxt 281 | val inst''' = insts'' |> List.last |> snd |> snd 282 | val (inst_morph, _) = inst_morphism params (prfx, inst''') ctxt 283 | val ctxt' = Locale.activate_declarations (loc, inst_morph) ctxt2 284 | handle ERROR msg => if null eqns then error msg else 285 | (Locale.tracing ctxt1 286 | (msg ^ "\nFalling back to reading rewrites clause before activation."); 287 | ctxt2) 288 | val attrss = map (apsnd (map (prep_attr ctxt)) o fst) eqns 289 | val eqns' = (prep_eqns ctxt' o map snd) eqns 290 | val eqnss' = [attrss ~~ eqns'] 291 | val ((_, [eqns''], _, _), _) = check_autofix insts'' eqnss' [] [] ctxt' 292 | val rewrite_morph = eqns' 293 | |> map (abs_def ctxt') 294 | |> Variable.export_terms ctxt' ctxt 295 | |> Element.eq_term_morphism (Proof_Context.theory_of ctxt) 296 | |> the_default Morphism.identity 297 | val ctxt'' = Locale.activate_declarations (loc, inst_morph $> rewrite_morph) ctxt 298 | val eqnss' = eqnss @ [attrss ~~ Variable.export_terms ctxt' ctxt eqns'] 299 | in (i + 1, insts', eqnss', ctxt'') end 300 | 301 | fun prep_elem raw_elem ctxt = 302 | let 303 | 
val ctxt' = ctxt 304 | |> Context_Position.set_visible false 305 | |> declare_elem prep_var_elem raw_elem 306 | |> Context_Position.restore_visible ctxt 307 | val elems' = parse_elem parse_typ parse_prop ctxt' raw_elem 308 | in (elems', ctxt') end 309 | 310 | val fors = fold_map prep_var_inst fixed ctxt1 |> fst 311 | val ctxt2 = ctxt1 |> Proof_Context.add_fixes fors |> snd 312 | val (_, insts', eqnss', ctxt3) = fold prep_insts_cumulative raw_insts (0, [], [], ctxt2) 313 | 314 | fun prep_stmt elems ctxt = 315 | check_autofix insts' [] elems (prepare_stmt parse_prop prep_obtains ctxt raw_stmt) ctxt 316 | 317 | val _ = 318 | if fixed_frees then () 319 | else 320 | (case fold (fold (Variable.add_frees ctxt3) o snd o snd) insts' [] of 321 | [] => () 322 | | frees => error ("Illegal free variables in expression: " ^ 323 | commas_quote (map (Syntax.string_of_term ctxt3 o Free) (rev frees)))) 324 | 325 | val ((insts, _, elems', concl), ctxt4) = ctxt3 326 | |> init_body 327 | |> fold_map prep_elem raw_elems 328 | |-> prep_stmt 329 | 330 | (*parameters from expression and elements*) 331 | val xs = maps (fn Element.Fixes fixes => map (Variable.check_name o #1) fixes | _ => []) 332 | (Element.Fixes fors :: elems') 333 | val (parms, ctxt5) = fold_map Proof_Context.inferred_param xs ctxt4 334 | val fors' = finish_fixes parms fors 335 | val fixed = map (fn (b, SOME T, mx) => ((Binding.name_of b, T), mx)) fors' 336 | val deps = map (finish_inst ctxt5) insts 337 | val elems'' = map (finish_elem (ctxt1, ctxt5) parms do_close) elems' 338 | in ((fixed, deps, eqnss', elems'', concl), (parms, ctxt5)) end 339 | 340 | fun prep_inst prep_term ctxt parms (Expression.Positional insts) = 341 | (insts ~~ parms) |> map 342 | (fn (NONE, p) => Free (p, dummyT) 343 | | (SOME t, _) => prep_term ctxt t) 344 | | prep_inst prep_term ctxt parms (Expression.Named insts) = 345 | parms |> map (fn p => 346 | (case AList.lookup (op =) insts p of 347 | SOME t => prep_term ctxt t | 348 | NONE => Free (p, dummyT))) 349 | fun parse_inst x = prep_inst Syntax.parse_term x 350 | fun check_expr thy instances = map (apfst (Locale.check thy)) instances 351 | 352 | val read_full_context_statement = prep_full_context_statement 353 | Syntax.parse_typ Syntax.parse_prop Obtain.parse_obtains 354 | Proof_Context.read_var parse_inst Syntax.read_props Attrib.check_src 355 | Proof_Context.read_var check_expr 356 | 357 | fun filter_assumes ((x as Element.Assumes _) :: xs) = x :: filter_assumes xs 358 | | filter_assumes (_ :: xs) = filter_assumes xs 359 | | filter_assumes [] = [] 360 | 361 | fun prep_statement prep activate raw_elems raw_stmt ctxt = 362 | let 363 | val ((_, _, _, elems, concl), _) = 364 | prep {strict = true, do_close = false, fixed_frees = true} 365 | ([], []) I raw_elems raw_stmt ctxt 366 | 367 | val (elems', ctxt') = ctxt 368 | |> Proof_Context.set_stmt true 369 | |> fold_map activate elems 370 | |> apsnd (Proof_Context.restore_stmt ctxt) 371 | 372 | val assumes = filter_assumes elems' 373 | val assms = flat (flat (map 374 | (fn (Element.Assumes asms) => 375 | map (fn (_, facts) => map (Thm.cterm_of ctxt' o #1) facts) asms) 376 | assumes)) 377 | val concl' = Elab.elaborate ctxt' assms concl handle error => concl 378 | in (concl', ctxt') end 379 | 380 | fun activate_i elem ctxt = 381 | let 382 | val elem' = 383 | (case (Element.map_ctxt_attrib o map) Token.init_assignable elem of 384 | Element.Defines defs => 385 | Element.Defines (defs |> map (fn ((a, atts), (t, ps)) => 386 | ((Thm.def_binding_optional 387 | (Binding.name (#1 (#1 
(Local_Defs.cert_def ctxt (K []) t)))) a, atts), 388 | (t, ps)))) 389 | | Element.Assumes assms => Element.Assumes (Elab.elaborate ctxt [] assms) 390 | | e => e); 391 | val ctxt' = Context.proof_map (Element.init elem') ctxt; 392 | in ((Element.map_ctxt_attrib o map) Token.closure elem', ctxt') end 393 | 394 | fun activate raw_elem ctxt = 395 | let val elem = raw_elem |> Element.map_ctxt 396 | {binding = I, 397 | typ = I, 398 | term = I, 399 | pattern = I, 400 | fact = Proof_Context.get_fact ctxt, 401 | attrib = Attrib.check_src ctxt} 402 | in activate_i elem ctxt end 403 | 404 | in 405 | 406 | val read_goal_statement = prep_statement read_full_context_statement activate 407 | 408 | end 409 | 410 | 411 | (* Proof assumption command *) 412 | 413 | local 414 | 415 | val structured_statement = 416 | Parse_Spec.statement -- Parse_Spec.if_statement' -- Parse.for_fixes 417 | >> (fn ((shows, assumes), fixes) => (fixes, assumes, shows)) 418 | 419 | fun these_factss more_facts (named_factss, state) = 420 | (named_factss, state |> Proof.set_facts (maps snd named_factss @ more_facts)) 421 | 422 | fun gen_assume prep_statement prep_att export raw_fixes raw_prems raw_concls state = 423 | let 424 | val ctxt = Proof.context_of state; 425 | 426 | val bindings = map (apsnd (map (prep_att ctxt)) o fst) raw_concls; 427 | val {fixes = params, assumes = prems_propss, shows = concl_propss, result_binds, text, ...} = 428 | #1 (prep_statement raw_fixes raw_prems (map snd raw_concls) ctxt); 429 | val propss = (map o map) (Logic.close_prop params (flat prems_propss)) concl_propss; 430 | in 431 | state 432 | |> Proof.assert_forward 433 | |> Proof.map_context_result (fn ctxt => 434 | ctxt 435 | |> Proof_Context.augment text 436 | |> fold Variable.maybe_bind_term result_binds 437 | |> fold_burrow (Assumption.add_assms export o map (Thm.cterm_of ctxt)) propss 438 | |-> (fn premss => fn ctxt => 439 | (premss, Context_Facts.register_facts (flat premss) ctxt)) 440 | |-> (fn premss => 441 | Proof_Context.note_thmss "" (bindings ~~ (map o map) (fn th => ([th], [])) premss))) 442 | |> these_factss [] |> #2 443 | end 444 | 445 | val assume = 446 | gen_assume Proof_Context.cert_statement (K I) Assumption.assume_export 447 | 448 | in 449 | 450 | val _ = Outer_Syntax.command \<^command_keyword>\assuming\ "elaborated assumption" 451 | (structured_statement >> (fn (a, b, c) => Toplevel.proof (fn state => 452 | let 453 | val ctxt = Proof.context_of state 454 | 455 | fun read_option_typ NONE = NONE 456 | | read_option_typ (SOME s) = SOME (Syntax.read_typ ctxt s) 457 | fun read_terms (s, ss) = 458 | let val f = Syntax.read_term ctxt in (f s, map f ss) end 459 | 460 | val a' = map (fn (b, s, m) => (b, read_option_typ s, m)) a 461 | val b' = map (map read_terms) b 462 | val c' = c |> map (fn ((b, atts), ss) => 463 | ((b, map (Attrib.attribute_cmd ctxt) atts), map read_terms ss)) 464 | val c'' = Elab.elaborate ctxt [] c' 465 | in assume a' b' c'' state end))) 466 | 467 | end 468 | 469 | 470 | end -------------------------------------------------------------------------------- /mltt/core/MLTT.thy: -------------------------------------------------------------------------------- 1 | theory MLTT 2 | imports 3 | Pure 4 | "HOL-Eisbach.Eisbach" 5 | "HOL-Eisbach.Eisbach_Tools" 6 | keywords 7 | "Theorem" "Lemma" "Corollary" "Proposition" "Definition" :: thy_goal_stmt and 8 | "assuming" :: prf_asm % "proof" and 9 | "focus" "\<^item>" "\<^enum>" "\" "\" "~" :: prf_script_goal % "proof" and 10 | "calc" "print_coercions" :: thy_decl and 11 | "rhs" 
"def" "vars" :: quasi_command 12 | 13 | begin 14 | 15 | section \Notation\ 16 | 17 | declare [[eta_contract=false]] 18 | 19 | text \ 20 | Rebind notation for meta-lambdas since we want to use \\\ for the object 21 | lambdas. Metafunctions now use the binder \fn\. 22 | \ 23 | setup \ 24 | let 25 | val typ = Simple_Syntax.read_typ 26 | fun mixfix (sy, ps, p) = Mixfix (Input.string sy, ps, p, Position.no_range) 27 | in 28 | Sign.del_syntax (Print_Mode.ASCII, true) 29 | [("_lambda", typ "pttrns \ 'a \ logic", mixfix ("(3%_./ _)", [0, 3], 3))] 30 | #> Sign.del_syntax Syntax.mode_default 31 | [("_lambda", typ "pttrns \ 'a \ logic", mixfix ("(3\_./ _)", [0, 3], 3))] 32 | #> Sign.add_syntax Syntax.mode_default 33 | [("_lambda", typ "pttrns \ 'a \ logic", mixfix ("(3fn _./ _)", [0, 3], 3))] 34 | end 35 | \ 36 | 37 | syntax "_app" :: \logic \ logic \ logic\ (infixr "$" 3) 38 | translations "a $ b" \ "a (b)" 39 | 40 | abbreviation (input) K where "K x \ fn _. x" 41 | 42 | 43 | section \Metalogic\ 44 | 45 | text \ 46 | HOAS embedding of dependent type theory: metatype of expressions, and typing 47 | judgment. 48 | \ 49 | 50 | typedecl o 51 | 52 | consts has_type :: \o \ o \ prop\ ("(2_:/ _)" 999) 53 | 54 | 55 | section \Axioms\ 56 | 57 | subsection \Universes\ 58 | 59 | text \\-many cumulative Russell universes.\ 60 | 61 | typedecl lvl 62 | 63 | axiomatization 64 | O :: \lvl\ and 65 | S :: \lvl \ lvl\ and 66 | lt :: \lvl \ lvl \ prop\ (infix "<\<^sub>U" 900) 67 | where 68 | O_min: "O <\<^sub>U S i" and 69 | lt_S: "i <\<^sub>U S i" and 70 | lt_trans: "i <\<^sub>U j \ j <\<^sub>U k \ i <\<^sub>U k" 71 | 72 | axiomatization U :: \lvl \ o\ where 73 | Ui_in_Uj: "i <\<^sub>U j \ U i: U j" and 74 | U_cumul: "A: U i \ i <\<^sub>U j \ A: U j" 75 | 76 | lemma Ui_in_USi: 77 | "U i: U (S i)" 78 | by (rule Ui_in_Uj, rule lt_S) 79 | 80 | lemma U_lift: 81 | "A: U i \ A: U (S i)" 82 | by (erule U_cumul, rule lt_S) 83 | 84 | subsection \\-type\ 85 | 86 | axiomatization 87 | Pi :: \o \ (o \ o) \ o\ and 88 | lam :: \o \ (o \ o) \ o\ and 89 | app :: \o \ o \ o\ ("(1_ `_)" [120, 121] 120) 90 | 91 | syntax 92 | "_Pi" :: \idts \ o \ o \ o\ ("(2\_: _./ _)" 30) 93 | "_Pi2" :: \idts \ o \ o \ o\ 94 | "_lam" :: \idts \ o \ o \ o\ ("(2\_: _./ _)" 30) 95 | "_lam2" :: \idts \ o \ o \ o\ 96 | translations 97 | "\x xs: A. B" \ "CONST Pi A (fn x. _Pi2 xs A B)" 98 | "_Pi2 x A B" \ "\x: A. B" 99 | "\x: A. B" \ "CONST Pi A (fn x. B)" 100 | "\x xs: A. b" \ "CONST lam A (fn x. _lam2 xs A b)" 101 | "_lam2 x A b" \ "\x: A. b" 102 | "\x: A. b" \ "CONST lam A (fn x. b)" 103 | 104 | abbreviation Fn (infixr "\" 40) where "A \ B \ \_: A. B" 105 | 106 | axiomatization where 107 | PiF: "\A: U i; \x. x: A \ B x: U i\ \ \x: A. B x: U i" and 108 | 109 | PiI: "\A: U i; \x. x: A \ b x: B x\ \ \x: A. b x: \x: A. B x" and 110 | 111 | PiE: "\f: \x: A. B x; a: A\ \ f `a: B a" and 112 | 113 | beta: "\a: A; \x. x: A \ b x: B x\ \ (\x: A. b x) `a \ b a" and 114 | 115 | eta: "f: \x: A. B x \ \x: A. f `x \ f" and 116 | 117 | Pi_cong: "\ 118 | \x. x: A \ B x \ B' x; 119 | A: U i; 120 | \x. x: A \ B x: U j; 121 | \x. x: A \ B' x: U j 122 | \ \ \x: A. B x \ \x: A. B' x" and 123 | 124 | lam_cong: "\\x. x: A \ b x \ c x; A: U i\ \ \x: A. b x \ \x: A. c x" 125 | 126 | subsection \\-type\ 127 | 128 | axiomatization 129 | Sig :: \o \ (o \ o) \ o\ and 130 | pair :: \o \ o \ o\ ("(2<_,/ _>)") and 131 | SigInd :: \o \ (o \ o) \ (o \ o) \ (o \ o \ o) \ o \ o\ 132 | 133 | syntax "_Sum" :: \idt \ o \ o \ o\ ("(2\_: _./ _)" 20) 134 | 135 | translations "\x: A. 
B" \ "CONST Sig A (fn x. B)" 136 | 137 | abbreviation Prod (infixl "\" 60) 138 | where "A \ B \ \_: A. B" 139 | 140 | axiomatization where 141 | SigF: "\A: U i; \x. x: A \ B x: U i\ \ \x: A. B x: U i" and 142 | 143 | SigI: "\\x. x: A \ B x: U i; a: A; b: B a\ \ : \x: A. B x" and 144 | 145 | SigE: "\ 146 | p: \x: A. B x; 147 | A: U i; 148 | \x. x : A \ B x: U j; 149 | \p. p: \x: A. B x \ C p: U k; 150 | \x y. \x: A; y: B x\ \ f x y: C 151 | \ \ SigInd A (fn x. B x) (fn p. C p) f p: C p" and 152 | 153 | Sig_comp: "\ 154 | a: A; 155 | b: B a; 156 | \x. x: A \ B x: U i; 157 | \p. p: \x: A. B x \ C p: U i; 158 | \x y. \x: A; y: B x\ \ f x y: C 159 | \ \ SigInd A (fn x. B x) (fn p. C p) f \ f a b" and 160 | 161 | Sig_cong: "\ 162 | \x. x: A \ B x \ B' x; 163 | A: U i; 164 | \x. x : A \ B x: U j; 165 | \x. x : A \ B' x: U j 166 | \ \ \x: A. B x \ \x: A. B' x" 167 | 168 | 169 | section \Type checking & inference\ 170 | 171 | ML_file \lib.ML\ 172 | ML_file \context_facts.ML\ 173 | ML_file \context_tactical.ML\ 174 | 175 | \ \Rule attributes for the typechecker\ 176 | named_theorems form and intr and comp 177 | 178 | \ \Elimination/induction automation and the `elim` attribute\ 179 | ML_file \elimination.ML\ 180 | 181 | lemmas 182 | [form] = PiF SigF and 183 | [intr] = PiI SigI and 184 | [elim ?f] = PiE and 185 | [elim ?p] = SigE and 186 | [comp] = beta Sig_comp and 187 | [cong] = Pi_cong lam_cong Sig_cong 188 | 189 | \ \Subsumption rule\ 190 | lemma sub: 191 | assumes "a: A" "A \ A'" 192 | shows "a: A'" 193 | using assms by simp 194 | 195 | \ \Basic rewriting of computational equality\ 196 | ML_file \~~/src/Tools/misc_legacy.ML\ 197 | ML_file \~~/src/Tools/IsaPlanner/isand.ML\ 198 | ML_file \~~/src/Tools/IsaPlanner/rw_inst.ML\ 199 | ML_file \~~/src/Tools/IsaPlanner/zipper.ML\ 200 | ML_file \~~/src/Tools/eqsubst.ML\ 201 | 202 | \ \Term normalization, type checking & inference\ 203 | ML_file \types.ML\ 204 | 205 | method_setup typechk = 206 | \Scan.succeed (K (CONTEXT_METHOD ( 207 | CHEADGOAL o Types.check_infer)))\ 208 | 209 | method_setup known = 210 | \Scan.succeed (K (CONTEXT_METHOD ( 211 | CHEADGOAL o Types.known_ctac)))\ 212 | 213 | setup \ 214 | let val typechk = fn ctxt => 215 | NO_CONTEXT_TACTIC ctxt o Types.check_infer 216 | (Simplifier.prems_of ctxt @ Context_Facts.known ctxt) 217 | in 218 | map_theory_simpset (fn ctxt => ctxt 219 | addSolver (mk_solver "" typechk)) 220 | end 221 | \ 222 | 223 | section \Implicits\ 224 | 225 | text \ 226 | \{}\ is used to mark implicit arguments in definitions, while \?\ is expanded 227 | immediately for elaboration in statements. 228 | \ 229 | 230 | consts 231 | iarg :: \'a\ ("{}") 232 | hole :: \'b\ ("?") 233 | 234 | ML_file \implicits.ML\ 235 | 236 | attribute_setup implicit = \Scan.succeed Implicits.implicit_defs_attr\ 237 | 238 | ML \val _ = Context.>> (Syntax_Phases.term_check 1 "" Implicits.make_holes)\ 239 | 240 | text \Automatically insert inhabitation judgments where needed:\ 241 | syntax inhabited :: \o \ prop\ ("(_)") 242 | translations "inhabited A" \ "CONST has_type ? A" 243 | 244 | 245 | section \Statements and goals\ 246 | 247 | ML_file \focus.ML\ 248 | ML_file \elaboration.ML\ 249 | ML_file \elaborated_statement.ML\ 250 | ML_file \goals.ML\ 251 | 252 | text \Syntax for definition bodies.\ 253 | syntax defn :: \o \ prop\ ("(:=_)") 254 | translations "defn t" \ "CONST has_type t ?" 
255 | 256 | 257 | section \Proof methods\ 258 | 259 | named_theorems intro \ \Logical introduction rules\ 260 | 261 | lemmas [intro] = PiI[rotated] SigI 262 | 263 | \ \Case reasoning rules\ 264 | ML_file \cases.ML\ 265 | 266 | ML_file \tactics.ML\ 267 | 268 | method_setup rule = 269 | \Attrib.thms >> (fn ths => K (CONTEXT_METHOD ( 270 | CHEADGOAL o SIDE_CONDS 0 (rule_ctac ths))))\ 271 | 272 | method_setup dest = 273 | \Scan.lift (Scan.option (Args.parens Parse.nat)) 274 | -- Attrib.thms >> (fn (n_opt, ths) => K (CONTEXT_METHOD ( 275 | CHEADGOAL o SIDE_CONDS 0 (dest_ctac n_opt ths))))\ 276 | 277 | method_setup intro = 278 | \Scan.succeed (K (CONTEXT_METHOD ( 279 | CHEADGOAL o SIDE_CONDS 0 intro_ctac)))\ 280 | 281 | method_setup intros = 282 | \Scan.lift (Scan.option Parse.nat) >> (fn n_opt => 283 | K (CONTEXT_METHOD (fn facts => 284 | case n_opt of 285 | SOME n => CREPEAT_N n (CHEADGOAL (SIDE_CONDS 0 intro_ctac facts)) 286 | | NONE => CCHANGED (CREPEAT (CCHANGED ( 287 | CHEADGOAL (SIDE_CONDS 0 intro_ctac facts)))))))\ 288 | 289 | method_setup elim = 290 | \Scan.repeat Args.term >> (fn tms => K (CONTEXT_METHOD ( 291 | CHEADGOAL o SIDE_CONDS 0 (elim_ctac tms))))\ 292 | 293 | method_setup cases = 294 | \Args.term >> (fn tm => K (CONTEXT_METHOD ( 295 | CHEADGOAL o SIDE_CONDS 0 (cases_ctac tm))))\ 296 | 297 | method elims = elim+ 298 | method facts = fact+ 299 | 300 | 301 | subsection \Reflexivity\ 302 | 303 | named_theorems refl 304 | method refl = (rule refl) 305 | 306 | 307 | subsection \Trivial proofs (modulo automatic discharge of side conditions)\ 308 | 309 | method_setup this = 310 | \Scan.succeed (K (CONTEXT_METHOD (fn facts => 311 | CHEADGOAL (SIDE_CONDS 0 312 | (CONTEXT_TACTIC' (fn ctxt => resolve_tac ctxt facts)) 313 | facts))))\ 314 | 315 | 316 | subsection \Rewriting\ 317 | 318 | consts compute_hole :: "'a::{}" ("\") 319 | 320 | lemma eta_expand: 321 | fixes f :: "'a::{} \ 'b::{}" 322 | shows "f \ fn x. f x" . 323 | 324 | lemma rewr_imp: 325 | assumes "PROP A \ PROP B" 326 | shows "(PROP A \ PROP C) \ (PROP B \ PROP C)" 327 | apply (Pure.rule Pure.equal_intr_rule) 328 | apply (drule equal_elim_rule2[OF assms]; assumption) 329 | apply (drule equal_elim_rule1[OF assms]; assumption) 330 | done 331 | 332 | lemma imp_cong_eq: 333 | "(PROP A \ (PROP B \ PROP C) \ (PROP B' \ PROP C')) \ 334 | ((PROP B \ PROP A \ PROP C) \ (PROP B' \ PROP A \ PROP C'))" 335 | apply (Pure.intro Pure.equal_intr_rule) 336 | apply (drule (1) cut_rl; drule Pure.equal_elim_rule1 Pure.equal_elim_rule2; 337 | assumption)+ 338 | apply (drule Pure.equal_elim_rule1 Pure.equal_elim_rule2; assumption)+ 339 | done 340 | 341 | ML_file \~~/src/HOL/Library/cconv.ML\ 342 | ML_file \comp.ML\ 343 | 344 | \ \\compute\ simplifies terms via computational equalities\ 345 | method compute uses add = 346 | changed \repeat_new \(simp add: comp add | subst comp); typechk?\\ 347 | 348 | 349 | subsection \Calculational reasoning\ 350 | 351 | consts "rhs" :: \'a\ ("..") 352 | 353 | ML_file \calc.ML\ 354 | 355 | 356 | subsection \Implicit lambdas\ 357 | 358 | definition lam_i where [implicit]: "lam_i f \ lam {} f" 359 | 360 | syntax 361 | "_lam_i" :: \idts \ o \ o\ ("(2\_./ _)" 30) 362 | "_lam_i2" :: \idts \ o \ o\ 363 | translations 364 | "\x xs. b" \ "CONST lam_i (fn x. _lam_i2 xs b)" 365 | "_lam_i2 x b" \ "\x. b" 366 | "\x. b" \ "CONST lam_i (fn x. b)" 367 | 368 | translations "\x. b" \ "\x: A. 
b" 369 | 370 | 371 | section \Lambda coercion\ 372 | 373 | \ \Coerce object lambdas to meta-lambdas\ 374 | abbreviation (input) to_meta :: \o \ o \ o\ 375 | where "to_meta f \ fn x. f `x" 376 | 377 | ML_file \~~/src/Tools/subtyping.ML\ 378 | declare [[coercion_enabled, coercion to_meta]] 379 | 380 | translations "f x" \ "f `x" 381 | 382 | 383 | section \Functions\ 384 | 385 | Lemma eta_exp: 386 | assumes "f: \x: A. B x" 387 | shows "f \ \x: A. f x" 388 | by (rule eta[symmetric]) 389 | 390 | Lemma refine_codomain: 391 | assumes 392 | "A: U i" 393 | "f: \x: A. B x" 394 | "\x. x: A \ f `x: C x" 395 | shows "f: \x: A. C x" 396 | by (comp eta_exp) 397 | 398 | Lemma lift_universe_codomain: 399 | assumes "A: U i" "f: A \ U j" 400 | shows "f: A \ U (S j)" 401 | using U_lift 402 | by (rule refine_codomain) 403 | 404 | subsection \Function composition\ 405 | 406 | definition "funcomp A g f \ \x: A. g `(f `x)" 407 | 408 | syntax 409 | "_funcomp" :: \o \ o \ o \ o\ ("(2_ \\<^bsub>_\<^esub>/ _)" [111, 0, 110] 110) 410 | translations 411 | "g \\<^bsub>A\<^esub> f" \ "CONST funcomp A g f" 412 | 413 | Lemma funcompI [type]: 414 | assumes 415 | "A: U i" 416 | "B: U i" 417 | "\x. x: B \ C x: U i" 418 | "f: A \ B" 419 | "g: \x: B. C x" 420 | shows 421 | "g \\<^bsub>A\<^esub> f: \x: A. C (f x)" 422 | unfolding funcomp_def by typechk 423 | 424 | Lemma funcomp_assoc [comp]: 425 | assumes 426 | "A: U i" 427 | "f: A \ B" 428 | "g: B \ C" 429 | "h: \x: C. D x" 430 | shows 431 | "(h \\<^bsub>B\<^esub> g) \\<^bsub>A\<^esub> f \ h \\<^bsub>A\<^esub> g \\<^bsub>A\<^esub> f" 432 | unfolding funcomp_def by compute 433 | 434 | Lemma funcomp_lambda_comp [comp]: 435 | assumes 436 | "A: U i" 437 | "\x. x: A \ b x: B" 438 | "\x. x: B \ c x: C x" 439 | shows 440 | "(\x: B. c x) \\<^bsub>A\<^esub> (\x: A. b x) \ \x: A. c (b x)" 441 | unfolding funcomp_def by compute 442 | 443 | Lemma funcomp_apply_comp [comp]: 444 | assumes 445 | "A: U i" "B: U i" "\x. x: B \ C x: U i" 446 | "f: A \ B" "g: \x: B. C x" 447 | "x: A" 448 | shows "(g \\<^bsub>A\<^esub> f) x \ g (f x)" 449 | unfolding funcomp_def by compute 450 | 451 | subsection \Notation\ 452 | 453 | definition funcomp_i (infixr "\" 120) 454 | where [implicit]: "funcomp_i g f \ g \\<^bsub>{}\<^esub> f" 455 | 456 | translations "g \ f" \ "g \\<^bsub>A\<^esub> f" 457 | 458 | subsection \Identity function\ 459 | 460 | abbreviation id where "id A \ \x: A. x" 461 | 462 | lemma 463 | id_type [type]: "A: U i \ id A: A \ A" and 464 | id_comp [comp]: "x: A \ (id A) x \ x" \ \for the occasional manual rewrite\ 465 | by compute+ 466 | 467 | Lemma id_left [comp]: 468 | assumes "A: U i" "B: U i" "f: A \ B" 469 | shows "(id B) \\<^bsub>A\<^esub> f \ f" 470 | by (comp eta_exp[of f]) (compute, rule eta) 471 | 472 | Lemma id_right [comp]: 473 | assumes "A: U i" "B: U i" "f: A \ B" 474 | shows "f \\<^bsub>A\<^esub> (id A) \ f" 475 | by (comp eta_exp[of f]) (compute, rule eta) 476 | 477 | lemma id_U [type]: 478 | "id (U i): U i \ U i" 479 | using Ui_in_USi by typechk 480 | 481 | 482 | section \Pairs\ 483 | 484 | definition "fst A B \ \p: \x: A. B x. SigInd A B (fn _. A) (fn x y. x) p" 485 | definition "snd A B \ \p: \x: A. B x. SigInd A B (fn p. B (fst A B p)) (fn x y. y) p" 486 | 487 | Lemma fst_type [type]: 488 | assumes "A: U i" "\x. x: A \ B x: U i" 489 | shows "fst A B: (\x: A. B x) \ A" 490 | unfolding fst_def by typechk 491 | 492 | Lemma fst_comp [comp]: 493 | assumes 494 | "A: U i" "\x. 
x: A \ B x: U i" "a: A" "b: B a" 495 | shows "fst A B \ a" 496 | unfolding fst_def by compute 497 | 498 | Lemma snd_type [type]: 499 | assumes "A: U i" "\x. x: A \ B x: U i" 500 | shows "snd A B: \p: \x: A. B x. B (fst A B p)" 501 | unfolding snd_def by typechk 502 | 503 | Lemma snd_comp [comp]: 504 | assumes "A: U i" "\x. x: A \ B x: U i" "a: A" "b: B a" 505 | shows "snd A B \ b" 506 | unfolding snd_def by compute 507 | 508 | subsection \Notation\ 509 | 510 | definition fst_i ("fst") 511 | where [implicit]: "fst \ MLTT.fst {} {}" 512 | 513 | definition snd_i ("snd") 514 | where [implicit]: "snd \ MLTT.snd {} {}" 515 | 516 | translations 517 | "fst" \ "CONST MLTT.fst A B" 518 | "snd" \ "CONST MLTT.snd A B" 519 | 520 | subsection \Projections\ 521 | 522 | Lemma fst [type]: 523 | assumes 524 | "A: U i" "\x. x: A \ B x: U i" 525 | "p: \x: A. B x" 526 | shows "fst p: A" 527 | by typechk 528 | 529 | Lemma snd [type]: 530 | assumes 531 | "A: U i" "\x. x: A \ B x: U i" 532 | "p: \x: A. B x" 533 | shows "snd p: B (fst p)" 534 | by typechk 535 | 536 | method fst for p::o = rule fst[where ?p=p] 537 | method snd for p::o = rule snd[where ?p=p] 538 | 539 | text \Double projections:\ 540 | 541 | definition [implicit]: "p\<^sub>1\<^sub>1 p \ MLTT.fst {} {} (MLTT.fst {} {} p)" 542 | definition [implicit]: "p\<^sub>1\<^sub>2 p \ MLTT.snd {} {} (MLTT.fst {} {} p)" 543 | definition [implicit]: "p\<^sub>2\<^sub>1 p \ MLTT.fst {} {} (MLTT.snd {} {} p)" 544 | definition [implicit]: "p\<^sub>2\<^sub>2 p \ MLTT.snd {} {} (MLTT.snd {} {} p)" 545 | 546 | translations 547 | "CONST p\<^sub>1\<^sub>1 p" \ "fst (fst p)" 548 | "CONST p\<^sub>1\<^sub>2 p" \ "snd (fst p)" 549 | "CONST p\<^sub>2\<^sub>1 p" \ "fst (snd p)" 550 | "CONST p\<^sub>2\<^sub>2 p" \ "snd (snd p)" 551 | 552 | Lemma (def) distribute_Sig: 553 | assumes 554 | "A: U i" 555 | "\x. x: A \ B x: U i" 556 | "\x. x: A \ C x: U i" 557 | "p: \x: A. B x \ C x" 558 | shows "(\x: A. B x) \ (\x: A. C x)" 559 | proof intro 560 | have "fst p: A" and "snd p: B (fst p) \ C (fst p)" 561 | by typechk+ 562 | thus ": \x: A. B x" 563 | and ": \x: A. C x" 564 | by typechk+ 565 | qed 566 | 567 | 568 | end 569 | --------------------------------------------------------------------------------