├── README.md ├── exercises ├── forwardsimulation.v ├── group.v └── natcycleperm.v ├── exsolutions ├── forwardsimulation.v ├── group.v └── natcycleperm.v ├── l01.v ├── l01sol.v ├── l02.v ├── l02sol.v ├── l05.v ├── l05sol.v ├── l06.v ├── l06sol.v ├── l14.v ├── lec ├── l01-simple.v ├── l02-list.v ├── l05-decreasing.v ├── l06-increasing.v └── l10-perm-inv-add.v ├── lecsolutions ├── l01-simple.v ├── l02-list.v ├── l05-decreasing.v └── l06-increasing.v ├── naivecoq.md ├── pset1 ├── README.md ├── pset1bnat.v ├── pset1btree.v └── pset1compare.v ├── pset2 ├── README.md └── pset2permutation.v ├── pset3 ├── README.md ├── pset3regex.v └── pset3sort.v ├── pset4 ├── README.md └── pset4osview.v ├── sep-sol.v ├── sep.v └── sep2.v /README.md: -------------------------------------------------------------------------------- 1 | CS 260r 2017: Verified Systems 2 | ============================== 3 | 4 | This is the source code and lecture code repository for Harvard’s CS 260r class, 5 | Projects and Close Readings in Software Systems, for spring 2017, with the topic 6 | of Verified Systems. 7 | -------------------------------------------------------------------------------- /exercises/forwardsimulation.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Require Import Relation_Operators. 7 | 8 | Section ForwardSimulation. 9 | 10 | (* S is the type of a specification. *) 11 | Variable S:Type. 12 | 13 | (* I is the type of an implementation state. *) 14 | Variable I:Type. 15 | 16 | (* This function takes a single deterministic step on an 17 | implementation state. *) 18 | Variable istep:I -> I. 19 | 20 | (* This relation represents a nondeterministic step on the 21 | specification. *) 22 | Variable sstep:S -> S -> Prop. 23 | 24 | (* This relation says whether an implementation state matches a 25 | specification. *) 26 | Variable ispec:I -> S -> Prop. 27 | 28 | 29 | (* Kleene-star on `istep`: `isteps i i'` is True iff you can get 30 | from i to i' in zero or more `istep`s. *) 31 | Inductive isteps: I -> I -> Prop := 32 | | isteps_refl: forall i, isteps i i 33 | | isteps_cons: forall i i', isteps i i' -> isteps i (istep i'). 34 | 35 | (* Kleene-star on `sstep`: `ssteps s s'` is True iff you can get 36 | from s to s' in zero or more `sstep`s. *) 37 | Inductive ssteps: S -> S -> Prop := 38 | | ssteps_refl: forall s, ssteps s s 39 | | ssteps_cons: forall a b c, ssteps a b -> sstep b c -> ssteps a c. 40 | 41 | 42 | (* Prove this lemma. *) 43 | Lemma ssteps_trans (a b c:S): 44 | ssteps a b -> ssteps b c -> ssteps a c. 45 | (* YOUR CODE HERE *) 46 | Admitted. 47 | 48 | 49 | (* Now, *state* the definition of forward simulation. 50 | 51 | There are many forward simulation definitions. The one we want is 52 | that single deterministic implementation steps are matched by 53 | zero or more specification steps. 54 | 55 | So `PS`, a Prop over specifications, is a forward simulation iff: 56 | 57 | 1. For every specification `s` that fits `PS` (i.e. `PS s` is 58 | True), 59 | 2. And every implementation `i` that maps to `s` under 60 | `ispec`, 61 | 3. When implementation `i` takes a step via `istep`, 62 | 4. That next implementation maps to some specification `s'` that 63 | also fits `PS`. *) 64 | 65 | Definition ForwardSimulation (PS:S -> Prop) := 66 | forall ... (* YOUR CODE HERE! *) 67 | 68 | Hint Unfold ForwardSimulation. 
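(* An illustrative aside, not part of the exercise (the example names
   are ours): a quick sanity check that the step relations above
   compose as expected. A single `istep` yields an `isteps`, and so
   does a chain of two. *)
Example isteps_once i: isteps i (istep i).
  apply isteps_cons. apply isteps_refl.
Qed.

Example isteps_twice i: isteps i (istep (istep i)).
  apply isteps_cons. apply isteps_cons. apply isteps_refl.
Qed.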
69 | 70 | 71 | (* Now, state and prove this lemma, which says that forward 72 | simulation is preserved over MULTIPLE implementation steps 73 | (isteps rather than istep). *) 74 | Lemma ForwardSimulation_star 75 | (PS:S -> Prop) 76 | (fsim:ForwardSimulation PS) 77 | (* YOUR CODE HERE! *) 78 | 79 | End ForwardSimulation. 80 | -------------------------------------------------------------------------------- /exercises/group.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Require Import Classical. 7 | 8 | (* A *group* is an algebraic structure comprising a set of elements 9 | `T` and a binary operation `f` and satisfying four laws: 10 | 11 | 1. CLOSURE: If `x, y ∈ T`, then `f x y ∈ T`. 12 | 2. ASSOCIATIVITY: `f x (f y z) = f (f x y) z`. 13 | 3. RIGHT IDENTITY: There exists an identity element `e ∈ T` 14 | where `f x e = x` for all `x`. 15 | 4. RIGHT INVERSE: Every element `x` has a right inverse, `y`, 16 | so that `f x y = e`. 17 | 18 | We can express this in Coq like so, using a *type class*. The 19 | type class is parameterized on the set `T`, the operation `f`, 20 | the right-identity element `e`, and the inverse function `i`. 21 | Then there are three laws that must hold. *) 22 | 23 | Class Group {T} {f:T -> T -> T} {e:T} {i:T -> T} := { 24 | gassoc : forall a b c:T, f a (f b c) = f (f a b) c; 25 | grightid : forall a:T, f a e = a; 26 | grightinv : forall a:T, f a (i a) = e 27 | }. 28 | 29 | 30 | (* To show that a given set and operation form a group, we create an 31 | *instance* of the Group type class. 32 | 33 | Let’s show that addition on integers (the Coq `Z` type) is a group. 34 | The identity element is 0 (`Z0`), and the inverse function is 35 | `Z.opp` (Coq’s name for the unary negative operator). 36 | 37 | To declare the type instance, we first give its arguments: 38 | `Z Z.add Z0 Z.opp`. Then we need to show that the group laws hold. 39 | Here, we do that using a `Proof` environment, which opens three 40 | subgoals, one per law. *) 41 | 42 | Instance Z_Group: @Group Z Z.add Z0 Z.opp := { }. 43 | Proof. 44 | - apply Z.add_assoc. 45 | - apply Z.add_0_r. 46 | - apply Z.add_opp_diag_r. 47 | Qed. 48 | 49 | 50 | (* Given the `Z_Group` instance, we can prove facts about `Z`. *) 51 | Lemma Z_add_assoc x y z: 52 | (x + (y + z))%Z = (x + y + z)%Z. 53 | 54 | apply gassoc. (* Magic! Coq finds `Z_Group`. *) 55 | Qed. 56 | 57 | 58 | (* But this becomes much more interesting if we can prove some facts 59 | about *all* groups. *) 60 | 61 | Section GroupFacts. 62 | Variable T:Type. 63 | Variable f:T -> T -> T. 64 | Variable e:T. 65 | Variable i:T -> T. 66 | Variable G:@Group T f e i. 67 | 68 | Infix "*" := f. 69 | 70 | (* Gain local names for the group laws on group G. *) 71 | Local Definition Gassoc := @gassoc _ _ _ _ G. 72 | Local Definition Grightinv := @grightinv _ _ _ _ G. 73 | Local Definition Grightid := @grightid _ _ _ _ G. 74 | 75 | Set Implicit Arguments. 76 | 77 | Lemma mult_both a b c d1 d2: 78 | a * c = d1 -> 79 | b * c = d2 -> 80 | a = b -> 81 | d1 = d2. 82 | 83 | intros; rewrite <- H; rewrite <- H0; rewrite H1; auto. 84 | Qed. 85 | 86 | 87 | (* Your turn! These generic laws are true for all groups. *) 88 | Lemma characterizingid a: 89 | a * a = a -> 90 | a = e. 91 | Admitted. 92 | 93 | Lemma leftinv a: 94 | i a * a = e. 95 | Admitted. 96 | 97 | Lemma leftid a: 98 | e * a = a. 99 | Admitted. 
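(* An aside, not one of the exercises (the example name is ours): the
   local names `Gassoc`, `Grightid`, and `Grightinv` can be used
   directly with `rewrite`, which is the main tool the lemmas below
   need. *)
Example rewrite_demo a b:
  (a * e) * (b * i b) = a.

  rewrite Grightid.
  rewrite Grightinv.
  apply Grightid.
Qed.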
100 | 101 | Lemma leftid_uniq p a: 102 | p * a = a -> 103 | p = e. 104 | Admitted. 105 | 106 | Lemma rightinv_uniq a b: 107 | a * b = e -> 108 | b = i a. 109 | Admitted. 110 | 111 | Lemma leftinv_uniq a b: 112 | a * b = e -> 113 | a = i b. 114 | Admitted. 115 | 116 | Lemma right_cancel a b x: 117 | a * x = b * x -> 118 | a = b. 119 | Admitted. 120 | 121 | Lemma left_cancel a b x: 122 | x * a = x * b -> 123 | a = b. 124 | Admitted. 125 | 126 | Lemma inv_distrib a b: 127 | i (a * b) = i b * i a. 128 | Admitted. 129 | 130 | Lemma double_inv a: 131 | i (i a) = a. 132 | Admitted. 133 | 134 | Lemma id_inv: 135 | i e = e. 136 | Admitted. 137 | 138 | Unset Implicit Arguments. 139 | End GroupFacts. 140 | 141 | 142 | (* Now these *general* group facts can be used to prove properties 143 | about any group, such as Z. *) 144 | 145 | Lemma Z_inv_distrib (a b:Z): 146 | (- (a + b))%Z = (- b + - a)%Z. 147 | 148 | apply (inv_distrib Z_Group). 149 | Qed. 150 | 151 | 152 | (* But of course there are many other groups. We now show that 153 | addition modulo 32 is a group. *) 154 | 155 | (* `Z32` is the set `{0, 1, ..., 31}`. We write it as a dependent 156 | type. Constructing a `Z32` requires two things: a `nat` and a proof 157 | that the `nat` is less than 32. *) 158 | Inductive Z32 : Set := 159 | | z32 : forall n:nat, n < 32 -> Z32. 160 | Hint Constructors Z32. 161 | 162 | (* Two Z32s are equal iff their `nat` components are equal. 163 | The proof of this lemma uses classical logic. *) 164 | Lemma z32_proof_irrelevance n pf1 p pf2: 165 | n = p -> z32 n pf1 = z32 p pf2. 166 | 167 | intros; subst. 168 | assert (pf1 = pf2) by (apply proof_irrelevance). 169 | subst; auto. 170 | Qed. 171 | 172 | 173 | (* Addition on integers (mod 32) *) 174 | Definition z32_add: Z32 -> Z32 -> Z32. 175 | (* What we want to do: given `x y`, return `(x + y) mod 32`. 176 | But in order to construct the answer as a Z32, we must provide a 177 | proof that the returned value’s `nat` component is less than 32. 178 | It’s easiest to do this by editing a proof. *) 179 | refine (fun x y => match (x, y) with 180 | | (z32 n _, z32 p _) => z32 ((n + p) mod 32) _ 181 | end). 182 | apply Nat.mod_upper_bound; discriminate. 183 | Defined. 184 | 185 | (* Addition on integers mod 32 is associative *) 186 | Lemma z32_add_assoc x y z: 187 | z32_add x (z32_add y z) = z32_add (z32_add x y) z. 188 | 189 | destruct x, y, z; unfold z32_add. 190 | apply z32_proof_irrelevance. 191 | rewrite Nat.add_mod_idemp_l by omega. 192 | rewrite Nat.add_mod_idemp_r by omega. 193 | rewrite plus_assoc; auto. 194 | Qed. 195 | 196 | 197 | (* Zero (mod 32) *) 198 | Definition z32_0 : Z32. 199 | refine (z32 0 _). 200 | omega. 201 | Defined. 202 | 203 | 204 | (* Inverse on integers (mod 32) *) 205 | Definition z32_inv: Z32 -> Z32. 206 | refine (fun x => match x with 207 | | (z32 0 _) => x 208 | | (z32 (S n) pf) => z32 (32 - (S n)) _ 209 | end). 210 | omega. 211 | Defined. 212 | 213 | (* Inverse is an inverse *) 214 | Lemma z32_add_inv_diag x: 215 | z32_add x (z32_inv x) = z32_0. 216 | remember (z32_inv x) as y; destruct x, y. 217 | simpl; apply z32_proof_irrelevance. 218 | destruct n. 219 | - simpl in Heqy; inversion Heqy; auto. 220 | - assert (n0 = 32 - S n) by (simpl in Heqy; inversion Heqy; auto). 221 | subst. 222 | rewrite <- le_plus_minus by omega. 223 | apply Nat.mod_same; discriminate. 224 | Qed. 225 | 226 | 227 | (* Now we are ready to show that Z32 is a group. *) 228 | Instance Z32_Group : @Group Z32 z32_add z32_0 z32_inv := { }. 229 | Proof. 
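(* The three bullets below discharge the three `Group` obligations in
   declaration order: associativity, right identity, right inverse. *)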
230 | - apply z32_add_assoc. 231 | - intros; unfold z32_add; destruct a. 232 | apply z32_proof_irrelevance. 233 | rewrite <- plus_n_O. 234 | apply Nat.mod_small; auto. 235 | - apply z32_add_inv_diag. 236 | Qed. 237 | 238 | 239 | (* Which lets us easily prove facts about Z32. *) 240 | Lemma Z32_rightinv_uniq x y: 241 | z32_add x y = z32_0 -> 242 | y = z32_inv x. 243 | 244 | apply (rightinv_uniq Z32_Group). 245 | Qed. 246 | -------------------------------------------------------------------------------- /exercises/natcycleperm.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Require Import Program.Equality. 7 | Require Import Sorting.Permutation. 8 | Import ListNotations. 9 | 10 | (** * Cyclic permutations 11 | 12 | This file contains a permutation development based on cycling: 13 | 14 | [[ 15 | Inductive Perm2 {A}: list A -> list A -> Prop := 16 | | perm2_nil : Perm2 [] [] 17 | | perm2_cons : forall x y a, 18 | Perm2 x y -> Perm2 (a :: x) (a :: y) 19 | | perm2_cycle : forall x y a, 20 | Perm2 (a :: x) y -> Perm2 (x ++ [a]) y. 21 | ]] 22 | 23 | We prove that cycling permutations are equivalent to Coq Permutations. 24 | As you’ve seen, though, this definition is radically difficult to work with. 25 | There is no way I’ve found to prove anything about Perm2 directly 26 | (the equivalent of the [Permutation_Add_inv] lemma must be assumed). 27 | 28 | Instead we work with a stronger definition, [NatCyclePerm], about which 29 | we can prove stronger properties. [NatCyclePerm] is a cyclic permutation 30 | for _distinct natural numbers_: 31 | 32 | [[ 33 | Inductive NatCyclePerm : list nat -> list nat -> Prop := 34 | | ncp_nil : NatCyclePerm [] [] 35 | | ncp_cons : forall is js i, 36 | NatCyclePerm is js -> 37 | ~ In i is -> 38 | NatCyclePerm (i::is) (i::js) 39 | | ncp_cycle : forall is js i, 40 | NatCyclePerm (i::is) js -> NatCyclePerm (is++[i]) js. 41 | ]] 42 | 43 | When the members of a set are distinct, it becomes easier to prove 44 | properties about permutations. In particular, we can prove this strong 45 | statement: 46 | 47 | [[ 48 | Lemma ncp_all is js: 49 | NoDup is -> 50 | NoDup js -> 51 | (forall i, In i is <-> In i js) -> 52 | NatCyclePerm is js. 53 | ]] 54 | 55 | That is, if [is] and [js] are lists of distinct [nat]s, and they are subsets 56 | of each other (every element of [is] is in [js] and vice versa), then there 57 | exists a cycle-based permutation between [is] and [js]. 58 | 59 | But how can we go from permutations of [nat]s to permutations of anything? 60 | We use this function, which looks up the [i]th element of a list: 61 | 62 | [[ 63 | Fixpoint map_nth {A} is l : list A := 64 | match is with 65 | | [] => [] 66 | | i::is' => match nth_error l i with 67 | | Some a => a :: map_nth is' l 68 | | None => [] 69 | end 70 | end. 71 | ]] 72 | 73 | Working through the proof still involved a lot of excitement, and (as almost 74 | every Coq development seems to require) proving facts about lists. A critical 75 | lemma was [NoDup_nat_complete], which allows us to assert the existence of 76 | a [NatCyclePerm] at an important point. 77 | 78 | *) 79 | 80 | 81 | (* ** Facts about [In] *) 82 | 83 | Section InFacts. 84 | Context {A:Type}. 85 | Implicit Types l:list A. 86 | 87 | Lemma in_middle a l1 l2: 88 | In a (l1 ++ a :: l2). 89 | 90 | induction l1; intros; simpl; [ left | right ]; auto. 91 | Qed. 
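(* A quick usage check for [in_middle] (an illustrative aside; the
   example name is ours). *)
Example in_middle_demo (x y z:A):
  In y ([x] ++ y :: [z]).

  apply in_middle.
Qed.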
92 | 93 | Lemma in_add_middle a b l1 l2: 94 | In a (l1 ++ l2) -> 95 | In a (l1 ++ b :: l2). 96 | 97 | intros; apply in_app_or in H; intuition. 98 | Qed. 99 | 100 | Lemma in_remove_middle a b l1 l2: 101 | In a (l1 ++ b :: l2) -> 102 | a <> b -> 103 | In a (l1 ++ l2). 104 | 105 | intros; apply in_app_or in H; intuition. 106 | destruct H1; intuition. 107 | Qed. 108 | 109 | Lemma in_remove_left a l1 l2: 110 | In a (l1 ++ l2) -> 111 | ~ In a l1 -> 112 | In a l2. 113 | 114 | intros; apply in_app_or in H; intuition. 115 | Qed. 116 | 117 | Lemma in_remove_right a l1 l2: 118 | In a (l1 ++ l2) -> 119 | ~ In a l2 -> 120 | In a l1. 121 | 122 | intros; apply in_app_or in H; intuition. 123 | Qed. 124 | 125 | End InFacts. 126 | 127 | 128 | (** ** [AllLess] 129 | 130 | This predicate says that everything in a [list nat] is less than 131 | a number. We could’ve written it as [Forall (ge n)]. *) 132 | 133 | Section AllLess. 134 | 135 | Definition AllLess n l := 136 | forall i, In i l -> i < n. 137 | 138 | (** These facts make [AllLess] easier to work with in proofs. *) 139 | Lemma AllLess_cons n l i: 140 | AllLess n l -> i < n -> AllLess n (i :: l). 141 | 142 | unfold AllLess; intros. 143 | destruct H1; intuition. 144 | Qed. 145 | 146 | Lemma AllLess_skip {n l i}: 147 | AllLess n (i :: l) -> AllLess n l. 148 | 149 | unfold AllLess; intros; apply H; intuition. 150 | Qed. 151 | 152 | Lemma AllLess_app_1 {n l1 l2}: 153 | AllLess n (l1 ++ l2) -> AllLess n l1. 154 | 155 | unfold AllLess; intros; apply H; intuition. 156 | Qed. 157 | 158 | Lemma AllLess_app_2 {n l1 l2}: 159 | AllLess n (l1 ++ l2) -> AllLess n l2. 160 | 161 | unfold AllLess; intros; apply H; intuition. 162 | Qed. 163 | 164 | Lemma AllLess_zero_nil l: 165 | AllLess 0 l -> l = []. 166 | 167 | destruct l; intros; auto. 168 | assert (n < 0) by (apply H; intuition). 169 | omega. 170 | Qed. 171 | 172 | End AllLess. 173 | 174 | 175 | (** ** Facts about [map_nth] *) 176 | 177 | Fixpoint map_nth {A} is l : list A := 178 | match is with 179 | | [] => [] 180 | | i::is' => match nth_error l i with 181 | | Some a => a :: map_nth is' l 182 | | None => [] 183 | end 184 | end. 185 | 186 | Section MapNthFacts. 187 | Context {A:Type}. 188 | Implicit Types l:list A. 189 | Implicit Types is js:list nat. 190 | 191 | (* A useful Ltac tactic *) 192 | Ltac map_nth_head := 193 | match goal with 194 | | [ H : context [ AllLess (length ?l) (?a :: ?is) ] |- _ ] => 195 | let M := fresh "SO" in 196 | let ne := fresh "ne" in 197 | assert (a < length l) as M by (apply H; intuition); 198 | rewrite <- nth_error_Some in M; simpl; 199 | remember (nth_error l a) as ne; destruct ne; [ clear M | contradiction ] 200 | end. 201 | 202 | 203 | (** Easy facts about [map_nth] *) 204 | 205 | Lemma in_map_nth_position is l a: 206 | In a (map_nth is l) -> 207 | exists i, In i is /\ i < length l /\ nth_error l i = Some a. 208 | Admitted. 209 | 210 | Lemma in_map_nth is l a: 211 | In a (map_nth is l) -> 212 | In a l. 213 | 214 | intros; apply in_map_nth_position in H. 215 | destruct H as [i [X [L Y]]]. 216 | now apply nth_error_In in Y. 217 | Qed. 218 | 219 | Lemma map_nth_empty is: 220 | map_nth is ([]:list A) = ([]:list A). 221 | Admitted. 222 | 223 | Lemma map_nth_app is1 is2 l: 224 | AllLess (length l) is1 -> 225 | map_nth (is1 ++ is2) l = map_nth is1 l ++ map_nth is2 l. 226 | Admitted. 227 | 228 | Lemma map_nth_cons is l a: 229 | map_nth (map S is) (a :: l) = map_nth is l. 230 | Admitted. 
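(* To see what [map_nth] computes, here is a concrete check (an
   illustrative aside): the index list [2; 0; 1] picks out the third,
   first, and second elements, so this evaluates to [12; 10; 11]. *)
Eval compute in (map_nth [2; 0; 1] [10; 11; 12]).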
231 | 232 | Lemma map_nth_length is l: 233 | AllLess (length l) is <-> 234 | length is = length (map_nth is l). 235 | Admitted. 236 | 237 | 238 | (* NoDup is Coq’s inductive type representing the absence of 239 | duplicates in a list. *) 240 | Local Hint Constructors NoDup. 241 | 242 | Lemma map_nth_NoDup is l: 243 | NoDup is -> 244 | NoDup l -> 245 | NoDup (map_nth is l). 246 | Admitted. 247 | 248 | Lemma map_nth_nth is i j l: 249 | nth_error is i = Some j -> 250 | AllLess (length l) is -> 251 | nth_error (map_nth is l) i = nth_error l j. 252 | Admitted. 253 | 254 | (** A critical lemma for proving [Perm2] transitivity. *) 255 | Lemma map_nth_compose is js l: 256 | AllLess (length js) is -> 257 | AllLess (length l) js -> 258 | map_nth is (map_nth js l) = map_nth (map_nth is js) l. 259 | Admitted. 260 | 261 | End MapNthFacts. 262 | 263 | 264 | (** ** Facts about [NoDup] *) 265 | 266 | Section NoDupFacts. 267 | Context {A:Type}. 268 | Implicit Types a b:A. 269 | Implicit Types l:list A. 270 | 271 | Lemma in_NoDup_inv a b l: 272 | NoDup (b :: l) -> 273 | a <> b -> 274 | In a (b :: l) -> 275 | In a l. 276 | 277 | intros ND neq I. 278 | inversion ND; destruct I; intuition. 279 | Qed. 280 | 281 | Lemma NoDup_app_swap l1 l2: 282 | NoDup (l1 ++ l2) <-> NoDup (l2 ++ l1). 283 | 284 | induction l1. 285 | - simpl; rewrite app_nil_r; intuition. 286 | - rewrite <- app_comm_cons; split; intro ND. 287 | + inversion ND; subst. 288 | assert (Add a (l2 ++ l1) (l2 ++ a :: l1)) by (apply Add_app). 289 | rewrite (NoDup_Add H). 290 | rewrite <- IHl1; split; auto. 291 | contradict H1; rewrite in_app_iff in *; intuition. 292 | + apply NoDup_remove in ND; destruct ND as [ND NI]. 293 | apply NoDup_cons. 294 | contradict NI; rewrite in_app_iff in *; intuition. 295 | rewrite IHl1; assumption. 296 | Qed. 297 | 298 | Lemma NoDup_remove_iff a x1 x2 y: 299 | NoDup (x1 ++ a :: x2) -> 300 | NoDup (a :: y) -> 301 | (forall i, In i (x1 ++ a :: x2) <-> In i (a :: y)) -> 302 | forall i, In i (x1 ++ x2) <-> In i y. 303 | 304 | intros NDx NDy EQ i. 305 | apply NoDup_remove in NDx. 306 | destruct NDx as [NDx NIx]. 307 | inversion NDy; subst. 308 | 309 | split; intros. 310 | - apply in_remove_left with (l1:=[a]). 311 | apply EQ. 312 | now apply in_add_middle. 313 | contradict NIx. 314 | inversion NIx; subst; intuition. 315 | - apply in_remove_middle with (b:=a). 316 | apply EQ. 317 | intuition. 318 | contradict H1. 319 | subst; assumption. 320 | Qed. 321 | 322 | Lemma NoDup_in_iff_length l1 l2: 323 | NoDup l1 -> 324 | NoDup l2 -> 325 | (forall i, In i l1 <-> In i l2) -> 326 | length l1 = length l2. 327 | 328 | intros NX; revert l2; induction NX; 329 | intros l2 NY HI. 330 | - destruct l2; auto. 331 | assert (In a (a :: l2)) by intuition. 332 | rewrite <- HI in H; destruct H. 333 | - assert (In x l2) by (rewrite <- HI; intuition). 334 | apply in_split in H0. 335 | destruct H0 as [m1 [m2 Hm2]]; subst. 336 | rewrite app_length; simpl. 337 | rewrite <- plus_n_Sm; apply eq_S. 338 | rewrite <- app_length. 339 | apply IHNX. 340 | apply NoDup_remove_1 with (a:=x); auto. 341 | symmetry; revert i. 342 | apply NoDup_remove_iff with (a:=x); auto. 343 | now constructor. 344 | symmetry; revert i; now assumption. 345 | Qed. 346 | 347 | End NoDupFacts. 348 | 349 | 350 | 351 | (** ** Facts about [iota] 352 | 353 | [iota n] counts from 0 to [n - 1], and is defined in terms of 354 | the library’s [seq]. *) 355 | 356 | Definition iota n := seq 0 n. 357 | 358 | Section IotaFacts. 359 | 360 | Lemma in_iota_iff n i: 361 | In i (iota n) <-> i < n. 
362 | Admitted. 363 | 364 | Lemma iota_NoDup n: 365 | NoDup (iota n). 366 | 367 | now apply seq_NoDup. 368 | Qed. 369 | 370 | Lemma iota_length n: 371 | length (iota n) = n. 372 | 373 | now apply seq_length. 374 | Qed. 375 | 376 | Lemma iota_AllLess n: 377 | AllLess n (iota n). 378 | 379 | unfold AllLess; intros; now apply in_iota_iff. 380 | Qed. 381 | 382 | Lemma seq_app start len1 len2: 383 | seq start (len1 + len2) = seq start len1 ++ seq (start + len1) len2. 384 | 385 | revert start len2; induction len1; intros. 386 | - simpl; auto. 387 | - cbn; rewrite IHlen1; rewrite plus_Sn_m; rewrite <- plus_n_Sm; reflexivity. 388 | Qed. 389 | 390 | Lemma iota_nth n i: 391 | i < n -> nth_error (iota n) i = Some i. 392 | Admitted. 393 | 394 | Lemma iota_S n: 395 | iota (S n) = 0 :: map S (iota n). 396 | 397 | unfold iota; rewrite seq_shift; reflexivity. 398 | Qed. 399 | 400 | Lemma map_nth_iota {A} (xs:list A): 401 | map_nth (iota (length xs)) xs = xs. 402 | 403 | induction xs; cbn in *; auto. 404 | rewrite <- seq_shift. 405 | rewrite map_nth_cons. 406 | unfold iota in IHxs; now rewrite IHxs. 407 | Qed. 408 | 409 | End IotaFacts. 410 | 411 | 412 | (** ** [NoDup_nat_complete] 413 | 414 | This section states and proves this lemma: If a list [l] contains 415 | [n] [nat]s with no duplicates, and all elements of [l] are less 416 | than [n], then [l] contains _every_ [nat] less than [n]. We need 417 | it to prove that [Perm2]s are transitive. 418 | 419 | This is too hard to prove in a single induction, so we introduce 420 | a [transfer] type to let us state the induction-aware lemma, 421 | [NoDup_AllLess_transfer]. *) 422 | 423 | Section NoDupNatComplete. 424 | 425 | Inductive transfer {A}: list A -> list A -> list A -> Prop := 426 | | transfer_nil : forall ys, transfer [] ys ys 427 | | transfer_cons : forall xs ys1 y ys2 zs, 428 | transfer xs (ys1 ++ y :: ys2) zs -> 429 | transfer (y :: xs) (ys1 ++ ys2) zs. 430 | Hint Constructors transfer. 431 | 432 | Lemma in_transfer_iff {A} (xs ys zs:list A) a: 433 | transfer xs ys zs -> 434 | In a zs <-> In a (xs ++ ys). 435 | Admitted. 436 | 437 | Lemma transfer_length {A} (xs ys zs:list A): 438 | transfer xs ys zs -> 439 | length zs = length xs + length ys. 440 | Admitted. 441 | 442 | Lemma in_transfer_rest {A} (xs ys zs:list A) a: 443 | transfer xs ys zs -> 444 | In a zs -> 445 | ~ In a xs -> 446 | In a ys. 447 | Admitted. 448 | 449 | 450 | Lemma NoDup_AllLess_transfer n xs: 451 | NoDup xs -> 452 | AllLess n xs -> 453 | exists ys, transfer xs ys (iota n). 454 | Admitted. 455 | 456 | Lemma NoDup_nat_complete i is: 457 | NoDup is -> 458 | AllLess (length is) is -> 459 | i < length is -> 460 | In i is. 461 | Admitted. 462 | 463 | End NoDupNatComplete. 464 | 465 | 466 | (** ** [NatCyclePerm] 467 | 468 | This is the cycle-based permutation definition that lets us show 469 | reflexivity, symmetry, and transitivity without further 470 | assumptions. *) 471 | 472 | Section NatCyclePerm. 473 | 474 | Inductive NatCyclePerm : list nat -> list nat -> Prop := 475 | | ncp_nil : NatCyclePerm [] [] 476 | | ncp_cons : forall is js i, 477 | NatCyclePerm is js -> 478 | ~ In i is -> 479 | NatCyclePerm (i::is) (i::js) 480 | | ncp_cycle : forall is js i, 481 | NatCyclePerm (i::is) js -> NatCyclePerm (is++[i]) js. 482 | Hint Constructors NatCyclePerm. 483 | 484 | Lemma ncp_length_eq {is js}: 485 | NatCyclePerm is js -> 486 | length is = length js. 487 | 488 | intro PE; induction PE; auto. 489 | now apply eq_S. 490 | rewrite app_length; simpl in *. omega. 491 | Qed. 
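(* An illustrative aside (the example name is ours): a tiny concrete
   [NatCyclePerm]. We first relate [2; 1] to itself by peeling off
   matching heads, then use [ncp_cycle] to move the head 2 of the
   left-hand list to the back, giving [1; 2]. *)
Example ncp_swap_example: NatCyclePerm [1; 2] [2; 1].

  change [1; 2] with ([1] ++ [2]).
  apply ncp_cycle.
  apply ncp_cons.
  - apply ncp_cons.
    + apply ncp_nil.
    + simpl; tauto.
  - simpl; intros [H | H]; [ discriminate H | destruct H ].
Qed.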
492 | 493 | Lemma ncp_eq_nil is: 494 | NatCyclePerm is [] -> 495 | is = []. 496 | 497 | intros; apply ncp_length_eq in H. 498 | simpl; apply length_zero_iff_nil; subst; auto. 499 | Qed. 500 | 501 | 502 | Lemma ncp_refl is: 503 | NoDup is -> 504 | NatCyclePerm is is. 505 | 506 | induction is; auto; intros. 507 | constructor. 508 | - apply IHis; inversion H; auto. 509 | - inversion H; auto. 510 | Qed. 511 | 512 | Lemma ncp_app_cycle is1 is2 js: 513 | NatCyclePerm (is1 ++ is2) js -> 514 | NatCyclePerm (is2 ++ is1) js. 515 | 516 | revert is2; induction is1; intros. 517 | simpl; rewrite app_nil_r; auto. 518 | replace (is2 ++ a :: is1) with ((is2 ++ [a]) ++ is1). 519 | apply IHis1; rewrite app_assoc; apply ncp_cycle; apply H. 520 | rewrite <- app_assoc; apply eq_refl. 521 | Qed. 522 | 523 | (** This important lemma lets us construct a [NatCyclePerm] 524 | from _any_ [nat] lists that meet some simple conditions. *) 525 | Lemma ncp_all is js: 526 | NoDup is -> 527 | NoDup js -> 528 | (forall i, In i is <-> In i js) -> 529 | NatCyclePerm is js. 530 | Admitted. 531 | 532 | Lemma ncp_expand is js: 533 | NatCyclePerm is js -> 534 | NoDup is 535 | /\ NoDup js 536 | /\ (forall i, In i is <-> In i js). 537 | Admitted. 538 | 539 | Lemma ncp_NoDup_is {is js}: 540 | NatCyclePerm is js -> NoDup is. 541 | 542 | intros PE; apply ncp_expand in PE; intuition. 543 | Qed. 544 | 545 | Lemma ncp_NoDup_js {is js}: 546 | NatCyclePerm is js -> NoDup js. 547 | 548 | intros PE; apply ncp_expand in PE; intuition. 549 | Qed. 550 | 551 | Lemma ncp_in_iff {is js}: 552 | NatCyclePerm is js -> forall i, In i is <-> In i js. 553 | 554 | intro PE; apply ncp_expand in PE; intuition. 555 | Qed. 556 | 557 | 558 | 559 | (** Once we have [ncp_expand] and [ncp_all], we can prove symmetry 560 | and transitivity. *) 561 | 562 | Lemma ncp_sym is js: 563 | NatCyclePerm is js -> 564 | NatCyclePerm js is. 565 | 566 | intros. 567 | apply ncp_expand in H. 568 | destruct_conjs. 569 | apply ncp_all; auto. 570 | split; intros; now apply H1. 571 | Qed. 572 | 573 | Lemma ncp_trans is js ks: 574 | NatCyclePerm is js -> 575 | NatCyclePerm js ks -> 576 | NatCyclePerm is ks. 577 | 578 | intros. 579 | apply ncp_expand in H. 580 | apply ncp_expand in H0. 581 | destruct_conjs. 582 | apply ncp_all; auto. 583 | split; intros. 584 | apply H2; now apply H4. 585 | apply H4; now apply H2. 586 | Qed. 587 | 588 | 589 | (** Given symmetry and transitivity, we can prove the equivalence 590 | of the library’s [Permutation] and [NatCyclePerm] (for 591 | duplicate-free lists). *) 592 | 593 | Lemma perm_ncp is js: 594 | NoDup is -> 595 | Permutation is js -> 596 | NatCyclePerm is js. 597 | Admitted. 598 | 599 | Lemma ncp_perm is js: 600 | NatCyclePerm is js -> 601 | Permutation is js. 602 | Admitted. 603 | 604 | End NatCyclePerm. 605 | 606 | 607 | (** ** [Perm2] Facts 608 | 609 | We are finally ready to state the [Perm2] definition 610 | and prove it equivalent to [NatCyclePerm], and therefore 611 | [Permutation]. *) 612 | 613 | Section Perm2. 614 | Hint Constructors NatCyclePerm. 615 | 616 | Inductive Perm2 {A}: list A -> list A -> Prop := 617 | | perm2_nil : Perm2 [] [] 618 | | perm2_cons : forall x y a, 619 | Perm2 x y -> Perm2 (a :: x) (a :: y) 620 | | perm2_cycle : forall x y a, 621 | Perm2 (a :: x) y -> Perm2 (x ++ [a]) y. 622 | Hint Constructors Perm2. 623 | 624 | Lemma perm2_length_eq {A} (xs ys:list A): 625 | Perm2 xs ys -> 626 | length xs = length ys. 627 | 628 | intros PE; induction PE; auto. 629 | simpl; now apply eq_S. 
630 | rewrite app_length; rewrite <- IHPE; simpl. omega. 631 | Qed. 632 | 633 | 634 | Lemma ncp_perm2 {A} is js (ctx:list A): 635 | NatCyclePerm is js -> 636 | AllLess (length ctx) is -> 637 | Perm2 (map_nth is ctx) (map_nth js ctx). 638 | Admitted. 639 | 640 | Lemma perm2_np {A} (xs ys:list A): 641 | Perm2 xs ys -> 642 | exists is, NatCyclePerm is (iota (length xs)) 643 | /\ map_nth is ys = xs. 644 | Admitted. 645 | 646 | 647 | Lemma perm2_refl {A} (xs:list A): 648 | Perm2 xs xs. 649 | 650 | rewrite <- (map_nth_iota xs). 651 | apply ncp_perm2. 652 | apply ncp_refl. 653 | apply iota_NoDup. 654 | apply iota_AllLess. 655 | Qed. 656 | 657 | Lemma perm2_sym {A} (xs ys:list A): 658 | Perm2 xs ys -> Perm2 ys xs. 659 | Admitted. 660 | 661 | (* Transitivity in [Perm2] doesn’t follow immediately from 662 | transitivity in [NatCyclePerm], for a funny reason. The 663 | [perm2_np] lemma lets us construct [NatCyclePerm]s 664 | between [xs] and [ys], and between [ys] and [zs]---but 665 | unfortunately, those [NatCyclePerm]s are unrelated! 666 | (The [nat] list used to represent [ys] differs.) So we 667 | need to add another link to the transitive chain. 668 | That’s what NoDup_nat_complete is for. *) 669 | 670 | Lemma perm2_trans {A} (xs ys zs:list A): 671 | Perm2 xs ys -> 672 | Perm2 ys zs -> 673 | Perm2 xs zs. 674 | Admitted. 675 | 676 | End Perm2. 677 | -------------------------------------------------------------------------------- /exsolutions/forwardsimulation.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Require Import Relation_Operators. 7 | 8 | Section ForwardSimulation. 9 | 10 | (* S is the type of a specification. *) 11 | Variable S:Type. 12 | 13 | (* I is the type of an implementation state. *) 14 | Variable I:Type. 15 | 16 | (* This function takes a single deterministic step on an 17 | implementation state. *) 18 | Variable istep:I -> I. 19 | 20 | (* This relation represents a nondeterministic step on the 21 | specification. *) 22 | Variable sstep:S -> S -> Prop. 23 | 24 | (* This relation says whether an implementation state matches a 25 | specification. *) 26 | Variable ispec:I -> S -> Prop. 27 | 28 | 29 | (* Kleene-star on `istep`: `isteps i i'` is True iff you can get 30 | from i to i' in zero or more `istep`s. *) 31 | Inductive isteps: I -> I -> Prop := 32 | | isteps_refl: forall i, isteps i i 33 | | isteps_cons: forall i i', isteps i i' -> isteps i (istep i'). 34 | 35 | (* Kleene-star on `sstep`: `ssteps s s'` is True iff you can get 36 | from s to s' in zero or more `sstep`s. *) 37 | Inductive ssteps: S -> S -> Prop := 38 | | ssteps_refl: forall s, ssteps s s 39 | | ssteps_cons: forall a b c, ssteps a b -> sstep b c -> ssteps a c. 40 | 41 | 42 | (* Prove this lemma. *) 43 | Lemma ssteps_trans (a b c:S): 44 | ssteps a b -> ssteps b c -> ssteps a c. 45 | 46 | intros H1 H2; revert a H1; induction H2; intros; auto. 47 | apply IHssteps in H1; now apply ssteps_cons with (b:=b). 48 | Qed. 49 | 50 | 51 | (* Now, *state* the definition of forward simulation. 52 | 53 | There are many forward simulation definitions. The one we want is 54 | that single deterministic implementation steps are matched by 55 | zero or more specification steps. 56 | 57 | So `PS`, a Prop over specifications, is a forward simulation iff: 58 | 59 | 1. For every specification `s` that fits `PS` (i.e. `PS s` is 60 | True), 61 | 2. 
And every implementation `i` that maps to `s` under 62 | `ispec`, 63 | 3. When implementation `i` takes a step via `istep`, 64 | 4. That next implementation maps to some specification `s'` that 65 | also fits `PS`. *) 66 | 67 | Definition ForwardSimulation (PS:S -> Prop) := 68 | forall i s, 69 | PS s -> 70 | ispec i s -> 71 | exists s', ispec (istep i) s' /\ PS s' /\ ssteps s s'. 72 | Hint Unfold ForwardSimulation. 73 | 74 | 75 | (* Now, state and prove this lemma, which says that forward 76 | simulation is preserved over MULTIPLE implementation steps 77 | (isteps rather than istep). *) 78 | Lemma ForwardSimulation_star 79 | (PS:S -> Prop) 80 | (fsim:ForwardSimulation PS) 81 | (i i':I) (s:S): 82 | PS s -> 83 | ispec i s -> 84 | isteps i i' -> 85 | exists s':S, ispec i' s' /\ PS s' /\ ssteps s s'. 86 | 87 | intros Hps Hispec Histeps. 88 | induction Histeps. 89 | 90 | - exists s; repeat split; auto. 91 | apply ssteps_refl. 92 | - apply IHHisteps in Hispec. 93 | inversion Hispec; destruct_pairs; subst. 94 | 95 | pose (fsim i' x H0 H). 96 | destruct e; destruct_pairs. 97 | exists x0; repeat split; auto. 98 | apply ssteps_trans with (b:=x); auto. 99 | Qed. 100 | 101 | End ForwardSimulation. 102 | -------------------------------------------------------------------------------- /exsolutions/group.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Require Import Classical. 7 | 8 | (* A *group* is an algebraic structure comprising a set of elements 9 | `T` and a binary operation `f` and satisfying four laws: 10 | 11 | 1. CLOSURE: If `x, y ∈ T`, then `f x y ∈ T`. 12 | 2. ASSOCIATIVITY: `f x (f y z) = f (f x y) z`. 13 | 3. RIGHT IDENTITY: There exists an identity element `e ∈ T` 14 | where `f x e = x` for all `x`. 15 | 4. RIGHT INVERSE: Every element `x` has a right inverse, `y`, 16 | so that `f x y = e`. 17 | 18 | We can express this in Coq like so, using a *type class*. The 19 | type class is parameterized on the set `T`, the operation `f`, 20 | the right-identity element `e`, and the inverse function `i`. 21 | Then there are three laws that must hold. *) 22 | 23 | Class Group {T} {f:T -> T -> T} {e:T} {i:T -> T} := { 24 | gassoc : forall a b c:T, f a (f b c) = f (f a b) c; 25 | grightid : forall a:T, f a e = a; 26 | grightinv : forall a:T, f a (i a) = e 27 | }. 28 | 29 | 30 | (* To show that a given set and operation form a group, we create an 31 | *instance* of the Group type class. 32 | 33 | Let’s show that addition on integers (the Coq `Z` type) is a group. 34 | The identity element is 0 (`Z0`), and the inverse function is 35 | `Z.opp` (Coq’s name for the unary negative operator). 36 | 37 | To declare the type instance, we first give its arguments: 38 | `Z Z.add Z0 Z.opp`. Then we need to show that the group laws hold. 39 | Here, we do that using a `Proof` environment, which opens three 40 | subgoals, one per law. *) 41 | 42 | Instance Z_Group: @Group Z Z.add Z0 Z.opp := { }. 43 | Proof. 44 | - apply Z.add_assoc. 45 | - apply Z.add_0_r. 46 | - apply Z.add_opp_diag_r. 47 | Qed. 48 | 49 | 50 | (* Given the `Z_Group` instance, we can prove facts about `Z`. *) 51 | Lemma Z_add_assoc x y z: 52 | (x + (y + z))%Z = (x + y + z)%Z. 53 | 54 | apply gassoc. (* Magic! Coq finds `Z_Group`. *) 55 | Qed. 56 | 57 | 58 | (* But this becomes much more interesting if we can prove some facts 59 | about *all* groups. 
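   The `Section` below fixes an arbitrary carrier `T`, operation `f`,
   identity `e`, inverse `i`, and a `Group` instance `G`; every lemma
   proved inside is generalized over all of them when the section
   closes.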
*) 60 | 61 | Section GroupFacts. 62 | Variable T:Type. 63 | Variable f:T -> T -> T. 64 | Variable e:T. 65 | Variable i:T -> T. 66 | Variable G:@Group T f e i. 67 | 68 | Infix "*" := f. 69 | 70 | (* Gain local names for the group laws on group G. *) 71 | Local Definition Gassoc := @gassoc _ _ _ _ G. 72 | Local Definition Grightinv := @grightinv _ _ _ _ G. 73 | Local Definition Grightid := @grightid _ _ _ _ G. 74 | 75 | Set Implicit Arguments. 76 | 77 | Lemma mult_both a b c d1 d2: 78 | a * c = d1 -> 79 | b * c = d2 -> 80 | a = b -> 81 | d1 = d2. 82 | 83 | intros; rewrite <- H; rewrite <- H0; rewrite H1; auto. 84 | Qed. 85 | 86 | 87 | (* Your turn! These generic laws are true for all groups. *) 88 | Lemma characterizingid a: 89 | a * a = a -> 90 | a = e. 91 | 92 | intros. 93 | rewrite <- (Grightid a). 94 | rewrite <- (Grightinv a). 95 | rewrite gassoc. 96 | rewrite H. 97 | auto. 98 | Qed. 99 | 100 | Lemma leftinv a: 101 | i a * a = e. 102 | 103 | apply characterizingid. 104 | rewrite gassoc. 105 | rewrite <- (gassoc (i a) a). 106 | rewrite Grightinv. 107 | rewrite Grightid. 108 | auto. 109 | Qed. 110 | 111 | Lemma leftid a: 112 | e * a = a. 113 | 114 | rewrite <- (Grightinv a). 115 | rewrite <- Gassoc. 116 | rewrite leftinv. 117 | rewrite Grightid. 118 | auto. 119 | Qed. 120 | 121 | Lemma leftid_uniq p a: 122 | p * a = a -> 123 | p = e. 124 | 125 | intros. 126 | rewrite <- Grightid at 1. 127 | rewrite <- (Grightinv a). 128 | rewrite Gassoc. 129 | rewrite H. 130 | auto. 131 | Qed. 132 | 133 | Lemma rightinv_uniq a b: 134 | a * b = e -> 135 | b = i a. 136 | 137 | intros. 138 | rewrite <- (Grightid (i a)). 139 | rewrite <- H. 140 | rewrite Gassoc. 141 | rewrite (leftinv a). 142 | rewrite leftid. 143 | auto. 144 | Qed. 145 | 146 | Lemma leftinv_uniq a b: 147 | a * b = e -> 148 | a = i b. 149 | 150 | intros. 151 | rewrite <- (leftid (i b)). 152 | rewrite <- H. 153 | rewrite <- Gassoc. 154 | rewrite Grightinv. 155 | rewrite Grightid. 156 | auto. 157 | Qed. 158 | 159 | Lemma right_cancel a b x: 160 | a * x = b * x -> 161 | a = b. 162 | 163 | intros. 164 | rewrite <- Grightid at 1. 165 | rewrite <- Grightid. 166 | rewrite <- (Grightinv x) at 1. 167 | rewrite <- (Grightinv x) at 1. 168 | rewrite Gassoc. 169 | rewrite H. 170 | rewrite <- Gassoc. 171 | auto. 172 | Qed. 173 | 174 | Lemma left_cancel a b x: 175 | x * a = x * b -> 176 | a = b. 177 | 178 | intros. 179 | rewrite <- leftid. 180 | rewrite <- leftid at 1. 181 | rewrite <- (leftinv x). 182 | rewrite <- Gassoc. 183 | rewrite H. 184 | rewrite Gassoc. 185 | auto. 186 | Qed. 187 | 188 | Lemma inv_distrib a b: 189 | i (a * b) = i b * i a. 190 | 191 | apply right_cancel with (x:=(a * b)). 192 | rewrite leftinv. 193 | rewrite <- Gassoc. 194 | rewrite (Gassoc (i a)). 195 | rewrite leftinv. 196 | rewrite leftid. 197 | rewrite leftinv. 198 | auto. 199 | Qed. 200 | 201 | Lemma double_inv a: 202 | i (i a) = a. 203 | 204 | rewrite <- Grightid. 205 | rewrite <- Grightid at 1. 206 | rewrite <- (Grightinv (i a)). 207 | rewrite Gassoc. 208 | rewrite leftinv. 209 | rewrite leftid. 210 | rewrite Gassoc. 211 | rewrite Grightinv. 212 | rewrite leftid. 213 | auto. 214 | Qed. 215 | 216 | Lemma id_inv: 217 | i e = e. 218 | 219 | rewrite <- leftid at 1. 220 | rewrite Grightinv. 221 | auto. 222 | Qed. 223 | 224 | Unset Implicit Arguments. 225 | End GroupFacts. 226 | 227 | 228 | (* Now these *general* group facts can be used to prove properties 229 | about any group, such as Z. *) 230 | 231 | Lemma Z_inv_distrib (a b:Z): 232 | (- (a + b))%Z = (- b + - a)%Z. 
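(* The generic [inv_distrib] fact specializes to Z once we name the
   instance; Coq infers the carrier and operation from `Z_Group`. *)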
233 | 234 | apply (inv_distrib Z_Group). 235 | Qed. 236 | 237 | 238 | (* But of course there are many other groups. We now show that 239 | addition modulo 32 is a group. *) 240 | 241 | (* `Z32` is the set `{0, 1, ..., 31}`. We write it as a dependent 242 | type. Constructing a `Z32` requires two things: a `nat` and a proof 243 | that the `nat` is less than 32. *) 244 | Inductive Z32 : Set := 245 | | z32 : forall n:nat, n < 32 -> Z32. 246 | Hint Constructors Z32. 247 | 248 | Check proof_irrelevance. 249 | 250 | (* Two Z32s are equal iff their `nat` components are equal. 251 | The proof of this lemma uses classical logic. *) 252 | Lemma z32_proof_irrelevance n pf1 p pf2: 253 | n = p -> z32 n pf1 = z32 p pf2. 254 | 255 | intros. 256 | subst. 257 | assert (pf1 = pf2) by (apply proof_irrelevance). 258 | subst; auto. 259 | Qed. 260 | 261 | 262 | (* Addition on integers (mod 32) *) 263 | Definition z32_add: Z32 -> Z32 -> Z32. 264 | (* What we want to do: given `x y`, return `(x + y) mod 32`. 265 | But in order to construct the answer as a Z32, we must provide a 266 | proof that the returned value’s `nat` component is less than 32. 267 | It’s easiest to do this by editing a proof. *) 268 | refine (fun x y => match (x, y) with 269 | | (z32 n _, z32 p _) => z32 ((n + p) mod 32) _ 270 | end). 271 | apply Nat.mod_upper_bound. 272 | discriminate. 273 | Defined. 274 | 275 | Definition z32_1 : Z32. 276 | refine (z32 1 _). 277 | omega. 278 | Defined. 279 | 280 | Definition z32_2 : Z32. 281 | refine (z32 2 _). 282 | omega. 283 | Defined. 284 | 285 | Definition z32_31 : Z32. 286 | refine (z32 31 _). 287 | omega. 288 | Defined. 289 | 290 | Definition z32_32 : Z32. 291 | refine (z32 32 _). 292 | Abort. 293 | 294 | Eval compute in (z32_add z32_31 z32_1). 295 | Extraction z32_add. 296 | 297 | (* Addition on integers mod 32 is associative *) 298 | Lemma z32_add_assoc x y z: 299 | z32_add x (z32_add y z) = z32_add (z32_add x y) z. 300 | 301 | destruct x, y, z; unfold z32_add. 302 | apply z32_proof_irrelevance. 303 | rewrite Nat.add_mod_idemp_l by omega. 304 | rewrite Nat.add_mod_idemp_r by omega. 305 | rewrite plus_assoc; auto. 306 | Qed. 307 | 308 | 309 | (* Zero (mod 32) *) 310 | Definition z32_0 : Z32. 311 | refine (z32 0 _). 312 | omega. 313 | Defined. 314 | 315 | 316 | (* Inverse on integers (mod 32) *) 317 | Definition z32_inv: Z32 -> Z32. 318 | refine (fun x => match x with 319 | | (z32 0 _) => x 320 | | (z32 (S n) pf) => z32 (32 - (S n)) _ 321 | end). 322 | omega. 323 | Defined. 324 | 325 | (* Inverse is an inverse *) 326 | Lemma z32_add_inv_diag x: 327 | z32_add x (z32_inv x) = z32_0. 328 | remember (z32_inv x) as y; destruct x, y. 329 | simpl; apply z32_proof_irrelevance. 330 | destruct n. 331 | - simpl in Heqy; inversion Heqy; auto. 332 | - assert (n0 = 32 - S n) by (simpl in Heqy; inversion Heqy; auto). 333 | subst. 334 | rewrite <- le_plus_minus by omega. 335 | apply Nat.mod_same; discriminate. 336 | Qed. 337 | 338 | 339 | (* Now we are ready to show that Z32 is a group. *) 340 | Instance Z32_Group : @Group Z32 z32_add z32_0 z32_inv := { }. 341 | Proof. 342 | - apply z32_add_assoc. 343 | - intros; unfold z32_add; destruct a. 344 | apply z32_proof_irrelevance. 345 | rewrite <- plus_n_O. 346 | apply Nat.mod_small; auto. 347 | - apply z32_add_inv_diag. 348 | Qed. 349 | 350 | 351 | (* Which lets us easily prove facts about Z32. *) 352 | Lemma Z32_rightinv_uniq x y: 353 | z32_add x y = z32_0 -> 354 | y = z32_inv x. 355 | 356 | apply (rightinv_uniq Z32_Group). 357 | Qed. 
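(* One more example in the same style (an aside; the lemma name is
   ours): the generic [double_inv] fact also specializes to Z32. *)
Lemma Z32_double_inv x:
  z32_inv (z32_inv x) = x.

  apply (double_inv Z32_Group).
Qed.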
358 | -------------------------------------------------------------------------------- /l01.v: -------------------------------------------------------------------------------- 1 | Module Lecture1. 2 | Require Import Arith. 3 | Require Import List. 4 | 5 | Definition AnIntegerExists : nat. 6 | (* First prove by construction. Start with 7 | Definition AnIntegerExists : nat := _. 8 | and fill in the underscore. *) 9 | 10 | Lemma AnIntegerExistsB : nat. 11 | (* Then prove using tactics. Start with 12 | "Proof." and check out Coq's reports. 13 | Tactics: `apply`, `intros`, `split`, `destruct`. *) 14 | 15 | 16 | Definition Proj1 {A B:Prop} : A /\ B -> A. 17 | 18 | Lemma Proj1B {A B:Prop} : A /\ B -> A. 19 | 20 | 21 | (* Use `Locate`, `Check`, and `Print` to figure out 22 | how <-> works. Start with `Locate "_ <-> _".` *) 23 | Lemma ObjectivismB {A:Prop} : A <-> A. 24 | 25 | Definition Objectivism {A:Prop} : A <-> A. 26 | 27 | 28 | Lemma DistributeAnd {A B C:Prop} : 29 | A /\ (B \/ C) -> (A /\ B) \/ (A /\ C). 30 | 31 | End Lecture1. 32 | -------------------------------------------------------------------------------- /l01sol.v: -------------------------------------------------------------------------------- 1 | Module Lecture1. 2 | Require Import Arith. 3 | Require Import List. 4 | 5 | Definition AnIntegerExists : nat := 2. 6 | 7 | Lemma AnIntegerExistsB : nat. 8 | Proof. 9 | apply 0. 10 | Qed. 11 | 12 | Print nat. 13 | 14 | 15 | Locate "_ /\ _". 16 | Check and. 17 | Print and. 18 | 19 | Definition Proj1 {A B:Prop} : A /\ B -> A := 20 | fun (H:A /\ B) => 21 | match H with 22 | | conj x y => x 23 | end. 24 | 25 | Lemma Proj1B {A B:Prop} : A /\ B -> A. 26 | Proof. 27 | intros. 28 | destruct H. 29 | apply H. 30 | Qed. 31 | 32 | 33 | Locate "_ <-> _". 34 | Check iff. 35 | Print iff. 36 | Print and. 37 | 38 | Lemma ObjectivismB {A:Prop} : A <-> A. 39 | Proof. 40 | split. 41 | intros. 42 | apply H. 43 | intros. 44 | apply H. 45 | Qed. 46 | 47 | Lemma ObjectivismB_Semicolons {A:Prop} : A <-> A. 48 | Proof. 49 | split; intros; apply H. 50 | Qed. 51 | 52 | Lemma ObjectivismB_Auto {A:Prop} : A <-> A. 53 | Proof. 54 | split; auto. 55 | Qed. 56 | 57 | Definition Objectivism {A:Prop} : A <-> A := 58 | conj (fun x => x) (fun x => x). 59 | 60 | 61 | Lemma DistributeAnd {A B C:Prop} : 62 | A /\ (B \/ C) -> (A /\ B) \/ (A /\ C). 63 | Proof. 64 | intros. 65 | destruct H. 66 | destruct H0. 67 | left. 68 | split. 69 | apply H. 70 | apply H0. 71 | right. 72 | split. 73 | apply H. 74 | apply H0. 75 | Qed. 76 | 77 | Lemma DistributeAnd_Semicolons {A B C:Prop} : 78 | A /\ (B \/ C) -> (A /\ B) \/ (A /\ C). 79 | Proof. 80 | intros. 81 | destruct H. 82 | destruct H0; 83 | (* [X1 | X2 | ... | Xn] applies X1 to the 1st 84 | subgoal, X2 to the second, and so forth. *) 85 | [left | right]; 86 | split; try apply H; try apply H0. 87 | Qed. 88 | 89 | End Lecture1. 90 | -------------------------------------------------------------------------------- /l02.v: -------------------------------------------------------------------------------- 1 | Module Lecture2. 2 | Require Import List. 3 | Require Import Arith. 4 | Require Import Omega. (* the `omega` tactic knows arithmetic *) 5 | Import ListNotations. (* `[]`, `[a; b; c]` notation *) 6 | 7 | 8 | 9 | (* Let's define our own functions on lists and prove 10 | some things about them. 11 | 12 | These functions are recursive on the `list` type, 13 | so they are defined with `Function` (not `Definition`). *) 14 | 15 | (* xlength should return the length of a list. 
*) 16 | 17 | (* xappend should append two lists together. *) 18 | 19 | (* xreverse should reverse a list in O(N^2) time. *) 20 | 21 | (* xireverse should reverse a list in O(N) time. *) 22 | 23 | 24 | (* Use `Eval compute` to test out your functions. *) 25 | 26 | 27 | (* Now, prove some things! *) 28 | 29 | End Lecture2. 30 | -------------------------------------------------------------------------------- /l02sol.v: -------------------------------------------------------------------------------- 1 | Module Lecture2. 2 | Require Import List. 3 | Require Import Arith. 4 | Require Import Omega. (* the `omega` tactic knows arithmetic *) 5 | Import ListNotations. (* `[]`, `[a; b; c]` notation *) 6 | 7 | 8 | Print list. 9 | 10 | Definition x : list nat := 1 :: 2 :: 3 :: nil. 11 | Print x. 12 | 13 | 14 | (* Let's define our own functions on lists and prove 15 | some things about them. 16 | 17 | These functions are recursive on the `list` type, 18 | so they are defined with `Function` (not `Definition`). *) 19 | 20 | (* xlength should return the length of a list. *) 21 | Function xlength {A} (x:list A) := 22 | match x with 23 | | nil => 0 24 | | _ :: x' => S (xlength x') 25 | end. 26 | 27 | (* xappend should append two lists together. *) 28 | Function xappend {A} (x y:list A) := 29 | match x with 30 | | nil => y 31 | | a :: x' => a :: (xappend x' y) 32 | end. 33 | 34 | Eval compute in xlength []. 35 | Eval compute in xlength [1;2;3;4]. 36 | Eval compute in xlength [[];[];[1]]. 37 | Eval compute in xappend [1;2;3] [4;5;6;7]. 38 | 39 | (* xreverse should reverse a list in O(N^2) time. *) 40 | Function xreverse {A} (x:list A) := 41 | match x with 42 | | nil => nil 43 | | a :: x' => xappend (xreverse x') [a] 44 | end. 45 | 46 | Check list_rec. 47 | 48 | Eval compute in xreverse [3;2;1]. 49 | 50 | (* xireverse should reverse a list in O(N) time. *) 51 | Function xireverse {A} (x y:list A) := 52 | match x with 53 | | nil => y 54 | | a :: x' => xireverse x' (a :: y) 55 | end. 56 | 57 | Eval compute in xireverse [1;3;495;10] []. 58 | 59 | 60 | (* Use `Eval compute` to test out your functions. *) 61 | 62 | 63 | (* Now, prove some things! *) 64 | 65 | Lemma xappend_length {A} : 66 | forall l1 l2:list A, xlength l1 + xlength l2 67 | = xlength (xappend l1 l2). 68 | Proof. 69 | intros. 70 | induction l1. 71 | auto. 72 | simpl. 73 | rewrite IHl1. 74 | reflexivity. 75 | Qed. 76 | 77 | (* At this point in class, we tried to prove 78 | something for which we lacked the tools. *) 79 | Lemma xreverse_reverse {A} (x:list A) : 80 | xreverse (xreverse x) = x. 81 | Proof. 82 | induction x; auto. 83 | simpl. 84 | Abort. 85 | 86 | 87 | (* WHAT YOU SHOULD DO: Try to prove xreverse_reverse! *) 88 | 89 | (* I proved xreverse_reverse using 3 additional lemmas and the 90 | following tactics only: `induction`, `simpl`, `auto`, `rewrite`. 91 | `try` is useful for shortening proofs but not required. 92 | 93 | You'll learn most by trying to prove xreverse_reverse yourself. 94 | That will give you experience in detecting when you're stuck and 95 | finding and proving helper lemmas. Think about basic properties 96 | of mathematical objects, and things that are true about list 97 | operations. For instance, associativity: is `xappend` 98 | associative? Would that ever be useful? 99 | 100 | If you get really stuck, the lemma statements I used are below, 101 | after some space. Fun fact: two of my lemmas have the *exact same 102 | proof statement*. 
*) 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | Lemma xappend_empty {A} (x:list A): 210 | xappend x [] = x. 211 | Abort. 212 | 213 | Lemma xappend_assoc {A} (x y z:list A): 214 | xappend (xappend x y) z = 215 | xappend x (xappend y z). 216 | Abort. 217 | 218 | Lemma xreverse_append {A} (x y:list A): 219 | xreverse (xappend x y) = 220 | xappend (xreverse y) (xreverse x). 221 | Abort. 222 | 223 | Lemma xreverse_reverse {A} (x:list A): 224 | xreverse (xreverse x) = x. 225 | Abort. 226 | 227 | End Lecture2. 228 | -------------------------------------------------------------------------------- /l05.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Import ListNotations. 7 | 8 | Section Lecture5. 9 | 10 | (* Define some local variables to this section. *) 11 | Definition A := nat. 12 | Definition LE := le. 13 | Hint Unfold LE. 14 | 15 | (* Some facts about lists. *) 16 | Check In. 17 | Print In. 18 | 19 | Check app. 20 | Print app. 21 | 22 | (* Let's prove our own list fact. *) 23 | Lemma in_middle: forall (a:A) xs1 xs2, 24 | In a (xs1 ++ a :: xs2). 25 | Admitted. 26 | 27 | 28 | (* Now, write an inductive Prop `Dec`, with a single 29 | `list A` argument, where `Dec l` is true iff the 30 | elements of `l` are in decreasing order by `LE`. *) 31 | 32 | 33 | (* Do it again. *) 34 | 35 | 36 | (* Prove the definitions equivalent. *) 37 | 38 | 39 | (* Now, prove a lemma about appending decreasing lists. *) 40 | 41 | End Lecture5. 42 | -------------------------------------------------------------------------------- /l05sol.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Import ListNotations. 7 | 8 | Section Lecture5. 9 | 10 | (* Define some local variables to this section. *) 11 | Definition A := nat. 12 | Definition LE := le. 13 | Hint Unfold LE. 14 | 15 | (* Some facts about lists. *) 16 | Print list. 17 | 18 | 19 | Check In. 20 | Print In. 21 | 22 | Check app. 23 | Print app. 24 | 25 | (* Let's prove our own list fact. *) 26 | Lemma in_middle: forall (a:A) xs1 xs2, 27 | In a (xs1 ++ a :: xs2). 28 | intros. 29 | induction xs1. 30 | simpl; auto. 31 | rewrite <- app_comm_cons. 32 | apply in_cons. 33 | auto. 34 | Qed. 35 | 36 | 37 | (* Now, write an inductive Prop `Dec`, with a single 38 | `list A` argument, where `Dec l` is true iff the 39 | elements of `l` are in decreasing order by `LE`. *) 40 | Inductive Dec : list A -> Prop := 41 | | DNil : Dec [] 42 | | DSingleton : forall a, Dec [a] 43 | | DCons : forall a b l, 44 | LE a b -> 45 | Dec (b :: l) -> 46 | Dec (a :: b :: l). 47 | Hint Constructors Dec. 48 | 49 | Example DecExample : Dec [1;2;3;3;4]. 50 | auto 7. 
51 | Qed. 52 | 53 | 54 | (* Do it again. *) 55 | Inductive Dec2 : list A -> Prop := 56 | | D2Nil : Dec2 [] 57 | | D2Cons : forall a l, 58 | (forall b, In b l -> LE a b) -> 59 | Dec2 l -> 60 | Dec2 (a :: l). 61 | Hint Constructors Dec2. 62 | 63 | Example Dec2Example : Dec2 [1;2;3;3;4]. 64 | Proof. 65 | apply D2Cons; intros. 66 | simpl in H; repeat destruct or H; unfold LE in *; omega. 67 | apply D2Cons; intros. 68 | simpl in H; repeat destruct or H; unfold LE in *; omega. 69 | apply D2Cons; intros. 70 | simpl in H; repeat destruct or H; unfold LE in *; omega. 71 | apply D2Cons; intros. 72 | simpl in H; repeat destruct or H; unfold LE in *; omega. 73 | apply D2Cons; intros. 74 | simpl in H; repeat destruct or H; unfold LE in *; omega. 75 | auto. 76 | Qed. 77 | 78 | 79 | (* Prove the definitions equivalent. *) 80 | 81 | Lemma Dec_Dec2 l: 82 | Dec l -> Dec2 l. 83 | Proof. 84 | (* Here's a first, long, interactive proof. *) 85 | intros. 86 | induction H. 87 | auto. 88 | apply D2Cons; intros. destruct H. 89 | auto. 90 | apply D2Cons. 91 | intros. 92 | inversion IHDec. 93 | destruct H1. 94 | rewrite <- H1; auto. 95 | apply H4 in H1. 96 | apply le_trans with (m:=b); auto. 97 | auto. 98 | 99 | (* Clean it up with an outline. *) 100 | Restart. 101 | intros; induction H. 102 | - auto. 103 | - apply D2Cons; intros. destruct H. auto. 104 | - apply D2Cons; intros. 105 | + inversion IHDec. (* invert Dec2 inductive hypothesis *) 106 | destruct H1. (* In b0 (b::l) becomes two cases: 107 | b0 = b \/ In b0 l *) 108 | * rewrite <- H1; auto. 109 | * apply H4 in H1. 110 | apply le_trans with (m:=b); auto. 111 | + auto. 112 | 113 | (* We need `le_trans` because `omega` can’t see through 114 | `LE`. So unfold it explicitly. *) 115 | Restart. 116 | intros; induction H. 117 | - auto. 118 | - apply D2Cons; intros. destruct H. auto. 119 | - apply D2Cons; intros. 120 | + inversion IHDec. 121 | unfold LE in *. 122 | destruct H1. (* In b0 (b::l) becomes two cases: 123 | b0 = b \/ In b0 l *) 124 | * omega. 125 | * apply H4 in H1; omega. 126 | + auto. 127 | 128 | (* Reducing the structure cleans up further. *) 129 | Restart. 130 | intros; induction H; auto; 131 | apply D2Cons; intros; auto; [ destruct H | ]. 132 | inversion IHDec; unfold LE in *; 133 | destruct H1; try apply H4 in H1; omega. 134 | Qed. 135 | 136 | 137 | (* Proving that Dec2 implies Dec is much easier. 138 | This is because Dec2s are easier to take apart 139 | -- fewer cases -- and Decs are easier to build 140 | -- only local requirements. It’s nice to know 141 | they are equivalent; now we can switch between 142 | them whenever we want! *) 143 | 144 | Lemma Dec2_Dec l: 145 | Dec2 l -> Dec l. 146 | Proof. 147 | intros; induction H; auto. 148 | destruct l; auto. 149 | apply DCons; auto. 150 | apply H; simpl; auto. 151 | Qed. 152 | 153 | 154 | (* Now, prove a lemma about appending decreasing lists. *) 155 | 156 | (* Left for later *) 157 | 158 | End Lecture5. 159 | -------------------------------------------------------------------------------- /l06.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Import ListNotations. 7 | 8 | Section Lecture6. 9 | 10 | (* True iff the list’s elements are in decreasing order *) 11 | Inductive Dec : list nat -> Prop := 12 | | DNil : Dec [] 13 | | DCons : forall a l, 14 | (forall b, In b l -> a <= b) -> 15 | Dec l -> 16 | Dec (a :: l). 
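(* An illustrative aside (the example name is ours): building a `Dec`
   by hand means discharging the `In`-quantified side condition at
   each cons. *)
Example Dec_example: Dec [1; 2].
Proof.
  apply DCons.
  - simpl. intros b [H | H].
    + omega.
    + destruct H.
  - apply DCons.
    + simpl. intros b H. destruct H.
    + apply DNil.
Qed.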
17 | Hint Constructors Dec. 18 | 19 | (* True iff the list’s elements are in INcreasing order *) 20 | Inductive Inc : list nat -> Prop := 21 | | INil : Inc [] 22 | | ICons : forall a l, 23 | (forall b, In b l -> b <= a) -> 24 | Inc l -> 25 | Inc (a :: l). 26 | Hint Constructors Inc. 27 | 28 | (* A fact about appending increasing lists *) 29 | Lemma Inc_app xs ys: 30 | Inc xs -> Inc ys -> 31 | (forall x y, In x xs -> In y ys -> y <= x) -> 32 | Inc (xs ++ ys). 33 | Admitted. 34 | 35 | 36 | Lemma Dec_rev xs: Dec (rev xs) -> Inc xs. 37 | Abort. 38 | 39 | End Lecture6. 40 | -------------------------------------------------------------------------------- /l06sol.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Import ListNotations. 7 | 8 | Section Lecture6. 9 | 10 | (* True iff the list’s elements are in decreasing order *) 11 | Inductive Dec : list nat -> Prop := 12 | | DNil : Dec [] 13 | | DCons : forall a l, 14 | (forall b, In b l -> a <= b) -> 15 | Dec l -> 16 | Dec (a :: l). 17 | Hint Constructors Dec. 18 | 19 | (* True iff the list’s elements are in INcreasing order *) 20 | Inductive Inc : list nat -> Prop := 21 | | INil : Inc [] 22 | | ICons : forall a l, 23 | (forall b, In b l -> b <= a) -> 24 | Inc l -> 25 | Inc (a :: l). 26 | Hint Constructors Inc. 27 | 28 | (* A fact about appending increasing lists *) 29 | Lemma Inc_app xs ys: 30 | Inc xs -> Inc ys -> 31 | (forall x y, In x xs -> In y ys -> y <= x) -> 32 | Inc (xs ++ ys). 33 | Admitted. 34 | 35 | 36 | Lemma Dec_rev xs: Dec (rev xs) -> Inc xs. 37 | induction xs. 38 | auto. 39 | simpl. 40 | intros. 41 | apply ICons; intros. 42 | (* Ugh! We could prove some lemmas about Dec, but instead 43 | let’s use a more convenient induction principle. *) 44 | Abort. 45 | 46 | 47 | (* Reverse induction on lists: *) 48 | Lemma rev_ind {A} (P:list A -> Prop): 49 | P [] -> 50 | (forall a l, P l -> P (l ++ [a])) -> 51 | (forall l, P l). 52 | (* But this is difficult to prove from scratch. 53 | We need one more helper induction principle. *) 54 | Abort. 55 | 56 | Lemma rev_ind' {A} (P:list A -> Prop): 57 | P [] -> 58 | (forall a l, P (rev l) -> P (rev (a :: l))) -> 59 | (forall l, P (rev l)). 60 | (* This induction principle is easier to prove 61 | because, in the inductive hypotheses, `l` grows 62 | in the normal way (from the front). *) 63 | intros; induction l; auto. 64 | Qed. 65 | 66 | Lemma rev_ind {A} (P:list A -> Prop): 67 | P [] -> 68 | (forall a l, P l -> P (l ++ [a])) -> 69 | (forall l, P l). 70 | intros. 71 | rewrite <- (rev_involutive l). (* `rev (rev l) = l` *) 72 | apply rev_ind'; auto. 73 | intros. 74 | simpl. 75 | apply H0. 76 | auto. 77 | Qed. 78 | 79 | 80 | (* We’ve proved the new induction principle correct, 81 | so we can use it. *) 82 | Lemma Dec_rev xs: Dec (rev xs) -> Inc xs. 83 | induction xs using rev_ind; auto. 84 | rewrite rev_unit. (* `rev (xs ++ [a]) = a :: rev xs` *) 85 | intros; inversion H; subst. 86 | apply Inc_app. 87 | - auto. (* `auto` can apply inductive hypothesis *) 88 | - apply ICons; intros. 89 | + destruct H0. 90 | + auto. 91 | - intros; destruct H1. 92 | + subst. 93 | apply H2. 94 | apply in_rev in H0. 95 | auto. 96 | + destruct H1. 97 | Qed. 98 | 99 | 100 | (* The proof gets easier if we tell Coq `auto` to 101 | automatically resolve `In _ nil` hypotheses. 
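   The `Hint Extern` below is tried during `auto`'s proof search: when
   some hypothesis has the shape `In _ nil`, destructing it closes the
   subgoal, since such a hypothesis is impossible.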
*) 102 | Hint Extern 1 => match goal with 103 | | [ H : In _ nil |- _ ] => destruct H 104 | end. 105 | 106 | Lemma Dec_rev' xs: Dec (rev xs) -> Inc xs. 107 | induction xs using rev_ind; auto. 108 | rewrite rev_unit. 109 | intros; inversion H; subst. 110 | apply Inc_app; auto. (* !! *) 111 | intros; destruct H1; auto. 112 | apply in_rev in H0; subst; auto. 113 | Qed. 114 | 115 | End Lecture6. 116 | -------------------------------------------------------------------------------- /l14.v: -------------------------------------------------------------------------------- 1 | Require Import Bool Arith List Omega. 2 | Require Import Recdef Morphisms. 3 | Require Import Program.Tactics. 4 | Require Import Relation_Operators. 5 | Require FMapList. 6 | Require FMapFacts. 7 | Require Import Classical. 8 | Require Import Coq.Classes.RelationClasses. 9 | Require Import OrderedType OrderedTypeEx DecidableType. 10 | Require Import Sorting.Permutation. 11 | Import ListNotations. 12 | 13 | Section ListFacts. 14 | Context {A:Type}. 15 | Implicit Types l:list A. 16 | 17 | Lemma filter_app (f:A -> bool) l1 l2: 18 | filter f (l1 ++ l2) = filter f l1 ++ filter f l2. 19 | Admitted. 20 | 21 | Lemma filter_app_eq (f:A -> bool) x y: 22 | filter f (x ++ y) = x ++ y -> 23 | filter f x = x /\ filter f y = y. 24 | Admitted. 25 | 26 | Lemma Forall_app (P:A -> Prop) l1 l2: 27 | Forall P (l1 ++ l2) <-> Forall P l1 /\ Forall P l2. 28 | Admitted. 29 | 30 | End ListFacts. 31 | 32 | 33 | Module WXFacts_fun (E:DecidableType) (Import Map:FMapInterface.WSfun E). 34 | Module MapFacts := FMapFacts.WFacts_fun E Map. 35 | Section XFacts. 36 | Notation eq_dec := E.eq_dec. 37 | Variable elt: Type. 38 | Implicit Types m: t elt. 39 | Implicit Types x y z: key. 40 | Implicit Types e: elt. 41 | 42 | Lemma add_same_Equal: forall m x e, 43 | MapsTo x e m -> 44 | Equal m (add x e m). 45 | Proof. 46 | intros; rewrite MapFacts.Equal_mapsto_iff; split; intros. 47 | - destruct (eq_dec x k); subst. 48 | + rewrite <- e1 in H0. 49 | rewrite (MapFacts.MapsTo_fun H H0). 50 | now eapply Map.add_1. 51 | + now eapply Map.add_2. 52 | - rewrite MapFacts.add_mapsto_iff in H0; 53 | destruct H0; destruct_conjs. 54 | + rewrite <- H0; now rewrite <- H1. 55 | + auto. 56 | Qed. 57 | 58 | Lemma add_redundant_Equal: forall m x e e', 59 | Equal (add x e m) (add x e (add x e' m)). 60 | Admitted. 61 | 62 | Lemma add_commutes: forall m x y e e', 63 | ~ E.eq x y -> 64 | Equal (add x e (add y e' m)) (add y e' (add x e m)). 65 | Admitted. 66 | 67 | Lemma add_remove_Equal: forall m x e, 68 | Equal (add x e m) (add x e (remove x m)). 69 | Admitted. 70 | 71 | Lemma remove_redundant_Equal: forall m x e, 72 | Equal (remove x m) (remove x (add x e m)). 73 | Admitted. 74 | 75 | Lemma remove_commutes: forall m x y, 76 | Equal (remove x (remove y m)) (remove y (remove x m)). 77 | Admitted. 78 | 79 | End XFacts. 80 | End WXFacts_fun. 81 | -------------------------------------------------------------------------------- /lec/l01-simple.v: -------------------------------------------------------------------------------- 1 | Module Lecture1. 2 | Require Import Arith. 3 | Require Import List. 4 | 5 | Definition AnIntegerExists : nat. 6 | (* First prove by construction. Start with 7 | Definition AnIntegerExists : nat := _. 8 | and fill in the underscore. *) 9 | 10 | Lemma AnIntegerExistsB : nat. 11 | (* Then prove using tactics. Start with 12 | "Proof." and check out Coq's reports. 13 | Tactics: `apply`, `intros`, `split`, `destruct`. 
*) 14 | 15 | 16 | Definition Proj1 {A B:Prop} : A /\ B -> A. 17 | 18 | Lemma Proj1B {A B:Prop} : A /\ B -> A. 19 | 20 | 21 | (* Use `Locate`, `Check`, and `Print` to figure out 22 | how <-> works. Start with `Locate "_ <-> _".` *) 23 | Lemma ObjectivismB {A:Prop} : A <-> A. 24 | 25 | Definition Objectivism {A:Prop} : A <-> A. 26 | 27 | 28 | Lemma DistributeAnd {A B C:Prop} : 29 | A /\ (B \/ C) -> (A /\ B) \/ (A /\ C). 30 | 31 | End Lecture1. 32 | -------------------------------------------------------------------------------- /lec/l02-list.v: -------------------------------------------------------------------------------- 1 | Module Lecture2. 2 | Require Import List. 3 | Require Import Arith. 4 | Require Import Omega. (* the `omega` tactic knows arithmetic *) 5 | Import ListNotations. (* `[]`, `[a; b; c]` notation *) 6 | 7 | 8 | 9 | (* Let's define our own functions on lists and prove 10 | some things about them. 11 | 12 | These functions are recursive on the `list` type, 13 | so they are defined with `Function` (not `Definition`). *) 14 | 15 | (* xlength should return the length of a list. *) 16 | 17 | (* xappend should append two lists together. *) 18 | 19 | (* xreverse should reverse a list in O(N^2) time. *) 20 | 21 | (* xireverse should reverse a list in O(N) time. *) 22 | 23 | 24 | (* Use `Eval compute` to test out your functions. *) 25 | 26 | 27 | (* Now, prove some things! *) 28 | 29 | End Lecture2. 30 | -------------------------------------------------------------------------------- /lec/l05-decreasing.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Import ListNotations. 7 | 8 | Section Lecture5. 9 | 10 | (* Define some local variables to this section. *) 11 | Definition A := nat. 12 | Definition LE := le. 13 | Hint Unfold LE. 14 | 15 | (* Some facts about lists. *) 16 | Check In. 17 | Print In. 18 | 19 | Check app. 20 | Print app. 21 | 22 | (* Let's prove our own list fact. *) 23 | Lemma in_middle: forall (a:A) xs1 xs2, 24 | In a (xs1 ++ a :: xs2). 25 | Admitted. 26 | 27 | 28 | (* Now, write an inductive Prop `Dec`, with a single 29 | `list A` argument, where `Dec l` is true iff the 30 | elements of `l` are in decreasing order by `LE`. *) 31 | 32 | 33 | (* Do it again. *) 34 | 35 | 36 | (* Prove the definitions equivalent. *) 37 | 38 | 39 | (* Now, prove a lemma about appending decreasing lists. *) 40 | 41 | End Lecture5. 42 | -------------------------------------------------------------------------------- /lec/l06-increasing.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Import ListNotations. 7 | 8 | Section Lecture6. 9 | 10 | (* True iff the list’s elements are in decreasing order *) 11 | Inductive Dec : list nat -> Prop := 12 | | DNil : Dec [] 13 | | DCons : forall a l, 14 | (forall b, In b l -> a <= b) -> 15 | Dec l -> 16 | Dec (a :: l). 17 | Hint Constructors Dec. 18 | 19 | (* True iff the list’s elements are in INcreasing order *) 20 | Inductive Inc : list nat -> Prop := 21 | | INil : Inc [] 22 | | ICons : forall a l, 23 | (forall b, In b l -> b <= a) -> 24 | Inc l -> 25 | Inc (a :: l). 26 | Hint Constructors Inc. 
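(* A quick sanity check -- one possible script, with an arbitrary
   example name: Inc holds for [2; 1; 0]. *)
Example Inc_210 : Inc [2; 1; 0].
Proof.
  apply ICons; [ intros b Hb; simpl in Hb; intuition omega | ].
  apply ICons; [ intros b Hb; simpl in Hb; intuition omega | ].
  apply ICons; [ intros b Hb; simpl in Hb; destruct Hb | ].
  apply INil.
Qed.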
27 | 28 | (* A fact about appending increasing lists *) 29 | Lemma Inc_app xs ys: 30 | Inc xs -> Inc ys -> 31 | (forall x y, In x xs -> In y ys -> y <= x) -> 32 | Inc (xs ++ ys). 33 | Admitted. 34 | 35 | 36 | Lemma Dec_rev xs: Dec (rev xs) -> Inc xs. 37 | Abort. 38 | 39 | End Lecture6. 40 | -------------------------------------------------------------------------------- /lec/l10-perm-inv-add.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Require Import Program.Equality. 7 | Require Import Sorting.Permutation. 8 | Import ListNotations. 9 | 10 | Variable A:Type. 11 | Implicit Types l : list A. 12 | 13 | 14 | (* This Ltac performs inversion on `Add` terms where the result list 15 | is a cons. *) 16 | Ltac InvAdd := repeat (match goal with 17 | | H: Add ?x _ (_ :: _) |- _ => inversion H; clear H; subst 18 | end). 19 | 20 | 21 | (* Let’s try to prove Permutation_Add_inv without a new induction 22 | principle! Look where we get stuck. *) 23 | 24 | Theorem Permutation_Add_inv' a l1 l2 : 25 | Permutation l1 l2 -> 26 | forall l1' l2', Add a l1' l1 -> 27 | Add a l2' l2 -> 28 | Permutation l1' l2'. 29 | Proof. 30 | revert l1 l2. 31 | refine (Permutation_ind _ _ _ _ _). 32 | - (* nil *) 33 | intros. 34 | inversion H. 35 | - (* skip *) 36 | intros x l1 l2 PE IH. intros. 37 | InvAdd. 38 | + auto. 39 | + rewrite <- PE. now apply Permutation_Add. 40 | + rewrite PE. symmetry. now apply Permutation_Add. 41 | + apply perm_skip. now apply IH. 42 | - (* swap *) 43 | intros x y l l1 l2 A1 A2. intros. 44 | Check Permutation_ind. 45 | (* DAMN *) 46 | admit. 47 | - (* trans *) 48 | intros l1 l l2 PE IH PE' IH' l1' l2' AD1 AD2. 49 | assert (Ha : In a l). 50 | { rewrite <- PE. rewrite (Add_in AD1). simpl; auto. } 51 | destruct (Add_inv _ _ Ha) as (l',AD). 52 | transitivity l'. 53 | now apply IH. 54 | now apply IH'. 55 | Abort. 56 | 57 | 58 | (* This new induction principle is inspired by the place we got stuck. *) 59 | 60 | Lemma Permutation_ind_bis: 61 | forall P: list A -> list A -> Prop, 62 | (* nil: *) 63 | P [] [] -> 64 | (* skip/cons: *) 65 | (forall a l l', Permutation l l' -> P l l' -> 66 | P (a :: l) (a :: l')) -> 67 | (* generalized swap: *) 68 | (forall x y l l', Permutation l l' -> P l l' -> 69 | P (x :: y :: l) (y :: x :: l')) -> 70 | (* trans: *) 71 | (forall l l' l'', Permutation l l' -> P l l' -> 72 | Permutation l' l'' -> P l' l'' -> 73 | P l l'') -> 74 | (* ...imply true for all *) 75 | forall l l', Permutation l l' -> P l l'. 76 | intros P Hnil Hcons Hswap Htrans l l' PE. 77 | induction PE. 78 | auto. 79 | apply Hcons; auto. 80 | apply Hswap. 81 | apply Permutation_refl. 82 | induction l; auto. 83 | apply Htrans with (l':=l'); auto. 84 | Qed. 85 | 86 | 87 | (* Now we can prove Add_inv! *) 88 | 89 | Theorem Permutation_Add_inv a l1 l2 : 90 | Permutation l1 l2 -> 91 | forall l1' l2', Add a l1' l1 -> 92 | Add a l2' l2 -> 93 | Permutation l1' l2'. 94 | Proof. 95 | revert l1 l2. 96 | refine (Permutation_ind_bis _ _ _ _ _). 97 | - (* nil *) 98 | inversion_clear 1. 99 | - (* skip *) 100 | intros x l1 l2 PE IH. intros. 101 | InvAdd. 102 | + auto. 103 | + rewrite <- PE. now apply Permutation_Add. 104 | + rewrite PE. symmetry. now apply Permutation_Add. 105 | + apply perm_skip. now apply IH. 106 | - (* swap *) 107 | intros x y l1 l2 PE IH. intros. 108 | InvAdd. 109 | + now constructor. 110 | + now constructor. 
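(* Seven more InvAdd-generated cases follow; they are dispatched with
   `constructor`, `perm_swap`, and rewrites along PE (via
   Permutation_Add), and only the final one needs the inductive
   hypothesis IH. *)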
111 | + constructor. rewrite <- PE. now apply Permutation_Add. 112 | + now constructor. 113 | + now constructor. 114 | + rewrite perm_swap. constructor. rewrite <- PE. 115 | now apply Permutation_Add. 116 | + constructor. rewrite PE. symmetry. 117 | now apply Permutation_Add. 118 | + rewrite perm_swap. constructor. rewrite PE. symmetry. 119 | now apply Permutation_Add. 120 | + (* The case that needs the inductive hypothesis! *) 121 | rewrite perm_swap. constructor. constructor. 122 | now apply IH. 123 | - (* trans *) 124 | intros l1 l l2 PE IH PE' IH' l1' l2' AD1 AD2. 125 | assert (Ha : In a l). 126 | { rewrite <- PE. rewrite (Add_in AD1). simpl; auto. } 127 | destruct (Add_inv _ _ Ha) as (l',AD). 128 | transitivity l'. 129 | now apply IH. 130 | now apply IH'. 131 | Qed. 132 | 133 | 134 | (* A little more Ltac makes this easier to prove, but harder to 135 | investigate the proof. This is basically the library’s proof 136 | (the library uses `auto` here and there). *) 137 | 138 | Ltac finish_basic_perms H := 139 | try constructor; try rewrite perm_swap; try constructor; trivial; 140 | (rewrite <- H; now apply Permutation_Add) || 141 | (rewrite H; symmetry; now apply Permutation_Add). 142 | 143 | Theorem Permutation_Add_inv' a l1 l2 : 144 | Permutation l1 l2 -> 145 | forall l1' l2', Add a l1' l1 -> 146 | Add a l2' l2 -> 147 | Permutation l1' l2'. 148 | Proof. 149 | revert l1 l2. 150 | refine (Permutation_ind_bis _ _ _ _ _). 151 | - (* nil *) 152 | inversion_clear 1. 153 | - (* skip *) 154 | intros x l1 l2 PE IH. intros. 155 | InvAdd; try finish_basic_perms PE. 156 | apply perm_skip. now apply IH. 157 | - (* swap *) 158 | intros x y l1 l2 PE IH. intros. 159 | InvAdd; try finish_basic_perms PE. 160 | (* The case that needs the inductive hypothesis! *) 161 | rewrite perm_swap. do 2 constructor. now apply IH. 162 | - (* trans *) 163 | intros l1 l l2 PE IH PE' IH' l1' l2' AD1 AD2. 164 | assert (Ha : In a l). 165 | { rewrite <- PE. rewrite (Add_in AD1). simpl; auto. } 166 | destruct (Add_inv _ _ Ha) as (l',AD). 167 | transitivity l'. 168 | now apply IH. 169 | now apply IH'. 170 | Qed. 171 | -------------------------------------------------------------------------------- /lecsolutions/l01-simple.v: -------------------------------------------------------------------------------- 1 | Module Lecture1. 2 | Require Import Arith. 3 | Require Import List. 4 | 5 | Definition AnIntegerExists : nat := 2. 6 | 7 | Lemma AnIntegerExistsB : nat. 8 | Proof. 9 | apply 0. 10 | Qed. 11 | 12 | Print nat. 13 | 14 | 15 | Locate "_ /\ _". 16 | Check and. 17 | Print and. 18 | 19 | Definition Proj1 {A B:Prop} : A /\ B -> A := 20 | fun (H:A /\ B) => 21 | match H with 22 | | conj x y => x 23 | end. 24 | 25 | Lemma Proj1B {A B:Prop} : A /\ B -> A. 26 | Proof. 27 | intros. 28 | destruct H. 29 | apply H. 30 | Qed. 31 | 32 | 33 | Locate "_ <-> _". 34 | Check iff. 35 | Print iff. 36 | Print and. 37 | 38 | Lemma ObjectivismB {A:Prop} : A <-> A. 39 | Proof. 40 | split. 41 | intros. 42 | apply H. 43 | intros. 44 | apply H. 45 | Qed. 46 | 47 | Lemma ObjectivismB_Semicolons {A:Prop} : A <-> A. 48 | Proof. 49 | split; intros; apply H. 50 | Qed. 51 | 52 | Lemma ObjectivismB_Auto {A:Prop} : A <-> A. 53 | Proof. 54 | split; auto. 55 | Qed. 56 | 57 | Definition Objectivism {A:Prop} : A <-> A := 58 | conj (fun x => x) (fun x => x). 59 | 60 | 61 | Lemma DistributeAnd {A B C:Prop} : 62 | A /\ (B \/ C) -> (A /\ B) \/ (A /\ C). 63 | Proof. 64 | intros. 65 | destruct H. 66 | destruct H0. 67 | left. 68 | split. 69 | apply H. 70 | apply H0. 
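(* Second branch of `destruct H0`: here C holds, so pick the right
   disjunct. *)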
71 | right. 72 | split. 73 | apply H. 74 | apply H0. 75 | Qed. 76 | 77 | Lemma DistributeAnd_Semicolons {A B C:Prop} : 78 | A /\ (B \/ C) -> (A /\ B) \/ (A /\ C). 79 | Proof. 80 | intros. 81 | destruct H. 82 | destruct H0; 83 | (* [X1 | X2 | ... | Xn] applies X1 to the 1st 84 | subgoal, X2 to the second, and so forth. *) 85 | [left | right]; 86 | split; try apply H; try apply H0. 87 | Qed. 88 | 89 | End Lecture1. 90 | -------------------------------------------------------------------------------- /lecsolutions/l02-list.v: -------------------------------------------------------------------------------- 1 | Module Lecture2. 2 | Require Import List. 3 | Require Import Arith. 4 | Require Import Omega. (* the `omega` tactic knows arithmetic *) 5 | Import ListNotations. (* `[]`, `[a; b; c]` notation *) 6 | 7 | 8 | Print list. 9 | 10 | Definition x : list nat := 1 :: 2 :: 3 :: nil. 11 | Print x. 12 | 13 | 14 | (* Let's define our own functions on lists and prove 15 | some things about them. 16 | 17 | These functions are recursive on the `list` type, 18 | so they are defined with `Function` (not `Definition`). *) 19 | 20 | (* xlength should return the length of a list. *) 21 | Function xlength {A} (x:list A) := 22 | match x with 23 | | nil => 0 24 | | _ :: x' => S (xlength x') 25 | end. 26 | 27 | (* xappend should append two lists together. *) 28 | Function xappend {A} (x y:list A) := 29 | match x with 30 | | nil => y 31 | | a :: x' => a :: (xappend x' y) 32 | end. 33 | 34 | Eval compute in xlength []. 35 | Eval compute in xlength [1;2;3;4]. 36 | Eval compute in xlength [[];[];[1]]. 37 | Eval compute in xappend [1;2;3] [4;5;6;7]. 38 | 39 | (* xreverse should reverse a list in O(N^2) time. *) 40 | Function xreverse {A} (x:list A) := 41 | match x with 42 | | nil => nil 43 | | a :: x' => xappend (xreverse x') [a] 44 | end. 45 | 46 | Check list_rec. 47 | 48 | Eval compute in xreverse [3;2;1]. 49 | 50 | (* xireverse should reverse a list in O(N) time. *) 51 | Function xireverse {A} (x y:list A) := 52 | match x with 53 | | nil => y 54 | | a :: x' => xireverse x' (a :: y) 55 | end. 56 | 57 | Eval compute in xireverse [1;3;495;10] []. 58 | 59 | 60 | (* Use `Eval compute` to test out your functions. *) 61 | 62 | 63 | (* Now, prove some things! *) 64 | 65 | Lemma xappend_length {A} : 66 | forall l1 l2:list A, xlength l1 + xlength l2 67 | = xlength (xappend l1 l2). 68 | Proof. 69 | intros. 70 | induction l1. 71 | auto. 72 | simpl. 73 | rewrite IHl1. 74 | reflexivity. 75 | Qed. 76 | 77 | (* At this point in class, we tried to prove 78 | something for which we lacked the tools. *) 79 | Lemma xreverse_reverse {A} (x:list A) : 80 | xreverse (xreverse x) = x. 81 | Proof. 82 | induction x; auto. 83 | simpl. 84 | Abort. 85 | 86 | 87 | (* WHAT YOU SHOULD DO: Try to prove xreverse_reverse! *) 88 | 89 | (* I proved xreverse_reverse using 3 additional lemmas and the 90 | following tactics only: `induction`, `simpl`, `auto`, `rewrite`. 91 | `try` is useful for shortening proofs but not required. 92 | 93 | You'll learn most by trying to prove xreverse_reverse yourself. 94 | That will give you experience in detecting when you're stuck and 95 | finding and proving helper lemmas. Think about basic properties 96 | of mathematical objects, and things that are true about list 97 | operations. For instance, associativity: is `xappend` 98 | associative? Would that ever be useful? 99 | 100 | If you get really stuck, the lemma statements I used are below, 101 | after some space. 
Fun fact: two of my lemmas have the *exact same 102 | proof statement*. *) 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | Lemma xappend_empty {A} (x:list A): 210 | xappend x [] = x. 211 | Abort. 212 | 213 | Lemma xappend_assoc {A} (x y z:list A): 214 | xappend (xappend x y) z = 215 | xappend x (xappend y z). 216 | Abort. 217 | 218 | Lemma xreverse_append {A} (x y:list A): 219 | xreverse (xappend x y) = 220 | xappend (xreverse y) (xreverse x). 221 | Abort. 222 | 223 | Lemma xreverse_reverse {A} (x:list A): 224 | xreverse (xreverse x) = x. 225 | Abort. 226 | 227 | End Lecture2. 228 | -------------------------------------------------------------------------------- /lecsolutions/l05-decreasing.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Import ListNotations. 7 | 8 | Section Lecture5. 9 | 10 | (* Define some local variables to this section. *) 11 | Definition A := nat. 12 | Definition LE := le. 13 | Hint Unfold LE. 14 | 15 | (* Some facts about lists. *) 16 | Print list. 17 | 18 | 19 | Check In. 20 | Print In. 21 | 22 | Check app. 23 | Print app. 24 | 25 | (* Let's prove our own list fact. *) 26 | Lemma in_middle: forall (a:A) xs1 xs2, 27 | In a (xs1 ++ a :: xs2). 28 | intros. 29 | induction xs1. 30 | simpl; auto. 31 | rewrite <- app_comm_cons. 32 | apply in_cons. 33 | auto. 34 | Qed. 35 | 36 | 37 | (* Now, write an inductive Prop `Dec`, with a single 38 | `list A` argument, where `Dec l` is true iff the 39 | elements of `l` are in decreasing order by `LE`. *) 40 | Inductive Dec : list A -> Prop := 41 | | DNil : Dec [] 42 | | DSingleton : forall a, Dec [a] 43 | | DCons : forall a b l, 44 | LE a b -> 45 | Dec (b :: l) -> 46 | Dec (a :: b :: l). 47 | Hint Constructors Dec. 48 | 49 | Example DecExample : Dec [1;2;3;3;4]. 50 | auto 7. 51 | Qed. 52 | 53 | 54 | (* Do it again. *) 55 | Inductive Dec2 : list A -> Prop := 56 | | D2Nil : Dec2 [] 57 | | D2Cons : forall a l, 58 | (forall b, In b l -> LE a b) -> 59 | Dec2 l -> 60 | Dec2 (a :: l). 61 | Hint Constructors Dec2. 62 | 63 | Example Dec2Example : Dec2 [1;2;3;3;4]. 64 | Proof. 65 | apply D2Cons; intros. 66 | simpl in H; repeat destruct or H; unfold LE in *; omega. 67 | apply D2Cons; intros. 68 | simpl in H; repeat destruct or H; unfold LE in *; omega. 69 | apply D2Cons; intros. 70 | simpl in H; repeat destruct or H; unfold LE in *; omega. 71 | apply D2Cons; intros. 72 | simpl in H; repeat destruct or H; unfold LE in *; omega. 73 | apply D2Cons; intros. 74 | simpl in H; repeat destruct or H; unfold LE in *; omega. 75 | auto. 76 | Qed. 77 | 78 | 79 | (* Prove the definitions equivalent. *) 80 | 81 | Lemma Dec_Dec2 l: 82 | Dec l -> Dec2 l. 83 | Proof. 84 | (* Here's a first, long, interactive proof. *) 85 | intros. 86 | induction H. 87 | auto. 88 | apply D2Cons; intros. destruct H. 
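(* The DSingleton case: `destruct H` disposed of the vacuous
   `In b []` premise, so only `Dec2 []` remains. *)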
89 | auto. 90 | apply D2Cons. 91 | intros. 92 | inversion IHDec. 93 | destruct H1. 94 | rewrite <- H1; auto. 95 | apply H4 in H1. 96 | apply le_trans with (m:=b); auto. 97 | auto. 98 | 99 | (* Clean it up with an outline. *) 100 | Restart. 101 | intros; induction H. 102 | - auto. 103 | - apply D2Cons; intros. destruct H. auto. 104 | - apply D2Cons; intros. 105 | + inversion IHDec. (* invert Dec2 inductive hypothesis *) 106 | destruct H1. (* In b0 (b::l) becomes two cases: 107 | b0 = b \/ In b0 l *) 108 | * rewrite <- H1; auto. 109 | * apply H4 in H1. 110 | apply le_trans with (m:=b); auto. 111 | + auto. 112 | 113 | (* We need `le_trans` because `omega` can’t see through 114 | `LE`. So unfold it explicitly. *) 115 | Restart. 116 | intros; induction H. 117 | - auto. 118 | - apply D2Cons; intros. destruct H. auto. 119 | - apply D2Cons; intros. 120 | + inversion IHDec. 121 | unfold LE in *. 122 | destruct H1. (* In b0 (b::l) becomes two cases: 123 | b0 = b \/ In b0 l *) 124 | * omega. 125 | * apply H4 in H1; omega. 126 | + auto. 127 | 128 | (* Reducing the structure cleans up further. *) 129 | Restart. 130 | intros; induction H; auto; 131 | apply D2Cons; intros; auto; [ destruct H | ]. 132 | inversion IHDec; unfold LE in *; 133 | destruct H1; try apply H4 in H1; omega. 134 | Qed. 135 | 136 | 137 | (* Proving that Dec2 implies Dec is much easier. 138 | This is because Dec2s are easier to take apart 139 | -- fewer cases -- and Decs are easier to build 140 | -- only local requirements. It’s nice to know 141 | they are equivalent; now we can switch between 142 | them whenever we want! *) 143 | 144 | Lemma Dec2_Dec l: 145 | Dec2 l -> Dec l. 146 | Proof. 147 | intros; induction H; auto. 148 | destruct l; auto. 149 | apply DCons; auto. 150 | apply H; simpl; auto. 151 | Qed. 152 | 153 | 154 | (* Now, prove a lemma about appending decreasing lists. *) 155 | 156 | (* Left for later *) 157 | 158 | End Lecture5. 159 | -------------------------------------------------------------------------------- /lecsolutions/l06-increasing.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Import ListNotations. 7 | 8 | Section Lecture6. 9 | 10 | (* True iff the list’s elements are in decreasing order *) 11 | Inductive Dec : list nat -> Prop := 12 | | DNil : Dec [] 13 | | DCons : forall a l, 14 | (forall b, In b l -> a <= b) -> 15 | Dec l -> 16 | Dec (a :: l). 17 | Hint Constructors Dec. 18 | 19 | (* True iff the list’s elements are in INcreasing order *) 20 | Inductive Inc : list nat -> Prop := 21 | | INil : Inc [] 22 | | ICons : forall a l, 23 | (forall b, In b l -> b <= a) -> 24 | Inc l -> 25 | Inc (a :: l). 26 | Hint Constructors Inc. 27 | 28 | (* A fact about appending increasing lists *) 29 | Lemma Inc_app xs ys: 30 | Inc xs -> Inc ys -> 31 | (forall x y, In x xs -> In y ys -> y <= x) -> 32 | Inc (xs ++ ys). 33 | Admitted. 34 | 35 | 36 | Lemma Dec_rev xs: Dec (rev xs) -> Inc xs. 37 | induction xs. 38 | auto. 39 | simpl. 40 | intros. 41 | apply ICons; intros. 42 | (* Ugh! We could prove some lemmas about Dec, but instead 43 | let’s use a more convenient induction principle. *) 44 | Abort. 45 | 46 | 47 | (* Reverse induction on lists: *) 48 | Lemma rev_ind {A} (P:list A -> Prop): 49 | P [] -> 50 | (forall a l, P l -> P (l ++ [a])) -> 51 | (forall l, P l). 52 | (* But this is difficult to prove from scratch. 
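Plain `induction l` builds lists from the front, while the step
here extends `l` at the back with `l ++ [a]`.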
53 | We need one more helper induction principle. *) 54 | Abort. 55 | 56 | Lemma rev_ind' {A} (P:list A -> Prop): 57 | P [] -> 58 | (forall a l, P (rev l) -> P (rev (a :: l))) -> 59 | (forall l, P (rev l)). 60 | (* This induction principle is easier to prove 61 | because, in the inductive hypotheses, `l` grows 62 | in the normal way (from the front). *) 63 | intros; induction l; auto. 64 | Qed. 65 | 66 | Lemma rev_ind {A} (P:list A -> Prop): 67 | P [] -> 68 | (forall a l, P l -> P (l ++ [a])) -> 69 | (forall l, P l). 70 | intros. 71 | rewrite <- (rev_involutive l). (* `rev (rev l) = l` *) 72 | apply rev_ind'; auto. 73 | intros. 74 | simpl. 75 | apply H0. 76 | auto. 77 | Qed. 78 | 79 | 80 | (* We’ve proved the new induction principle correct, 81 | so we can use it. *) 82 | Lemma Dec_rev xs: Dec (rev xs) -> Inc xs. 83 | induction xs using rev_ind; auto. 84 | rewrite rev_unit. (* `rev (xs ++ [a]) = a :: rev xs` *) 85 | intros; inversion H; subst. 86 | apply Inc_app. 87 | - auto. (* `auto` can apply inductive hypothesis *) 88 | - apply ICons; intros. 89 | + destruct H0. 90 | + auto. 91 | - intros; destruct H1. 92 | + subst. 93 | apply H2. 94 | apply in_rev in H0. 95 | auto. 96 | + destruct H1. 97 | Qed. 98 | 99 | 100 | (* The proof gets easier if we tell Coq `auto` to 101 | automatically resolve `In _ nil` hypotheses. *) 102 | Hint Extern 1 => match goal with 103 | | [ H : In _ nil |- _ ] => destruct H 104 | end. 105 | 106 | Lemma Dec_rev' xs: Dec (rev xs) -> Inc xs. 107 | induction xs using rev_ind; auto. 108 | rewrite rev_unit. 109 | intros; inversion H; subst. 110 | apply Inc_app; auto. (* !! *) 111 | intros; destruct H1; auto. 112 | apply in_rev in H0; subst; auto. 113 | Qed. 114 | 115 | End Lecture6. 116 | -------------------------------------------------------------------------------- /naivecoq.md: -------------------------------------------------------------------------------- 1 | Naïve Coq 2 | ========= 3 | 4 | * **[Coq](https://coq.inria.fr/)** 5 | * [Tactic index](https://coq.inria.fr/distrib/current/refman/tactic-index.html) 6 | * [Talkin’ with the Rooster (Coq FAQ)](https://coq.inria.fr/faq#htoc51) 7 | 8 | Overview 9 | -------- 10 | 11 | | WHEN YOU WANT | USE | 12 | | --------------------------------------- | ---------- | 13 | | to forget a hypothesis | `clear` | 14 | | simple case analysis | `destruct` | 15 | | smart case analysis on complex objects | `inversion` | 16 | | inductive case analysis | `induction` | 17 | | simultaneous induction | `induction x, y` | 18 | | a more general inductive hypothesis | `generalize` or `revert` | 19 | | inductive case analysis on a `Function` body | `functional induction` | 20 | | Coq to do something obvious | `auto`, `omega` | 21 | | to expand a function body | `simpl`, `unfold` | 22 | | to expand a `Function` body | `rewrite [functionname]_equation` | 23 | | to rewrite using equality | `rewrite H` | 24 | | an obvious contradiction | `contradiction` | 25 | | a contradiction about different constructors | `discriminate` | 26 | | to go from a theorem’s conclusion to its premise | `apply` | 27 | | to go from a theorem’s premise to its conclusion | `apply in H` | 28 | | a new hypothesis | `pose`, `assert` | 29 | | a new name for an expression | `remember` | 30 | | to work with `A /\ B` | `split` (goal), `destruct` (hypothesis) | 31 | | to work with `A \/ B` | `left`, `right` (goal), `destruct` (hypothesis) | 32 | | to show `exists x, P` | `exists p` | 33 | | to create `x` from `exists x, P` hypothesis | `destruct`, `destruct_conjs` | 34 | | the 
same thing in every subgoal | `tactic1; tactic2` | 35 | | different things in different subgoals | `[ tactic1 | tactic2 | ... ]` | 36 | | suppressed errors | `...; try tactic; ...` | 37 | 38 | 39 | Introduction tactics: adding hypotheses 40 | --------------------------------------- 41 | 42 | These tactics add new hypotheses. 43 | 44 | ### `pose (expression)` 45 | 46 | This adds a new hypothesis defined by `expression`, which might be an 47 | already-defined theorem (e.g., `pose le_S_n`, `pose (le_S_n 2 3)`) or a 48 | version of an existing hypothesis (e.g., `pose (IHx 0)`). `pose (expression) 49 | as Hname` gives the new hypothesis a specific name. 50 | 51 | ### `assert (expression)` 52 | 53 | Pause the current proof and start a new subgoal of proving `expression`. 54 | 55 | ### `assert (expression) by tactic` 56 | 57 | Add `expression` as a hypothesis, but only if it can be proved by `tactic`. 58 | 59 | ### `remember (expression) as x` 60 | 61 | Adds a new variable `x` and a hypothesis `x = expression`, then rewrites 62 | everything else to use `x` instead of `expression`. Useful to simplify 63 | arguments for tactics such as functional induction. 64 | 65 | > Note that some people think good Coq style requires minimizing hypotheses, 66 | > so you should only introduce facts by applying them to existing hypotheses 67 | > or goals. I think this is too strict. 68 | 69 | 70 | Destructive tactics: destruction, induction, inversion 71 | ------------------------------------------------------ 72 | 73 | Every object of an inductive type `T` was created by one of `T`’s 74 | constructors. A `nat` was created by either `O` or `S`, a `bool` was created 75 | by either `false` or `true`, and so forth. 76 | 77 | The *destructive tactics* break a proof into cases by generating one case per 78 | constructor. Destructive tactics usually create subgoals. They are either 79 | simple (they don’t generate inductive hypotheses) or inductive (they do). 80 | 81 | ### `destruct x`: simple 82 | 83 | Replaces `x` with one subgoal per `x` constructor. 84 | 85 | ### `destruct H`: simple 86 | 87 | Replaces `H` with one subgoal per `H` constructor. 88 | 89 | ### `destruct (expression)`: simple 90 | 91 | Adds one subgoal per constructor of the expression, possibly including new 92 | hypotheses. Example: `destruct (eq_nat_dec n m)`. 93 | 94 | `destruct` can lose context. For example, `destruct (S n)` generates an 95 | impossible case (the one where `(S n) = O`), but also drops the context 96 | required to prove the case impossible. Other destructive tactics are less 97 | stupid. `destruct` is best on simple objects, like pure variables. 98 | 99 | ### `destruct_pairs`: simple 100 | 101 | Separate all logical-and hypotheses into their component parts. Never loses 102 | context. Needs `Require Import Program.Tactics`. 103 | 104 | ### `inversion_clear H`: simple 105 | 106 | Replaces `H` with one subgoal per `H` constructor, *and* generates additional 107 | constraints based on arguments to `H`, *and* eliminates impossible cases, 108 | *and* cleans up redundant hypotheses. 109 | 110 | `inversion_clear` is useful when `destruct` loses too much context. 111 | 112 | ### `inversion H`: simple 113 | 114 | Like `inversion_clear`, but preserves possibly-redundant hypotheses. 115 | 116 | ### Example: Simple destructive tactic comparison 117 | 118 | Here we try to prove a basic fact about `cons`. Note how the different tactics 119 | behave: 120 | 121 | ``` 122 | Coq < Lemma cons_eq {A} (n m:A) (p q:list A): 123 | Coq < n :: p = m :: q -> p = q.
124 | Coq < Proof. intros. 125 | 126 | 1 subgoal, subgoal 1 (ID 748) 127 | 128 | A : Type 129 | n, m : A 130 | p, q : list A 131 | H : n :: p = m :: q 132 | ============================ 133 | p = q 134 | 135 | Coq < destruct H. (* loses context *) 136 | 137 | 1 subgoal, subgoal 1 (ID 757) 138 | 139 | A : Type 140 | n, m : A 141 | p, q : list A 142 | ============================ 143 | p = q 144 | 145 | Coq < Restart. intros; inversion H. (* keeps context, redundancy *) 146 | 147 | 1 subgoal, subgoal 1 (ID 774) 148 | 149 | A : Type 150 | n, m : A 151 | p, q : list A 152 | H : n :: p = m :: q 153 | H1 : n = m 154 | H2 : p = q 155 | ============================ 156 | q = q 157 | 158 | Coq < Restart. intros; inversion_clear H. (* keeps context, less redundancy *) 159 | 160 | A : Type 161 | n, m : A 162 | p, q : list A 163 | ============================ 164 | q = q 165 | ``` 166 | 167 | ### `induction x`: inductive 168 | 169 | Adds one subgoal per `x` constructor, with inductive hypotheses when 170 | appropriate. You can induct over hypotheses too. 171 | 172 | ### `induction x, y`: inductive 173 | 174 | Simultaneous induction on `x` and `y`. This is often preferred to `induction 175 | x; induction y`, which performs a *separate* induction on `y` for *each* 176 | inductive case on `x`. In simultaneous induction, the inductive hypotheses are 177 | over *both* `x` and `y`; in separate induction, `y`’s inductive hypothesis 178 | will assume a specific `x`. 179 | 180 | ### `functional induction (f arg...)`: inductive 181 | 182 | Adds one subgoal per case in the definition of `f`, which must have been 183 | defined by `Function`. Works best if the `arg`s are simple variables; if 184 | they’re not, it can lose context. Use `remember` to avoid this. 185 | 186 | `rewrite f_equation` is useful for functional induction. 187 | 188 | ### `generalize x` 189 | 190 | Replaces a goal that refers to a specific variable, `x`, with `forall x, 191 | [goal]`. Use this before induction if applying an induction tactic gives you a 192 | too-specific inductive hypothesis. 193 | 194 | ### `revert x` 195 | 196 | Acts like `generalize x`, but also clears the generalized `x` from the 197 | context. 198 | 199 | ### `clear H` 200 | 201 | Clears a hypothesis. 202 | 203 | 204 | Applicative tactics 205 | ------------------- 206 | 207 | These tactics work with implications: statements of the form `P -> Q`. This 208 | includes certain inductive constructors; for instance, the type of the 209 | natural-number successor operation `S` is `nat -> nat`. 210 | 211 | ### `apply H`, `apply Theorem`, `apply (expression)` 212 | 213 | Matches the goal with the conclusion of the implication, and replaces it with 214 | the premise. For instance, given the goal `S (a + b) > S x`, `apply gt_n_S` 215 | will create the new goal `a + b > x`. May generate new subgoals for new 216 | dependencies. 217 | 218 | ### `apply ... with (var:=expr)` 219 | 220 | Sometimes Coq can’t figure out how to apply an implication. Help it by giving 221 | more hypotheses or by assigning values to specific variables. This often 222 | occurs with transitivity. For instance: 223 | 224 | ``` 225 | Coq < Check gt_trans. 226 | 227 | gt_trans 228 | : forall n m p : nat, n > m -> m > p -> n > p 229 | 230 | Coq < Lemma gt_trans_Snp x y z: S x > y -> y > S z -> S x > S z. 231 | Coq < intros. apply gt_trans. 232 | 233 | Error: Unable to find an instance for the variable m. 234 | 235 | Coq < apply (gt_trans (S x) y (S z)). (* works *) 236 | Coq < apply gt_trans with (m:=y).
(* also works *) 237 | Coq < apply (gt_trans _ _ _ H). (* also works; H : S x > y *) 238 | ``` 239 | 240 | ### `apply ... in H` 241 | 242 | Applies an implication in a hypothesis rather than the goal. This works in the 243 | other direction—it matches the *premise* of the implication against the 244 | hypothesis, and replaces the hypothesis with the *conclusion*. 245 | 246 | 247 | Existence tactics 248 | ----------------- 249 | 250 | These tactics are useful when you’re trying to prove that something exists. 251 | They are really forms of `apply`, specialized for inductive types. The goal 252 | states the something exists; the tactics replace that goal with the premises 253 | for one of the corresponding constructors. 254 | 255 | These tactics are typically used for propositions rather than objects, since a 256 | proof goal rarely has the form “a list exists.” They do not apply to 257 | hypotheses: you can’t say `split in H`. In hypotheses, use a destructive 258 | tactic instead. (For instance, to split apart a hypothesis `H : A /\ B` or `H 259 | : A \/ B`, you usually want `destruct H`.) 260 | 261 | ### `split` 262 | 263 | `split` is typically used for logical-and goals, but works for all goals with 264 | single-constructor types. It just applies the single constructor. `split` 265 | never loses context. 266 | 267 | > Explanation: Every object witnesses its type, and constructors let us make 268 | > new objects from old ones. In Coq, the goal `A /\ B` (or, equivalently, `and 269 | > A B`) means “there exists an object of type `and A B`.” Since that inductive 270 | > type has one constructor, `conj` (of type `A -> B -> and A B`), that object, 271 | > if it exists, must have been constructed by that constructor. As soon as we 272 | > have an `A` and a `B` (that is, witnesses for those types), we can create an 273 | > `A /\ B` by applying `conj`. The `split` tactic implements this logic: it 274 | > replaces a `A /\ B` goal with its requirements, `A` and `B`. 275 | 276 | ### `left` and `right` 277 | 278 | `left` and `right` are typically used for logical-or goals, but work for all 279 | goals with two-constructor types. `left` applies the first constructor and 280 | `right` applies the second one. This can lose context, so make sure you pick 281 | the right one. 282 | 283 | ### `exists` (goal) 284 | 285 | `exists p` is typically used for existence goals (like `exists p, p = 5`), but 286 | works for all goals with single-constructor types. It applies the single 287 | constructor with argument `p`. For instance, `exists p, p = 5` can be proved 288 | by `exists (2 * 2 + 1); auto`. 289 | 290 | ### `constructor` 291 | 292 | More generally, `constructor` applies the first constructor that matches the 293 | goal. If more than one constructor matches, `constructor` can lose context. 294 | Use `constructor N` to apply the `N`th constructor specifically. Use 295 | `constructor ... with (var:=value)` if the constructor can’t figure out values 296 | for some variables. 297 | 298 | ### `destruct` for `exists` hypotheses 299 | 300 | A hypothesis like `H : exists x, P` says that *some* `x` exists. To find a 301 | *specific* `x` for which `P` holds, run `destruct H`. To simultaneously 302 | instantiate *all* `exists` hypotheses (and break down `/\` hypotheses into 303 | their components), run `destruct_conjs` (requires `Program.Tactics`). 304 | 305 | 306 | Completion tactics 307 | ------------------ 308 | 309 | These tactics solve a goal. 310 | 311 | ### `auto` (fail-free) 312 | 313 | Solve an “obvious” system.
“Obvious” means Coq searches for a solution by 314 | simplifying expressions, rewriting equalities, finding contradictions, etc., 315 | but not for very long. Give a number, such as `auto 10`, to tell Coq to look a 316 | little harder. 317 | 318 | If `auto` can’t solve the current subgoal, it does nothing. 319 | 320 | ### `omega` 321 | 322 | Solve a system by arithmetic or fail. You may prefer `try omega`. 323 | 324 | ### `contradiction` 325 | 326 | Solve a system with transparently contradictory hypotheses (e.g., `False`, `~ 327 | True`) or fail. You may prefer `try contradiction`. 328 | 329 | ### `discriminate H` 330 | 331 | Solve a system by contradiction, given an absurd hypothesis `H` that equates 332 | different constructors of the same type (e.g., `S n = 0`). 333 | 334 | ### `reflexivity` 335 | 336 | Solve a system whose goal is a reflexive equality (e.g., `x = x`). `auto` is 337 | better. 338 | 339 | 340 | Simplification tactics 341 | ---------------------- 342 | 343 | These tactics simplify expressions by rewriting function calls. 344 | 345 | ### `simpl in *` (fail-free) 346 | 347 | Simplify all function calls that can be simplified. To localize the tactic’s 348 | effects, Use `simpl` (goal only), `simpl in H` (hypothesis `H` only), `simpl 349 | functionname` (`functionname` only), `simpl functionname at N [in H]` (only 350 | the `N`th occurrence of `functionname`). 351 | 352 | ### `unfold functionname in *` 353 | 354 | Replace a function call with the function’s body. Use `unfold functionname` or 355 | `unfold functionname in H` to localize effects. 356 | 357 | ### `fold functionname in *` 358 | 359 | Replace a function’s body with a call. This is the inverse of `unfold 360 | functionname in *`. 361 | 362 | ### `rewrite _equation in *` 363 | 364 | The `simpl` and `unfold` tactics don’t work well on recursive functions 365 | defined by `Function`. Use this instead; it rewrites a function call with the 366 | function’s body. 367 | 368 | 369 | Tactics that use equivalence 370 | ---------------------------- 371 | 372 | These tactics rewrite expressions using equivalence facts, either primitive 373 | equality (`x = y`) or if-and-only-if propositions (`A <-> B`). 374 | 375 | ### `rewrite H in *` 376 | 377 | `H` should be an equality or equivalence. This replaces all occurrences of the 378 | left-hand side of the equality with the right-hand side. 379 | 380 | ### `rewrite <- H in *` 381 | 382 | Same, but replaces occurrences of the right-hand side with the left-hand side. 383 | 384 | 385 | 386 | Meta-tactics 387 | ------------ 388 | 389 | ### `idtac` 390 | 391 | No-op tactic. 392 | 393 | ### `fail` 394 | 395 | Always fail. 396 | 397 | ### `try tactic` 398 | 399 | Try `tactic`, but do nothing if it fails. 400 | 401 | ### `tactic1; tactic2` 402 | 403 | Do `tactic1`, and if it succeeds, apply `tactic2` to every generated subgoal. 404 | 405 | ### `[ tactic1 | tactic2 | ... | tacticN ]` 406 | 407 | Requires that there are `N` active subgoals. Applies `tacticI` to the `I`th 408 | subgoal. 409 | 410 | ### `repeat tactic` 411 | 412 | Do `tactic` until it fails. 413 | -------------------------------------------------------------------------------- /pset1/README.md: -------------------------------------------------------------------------------- 1 | Problem set 1 2 | ============= 3 | 4 | Finish these files; the exercises are marked. 5 | 6 | 1. `pset1compare.v`: mostly warmup, some tactics. 7 | 2. `pset1bnat.v`: binary (rather than unary) numbers. 8 | 3. `pset1btree.v`: binary search trees. 
9 | 10 | Due Monday 2/6. 11 | -------------------------------------------------------------------------------- /pset1/pset1bnat.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | 5 | Module Pset1Bnat. 6 | 7 | (* In this module, you'll prove some facts about *binary* 8 | numbers, rather than Coq's default unary numbers. 9 | We'll start out with some useful lemmas. *) 10 | Lemma double_eq (n m:nat) : n + n = m + m <-> n = m. 11 | Proof. omega. Qed. 12 | 13 | Lemma double_lt (n m:nat) : n + n < m + m <-> n < m. 14 | Proof. omega. Qed. 15 | 16 | Lemma double_lt_n_Sm (n m:nat) : n + n < S (m + m) <-> n < S m. 17 | Proof. omega. Qed. 18 | 19 | (* The `clean_arithmetic` tactic cleans up a lot of arithmetic 20 | identities typically occurring in these problems. 21 | It replaces `x + 0` with `x`, `S x = S y` with `x = y`, and 22 | `x + x = y + y` with `x = y`. *) 23 | Ltac clean_arithmetic := 24 | repeat match goal with 25 | | [H : context[_ + 0] |- _] => rewrite Nat.add_0_r in H 26 | | [ |- context[_ + 0] ] => rewrite Nat.add_0_r 27 | | [H : context[?a + ?a = ?b + ?b] |- _] => rewrite double_eq in H 28 | | [ |- context[?a + ?a = ?b + ?b] ] => rewrite double_eq 29 | | [H : context[S _ = S _] |- _] => apply eq_add_S in H 30 | | [ |- context[S _ = S _] ] => apply eq_S 31 | end. 32 | 33 | 34 | (* Coq's Peano arithmetic defines natural numbers inductively, 35 | based on two constructors: 0 and successor. So 10 is defined 36 | as a structure of length 11: 37 | (S (S (S (S (S (S (S (S (S (S Z))))))))))) 38 | And in general N is defined in a structure of length O(N). 39 | 40 | We're going to work with natural numbers based on *three* 41 | constructors. That will lead to numbers defined in structures 42 | of length O(log_2 N). *) 43 | 44 | Inductive bnat : Type := 45 | | BZ : bnat (* Zero *) 46 | | BD1 : bnat -> bnat (* BD1 X is the number 2*X + 1 *) 47 | | BD2 : bnat -> bnat. (* BD2 X is the number 2*X + 2 *) 48 | 49 | 50 | (* EXERCISE: Write a function that computes the conventional 51 | (Peano) value of bnat `b`. *) 52 | Function bval (b:bnat) : nat := 53 | 0. 54 | 55 | 56 | (* EXERCISE: Prove that if `bval b = 0`, then `b = BZ`. *) 57 | Lemma bval_zero (b:bnat) : 58 | bval b = 0 -> b = BZ. 59 | Proof. 60 | Abort. 61 | 62 | 63 | (* EXERCISE: Prove that every natural number has a unique 64 | binary representation. *) 65 | Lemma bval_uniq (b c:bnat) : 66 | bval b = bval c -> b = c. 67 | Proof. 68 | (* Your first tactic should be `generalize c`. 69 | Don't forget `discriminate`, `clean_arithmetic`, and `omega`. *) 70 | generalize c. 71 | Abort. 72 | 73 | 74 | (* EXERCISE: Write a function that returns the successor of `b`. 75 | You'll want to work this out on paper first, and there will 76 | be at least one recursive call to `bsucc`. *) 77 | Function bsucc (b:bnat) : bnat := 78 | 0. 79 | 80 | 81 | (* EXERCISE: Prove that your `bsucc` is correct. *) 82 | Lemma bsucc_correct (b:bnat) : 83 | bval (bsucc b) = S (bval b). 84 | Proof. 85 | (* If regular induction doesn't work, try 86 | `functional induction (bval b)`, which runs induction over the 87 | cases of the `bval` function. *) 88 | Abort. 89 | 90 | 91 | (* This function returns the length of a binary number. *) 92 | Function blen (b:bnat) : nat := 93 | match b with 94 | | BZ => 0 95 | | BD1 b' => S (blen b') 96 | | BD2 b' => S (blen b') 97 | end. 98 | 99 | 100 | (* The sum of the lengths of two binary numbers in a pair. 
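It serves as the termination measure for `baddpair` and `blesspair`
below: for example, bpairlen (BD1 BZ, BD2 (BD1 BZ)) = 1 + 2 = 3, and
each recursive call must strictly decrease this measure.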
*) 101 | Definition bpairlen (x:bnat*bnat) : nat := 102 | blen (fst x) + blen (snd x). 103 | 104 | 105 | (* EXERCISE: Write a function that adds two binary numbers. 106 | You'll want to work this out on paper first too. Everything 107 | works out easier if the numbers are passed in a pair; you 108 | can decompose the pair with `fst` and `snd` (Check them), 109 | or with matching. 110 | 111 | After writing the function, you'll need to prove that its 112 | recursion terminates. Do that too. *) 113 | Function baddpair (x:bnat*bnat) {measure bpairlen x} : bnat := 114 | 0. 115 | (* Proof. ... Qed. *) 116 | 117 | (* A more conventional adder function: *) 118 | Definition badd (x y:bnat) := baddpair (x,y). 119 | 120 | 121 | (* EXERCISE: Prove that your add function is correct. *) 122 | Lemma baddpair_correct (x:bnat*bnat) : 123 | bval (baddpair x) = bval (fst x) + bval (snd x). 124 | Proof. 125 | Abort. 126 | 127 | 128 | (* EXERCISE: Write a function that determines whether 129 | `fst x < snd x`. Return either False or True. *) 130 | Function blesspair (x:bnat*bnat) {measure bpairlen x} : Prop := 131 | True. 132 | (* Proof. ... Qed. *) 133 | 134 | Definition bless (x y:bnat) : Prop := blesspair (x,y). 135 | 136 | 137 | (* EXERCISE: Prove that `blesspair` is correct. 138 | For me this was the hardest proof in this module. *) 139 | Lemma blesspair_correct (x:bnat*bnat) : 140 | bval (fst x) < bval (snd x) <-> blesspair x. 141 | Proof. 142 | Abort. 143 | 144 | End Pset1Bnat. 145 | -------------------------------------------------------------------------------- /pset1/pset1btree.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | 7 | Module Pset1Btree. 8 | Import ListNotations. 9 | 10 | (* These comparison tactics are useful for this module. *) 11 | Lemma compare_dec (n m:nat): 12 | {n < m /\ n ?= m = Lt} 13 | + {n = m /\ n ?= m = Eq} 14 | + {n > m /\ n ?= m = Gt}. 15 | Proof. 16 | destruct (lt_eq_lt_dec n m); try destruct s. 17 | - left; left; split; auto; apply nat_compare_lt; auto. 18 | - left; right; split; auto; apply nat_compare_eq_iff; auto. 19 | - right; split; auto; apply nat_compare_gt; auto. 20 | Qed. 21 | 22 | Ltac destruct_compare n m := 23 | let Hlt := fresh "Hlt" in 24 | let Hc := fresh "Hc" in 25 | let Hp := fresh "Hp" in 26 | destruct (compare_dec n m) as [Hlt | Hlt]; 27 | try destruct Hlt as [Hlt | Hlt]; 28 | destruct Hlt as [Hp Hc]. 29 | 30 | 31 | (* A `nattree` is a tree of natural numbers, where every internal 32 | node has an associated number and leaves are empty. There are 33 | two constructors, L (empty leaf) and I (internal node). 34 | I's arguments are: left-subtree, number, right-subtree. *) 35 | Inductive nattree : Set := 36 | | L : nattree (* Leaf *) 37 | | I : nattree -> nat -> nattree -> nattree. (* Internal nodes *) 38 | 39 | (* Some example nattrees. *) 40 | Definition empty_nattree := L. 41 | Definition singleton_nattree := I L 0 L. 42 | Definition right_nattree := I L 0 (I L 1 (I L 2 (I L 3 L))). 43 | Definition left_nattree := I (I (I (I L 0 L) 1 L) 2 L) 3 L. 44 | Definition balanced_nattree := I (I L 0 (I L 1 L)) 2 (I L 3 L). 45 | Definition unsorted_nattree := I (I L 3 (I L 1 L)) 0 (I L 2 L). 46 | 47 | 48 | (* Flatten a `nattree` into a list of nats using inorder traversal. 
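For example, flatten balanced_nattree is [0; 1; 2; 3], while
flatten unsorted_nattree is [3; 1; 0; 2] (the `Eval compute` lines
below print these).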
*) 49 | Function flatten (t:nattree) : list nat := 50 | match t with 51 | | L => nil 52 | | I l n r => flatten l ++ (n :: flatten r) 53 | end. 54 | 55 | Eval compute in flatten empty_nattree. 56 | Eval compute in flatten singleton_nattree. 57 | Eval compute in flatten right_nattree. 58 | Eval compute in flatten left_nattree. 59 | Eval compute in flatten balanced_nattree. 60 | Eval compute in flatten unsorted_nattree. 61 | 62 | 63 | (* EXERCISE: Complete this proposition, which should be `True` 64 | iff `x` is located somewhere in `t` (even if `t` is unsorted, 65 | i.e., not a valid binary search tree). *) 66 | Function btree_in (x:nat) (t:nattree) : Prop := 67 | False. 68 | 69 | (* EXERCISE: Complete these examples, which show `btree_in` works. 70 | Hint: The same proof will work for every example. 71 | End each example with `Qed.`. *) 72 | Example btree_in_ex1 : ~ btree_in 0 empty_nattree. 73 | Abort. 74 | Example btree_in_ex2 : btree_in 0 singleton_nattree. 75 | Abort. 76 | Example btree_in_ex3 : btree_in 2 right_nattree. 77 | Abort. 78 | Example btree_in_ex4 : btree_in 2 left_nattree. 79 | Abort. 80 | Example btree_in_ex5 : btree_in 2 balanced_nattree. 81 | Abort. 82 | Example btree_in_ex6 : btree_in 2 unsorted_nattree. 83 | Abort. 84 | Example btree_in_ex7 : ~ btree_in 10 balanced_nattree. 85 | Abort. 86 | 87 | 88 | (* EXERCISE: Complete this function, which should return `true` 89 | iff `x` is in the valid binary search tree `t` (and `false` 90 | otherwise). 91 | 92 | Note that this function returns `bool` (lower-case `true` and 93 | `false`), not `Prop` (upper-case `True` and `False`), because 94 | we imagine using it at execution time, not only in proofs. *) 95 | Function binsearch (x:nat) (t:nattree) : bool := 96 | false. 97 | 98 | 99 | (* In the rest of this module you will prove that your `binsearch` 100 | is correct. There are two parts. First, we prove that if 101 | `binsearch` returns true, then the searched-for number is in the 102 | tree. *) 103 | 104 | (* EXERCISE: Complete this lemma, which says that every nat that 105 | bsearch can find is in the tree. *) 106 | Lemma binsearch_in {x:nat} {t:nattree}: 107 | binsearch x t = true -> btree_in x t. 108 | Proof. 109 | (* Remember the `destruct_compare n m` tactic! *) 110 | Abort. 111 | 112 | 113 | (* Then, we prove that if the searched-for number is in the 114 | tree *and the tree is valid binary search tree*, then `binsearch` 115 | returns true. 116 | 117 | For this we need a definition of “valid binary search tree.” 118 | 119 | It's important to choose a good definition. Think more like a prover 120 | than an implementer. For proof purposes, use high-level lemmas 121 | with less-complicated types, and don't worry about traversing the 122 | tree multiple times. *) 123 | 124 | (* EXERCISE: Complete this proposition, which is True iff every 125 | number in the tree is `<= ub`. *) 126 | Function btree_le (t:nattree) (ub:nat) : Prop := 127 | True. 128 | 129 | (* EXERCISE: Complete this proposition, which is True iff every 130 | number in the tree is `>= lb`. *) 131 | Function btree_ge (t:nattree) (lb:nat) : Prop := 132 | True. 133 | 134 | (* EXERCISE: Complete this proposition, which is True iff `t` 135 | is a valid binary search tree---that is, an in-order traversal 136 | visits the numbers in increasing order. *) 137 | Function btree_sorted (t:nattree) : Prop := 138 | True. 139 | 140 | 141 | (* EXERCISE: Complete this lemma, which says that every element 142 | of a tree with an upper bound is `<=` that upper bound. 
143 | 144 | Note that the arguments are in {curly braces}, not (parens). 145 | Arguments in curly braces are implicit; Coq will determine 146 | them from context when the lemma is applied. *) 147 | Lemma btree_in_le {x:nat} {t:nattree} {ub:nat}: 148 | btree_in x t -> btree_le t ub -> x <= ub. 149 | Proof. 150 | (* You will probably find the `destruct_pairs` tactic useful. 151 | This tactic unpacks long `and` chains `X /\ Y /\ ...` into 152 | individual hypotheses. *) 153 | Abort. 154 | 155 | (* EXERCISE: Complete this lemma, which says that every element 156 | of a tree with a lower bound is `>=` that lower bound. *) 157 | Lemma btree_in_ge {x:nat} {t:nattree} {lb:nat}: 158 | btree_in x t -> btree_ge t lb -> x >= lb. 159 | Proof. 160 | Abort. 161 | 162 | 163 | (* EXERCISE: Complete this lemma, says which that in a binary 164 | search tree, smaller elements are in left subtrees. *) 165 | Lemma btree_sorted_in_left {x:nat} {l:nattree} {y:nat} {r:nattree}: 166 | btree_sorted (I l y r) -> btree_in x (I l y r) -> x < y 167 | -> btree_in x l. 168 | Proof. 169 | Abort. 170 | 171 | (* EXERCISE: Complete this lemma, which says that in a binary 172 | search tree, large elements are in right subtrees. *) 173 | Lemma btree_sorted_in_right {x:nat} {l:nattree} {y:nat} {r:nattree}: 174 | btree_sorted (I l y r) -> btree_in x (I l y r) -> x > y 175 | -> btree_in x r. 176 | Proof. 177 | Abort. 178 | 179 | 180 | (* EXERCISE: Complete the main lemma, which says that `binsearch` 181 | can find every element in a binary search tree. *) 182 | Lemma in_binsearch {x:nat} {t:nattree}: 183 | btree_sorted t -> btree_in x t -> binsearch x t = true. 184 | Proof. 185 | Abort. 186 | 187 | 188 | (* Finally, we relate the `flatten` function to the tree's contents. 189 | The proposition `In x l` is True iff `x` is a member of `l`. *) 190 | 191 | (* EXERCISE: Complete this lemma. *) 192 | Lemma In_btree {x:nat} {t:nattree}: 193 | btree_in x t <-> In x (flatten t). 194 | Proof. 195 | Abort. 196 | 197 | End Pset1Btree. 198 | -------------------------------------------------------------------------------- /pset1/pset1compare.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | 7 | Module Pset1Compare. 8 | Import ListNotations. 9 | 10 | (* These exercises are warmups about natural number comparison. *) 11 | 12 | (* COMPARISON PROPOSITIONS 13 | `n < m` is a proposition that's True if `n` is less than `m`, and 14 | False otherwise. 15 | Similarly for `n = m`, `n > m`. 16 | Comparison propositions are useful during proofs. Like all propositions 17 | (statements of kind `Prop`), they are ignored when Coq is compiled to 18 | efficient executable code. 19 | 20 | COMPARISON OBJECTS (SETS) 21 | `n ?= m` is a function that takes two `nat`s and returns an object 22 | of type `comparison`. 23 | `n ?= m` is `Lt` if `n` is less than `m`, `Eq` if they are equal, and 24 | `Gt` if `n` is greater than `m`. 25 | Comparison object functions are useful for executable code. 26 | 27 | The library has named lemmas showing that the two notions of comparison 28 | mean the same thing. *) 29 | 30 | (* Example computations *) 31 | Eval compute in 1 < 2. 32 | Eval compute in 2 < 1. 33 | Eval compute in 1 ?= 2. 34 | 35 | (* Look up some facts about comparison notation *) 36 | Locate "_ ?= _". 37 | Check Nat.compare. 38 | Print Nat.compare. 39 | Check comparison. 
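(* `comparison` is an inductive Set with just three constructors --
   Eq, Lt, and Gt -- and `Nat.compare` (the function behind `?=`)
   returns one of them. *)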
40 | 41 | (* Look up some comparison lemmas *) 42 | Search (_ ?= _). 43 | Search (_ ?= _ = Lt -> _ < _). 44 | 45 | (* Prove our own version of `Lt => <`. 46 | Demonstrate several tactics. *) 47 | Lemma my_compare_Lt_lt (n m:nat): 48 | n ?= m = Lt -> n < m. 49 | Proof. 50 | intros. 51 | Search (_ ?= _ = Lt -> _ < _). 52 | apply nat_compare_Lt_lt. 53 | auto. 54 | (* we're done but *) Restart. 55 | 56 | (* this works too: *) 57 | intros. 58 | apply nat_compare_Lt_lt in H. 59 | (* Quiz: how's that different? *) 60 | auto. 61 | Restart. 62 | 63 | (* all at once: *) 64 | apply nat_compare_Lt_lt. 65 | Restart. 66 | 67 | (* `apply` can take a full expression: *) 68 | intros. 69 | apply (nat_compare_Lt_lt n m H). 70 | Qed. 71 | 72 | 73 | 74 | (* EXERCISE: Prove this lemma. *) 75 | Lemma nat_compare_Lt_gt (n m:nat): 76 | n ?= m = Lt -> m > n. 77 | Proof. 78 | (* If you want to skip this exercise for now, replace `Abort` 79 | with `Admitted`. But remember to finish it! You want to end 80 | with `Qed`. *) 81 | Abort. 82 | 83 | 84 | (* Coq provides a powerful tactic `omega` that knows a lot about 85 | arithmetic---but not everything. Let's see how smart it is. *) 86 | 87 | Lemma arithmetic1 (n m:nat): 88 | 100 < n -> 30 < 3 * m -> 110 < n + m. 89 | Proof. 90 | (* Without `omega`, look what we'd need. 91 | Useful to know all the lemmas exist, but a pain. *) 92 | intros. 93 | simpl in H0. 94 | Search (_ + 0). 95 | rewrite <- plus_n_O in H0. 96 | Search (_ + (_ + _)). 97 | rewrite <- plus_assoc_reverse in H0. 98 | assert (10 < m). 99 | (* Now we'll prove by contradiction that m > 10. *) 100 | Search (~ _ >= _ -> _ < _). 101 | apply not_ge. 102 | intro. (* Because `~ (10 >= m)` means `10 >= m -> False`. *) 103 | unfold ge in H1. (* `ge`/`>=` is an abbreviation for `le`/`<=`, 104 | so there are more lemmas about `<=`. *) 105 | Search (_ <= _ -> _ + _ <= _ + _). 106 | assert (m + m + m <= 10 + 10 + 10). 107 | apply plus_le_compat; auto. 108 | apply plus_le_compat; auto. 109 | simpl in H2. 110 | Search (_ <= _ -> ~ _ > _). 111 | apply le_not_gt in H2. 112 | unfold gt in H2. 113 | contradiction. 114 | Search (_ + _ < _ + _). 115 | apply (plus_lt_compat 100 n 10 m). 116 | auto. 117 | auto. 118 | Restart. 119 | 120 | (* But `omega`, on the other hand! *) 121 | omega. 122 | Qed. 123 | 124 | 125 | (* DECIDABILITY *) 126 | (* Types like `{A} + {B}` represent cases where (at least) one of two 127 | propositions hold. For instance, given two natural numbers, 128 | n and m, we know that one of `n < m`, `n = m`, `m < n` holds. 129 | This is represented by `lt_eq_lt_dec`: *) 130 | Check lt_eq_lt_dec. 131 | (* Decidability propositions are especially useful for `destruct`, 132 | as you'll see now. *) 133 | 134 | (* EXERCISE *) 135 | Lemma lt_or_le (n m:nat): 136 | n < m \/ m <= n. 137 | Proof. 138 | destruct (lt_eq_lt_dec n m). 139 | destruct s. 140 | (* You now have three cases. `omega` could complete each case, but 141 | you should use `omega` AT MOST ONCE. Use `left`, `right`, `auto`, 142 | etc. *) 143 | Abort. 144 | 145 | 146 | (* Let's prove our own decidability proposition, which will decide 147 | comparison propositions and objects simultaneously. *) 148 | 149 | Lemma compare_dec (n m:nat): 150 | {n < m /\ n ?= m = Lt} 151 | + {n = m /\ n ?= m = Eq} 152 | + {n > m /\ n ?= m = Gt}. 153 | Proof. 154 | (* EXERCISE: Complete the proof. Use `Search` to find the tactics 155 | that link `?=` with comparison propositions. *) 156 | Abort. 
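(* If you get stuck: the identical statement is proved at the top of
   pset1btree.v, starting from `lt_eq_lt_dec`. *)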
157 | 158 | 159 | (* An example proof using `compare_dec` (redundant with the standard library) *) 160 | Lemma nat_compare_Lt_lt (n m:nat): 161 | n ?= m = Lt -> n < m. 162 | Proof. 163 | intros. destruct (compare_dec n m). destruct s. 164 | - (* Case 1: < *) destruct a. auto. 165 | - (* Case 2: = *) destruct a. rewrite H in H1. discriminate. 166 | - (* Case 3: > *) destruct a. rewrite H in H1. discriminate. 167 | Qed. 168 | 169 | 170 | (* EXERCISE: Use `compare_dec` to prove: *) 171 | Lemma lt_eq_Lt (n m p:nat): 172 | n < m -> p = m -> n ?= p = Lt. 173 | Proof. 174 | (* Use tactic `destruct` to unpack a logical-and hypothesis. *) 175 | (* Don't forget `omega` knows a lot about arithmetic. *) 176 | Abort. 177 | 178 | 179 | (* Working with `compare_dec` is kind of irritating; there's 180 | a lot of make-work: you're always destructing `s`. 181 | We can make it a lot easier by *writing our own tactic.* 182 | After this statement, we can say `destruct_compare n m` instead 183 | of `destruct (compare_dec n m); try destruct s; ...`. *) 184 | Ltac destruct_compare n m := 185 | let Hlt := fresh "Hlt" in 186 | let Hc := fresh "Hc" in 187 | let H := fresh "Hp" in 188 | destruct (compare_dec n m) as [Hlt | Hlt]; 189 | try destruct Hlt as [Hlt | Hlt]; 190 | destruct Hlt as [H Hc]. 191 | 192 | 193 | (* EXERCISE: Use `destruct_compare` to prove: *) 194 | Lemma gt_Gt_Gt (n m p:nat): 195 | n > m -> m ?= p = Gt -> n ?= p = Gt. 196 | Proof. 197 | (* `destruct_compare` may not lead to the shortest possible 198 | proof, but it does have the advantage that there's fewer 199 | lemma names to remember -- `destruct_compare` introduces 200 | a lot of facts about comparison all at once. 201 | 202 | You will need a lemma about greater-than's transitivity. 203 | Coq, of course, has one already. You can't `apply` it 204 | directly -- you'll need to supply arguments: 205 | `apply (LEMMA_NAME ARG1 ARG2 ARG3 HYPOTHESIS) in ...`. *) 206 | Abort. 207 | 208 | End Pset1Compare. 209 | -------------------------------------------------------------------------------- /pset2/README.md: -------------------------------------------------------------------------------- 1 | Problem set 2 2 | ------------- 3 | 4 | Merge this problem set with your code with 5 | 6 | git pull git://github.com/readablesystems/cs260r-17 master 7 | 8 | (or do a pull or merge using GitHub). 9 | 10 | Then finish the marked exercises in the `pset2permutation.v` file. 11 | 12 | Due Monday 2/13 at midnight. 13 | -------------------------------------------------------------------------------- /pset2/pset2permutation.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Require Import Program.Equality. 7 | Import ListNotations. 8 | 9 | (* You may find this lemma useful later. *) 10 | Lemma in_middle: forall {A} a (xs1 xs2:list A), In a (xs1 ++ a :: xs2). 11 | induction xs1; intros; simpl; [ left | right ]; auto. 12 | Qed. 13 | 14 | 15 | Section Pset2PermutationDef. 16 | (* All definitions in this section will be predicated on a type 17 | argument, A. *) 18 | Context {A: Type}. 19 | 20 | 21 | (* PERMUTATIONS 22 | 23 | List `xs` is a permutation of list `ys` iff `xs` and `ys` have 24 | the same elements regardless of order. 
Coq’s library comes with 25 | an inductive definition of permutations: `xs` is a permutation of 26 | `ys` iff `xs` can be obtained from `ys` via a series of swap 27 | operations. This is maybe a little surprising, but it works 28 | great. This problem set section has you work with this 29 | definition. We repeat the definition (using the library’s terms) 30 | and prove many library lemmas. *) 31 | 32 | 33 | (* A Permutation is a relation between lists of A, so a permutation 34 | proposition looks like `Permutation x y`. *) 35 | 36 | Inductive Permutation: list A -> list A -> Prop := 37 | 38 | (* The empty list is a permutation of itself. *) 39 | | perm_nil: Permutation [] [] 40 | 41 | (* Adding an element preserves the permutation relation. 42 | (Called `perm_skip` rather than `perm_cons` because `apply`ing 43 | it in a theorem conclusion removes an element.) *) 44 | | perm_skip : forall a xs ys, 45 | Permutation xs ys -> Permutation (a :: xs) (a :: ys) 46 | 47 | (* Swapping the head elements preserves the permutation relation. *) 48 | | perm_swap : forall a b xs, 49 | Permutation (a :: b :: xs) (b :: a :: xs) 50 | 51 | (* Permutation is closed under transitivity. *) 52 | | perm_trans : forall xs ys zs, 53 | Permutation xs ys -> Permutation ys zs -> Permutation xs zs. 54 | 55 | End Pset2PermutationDef. 56 | 57 | 58 | Section Pset2PermutationEx. 59 | (* Prove that a permutation exists, the painful way *) 60 | Example sample_permutation: 61 | Permutation [1; 2; 3; 4; 5] [4; 1; 5; 2; 3]. 62 | Proof. 63 | apply perm_trans with (ys:=[1; 4; 5; 2; 3]); 64 | [ | (* proving `Permutation [1; 4; 5; 2; 3] [4; 1; 5; 2; 3]`: *) 65 | apply perm_swap ]. 66 | apply perm_skip. 67 | apply perm_trans with (ys:=[4; 2; 5; 3]); 68 | [ | (* proving `Permutation [4; 2; 5; 3] [4; 5; 2; 3]`: *) 69 | apply perm_skip; 70 | apply perm_swap ]. 71 | apply perm_trans with (ys:=[2; 4; 5; 3]); 72 | [ | apply perm_swap ]. 73 | apply perm_skip. 74 | apply perm_trans with (ys:=[4; 3; 5]); 75 | [ | apply perm_skip; 76 | apply perm_swap ]. 77 | apply perm_trans with (ys:=[3; 4; 5]); 78 | [ | apply perm_swap ]. 79 | apply perm_skip. 80 | apply perm_skip. 81 | apply perm_skip. 82 | apply perm_nil. 83 | Qed. 84 | 85 | 86 | (* This says that Permutation constructors should be applied by 87 | `auto` and `eauto`. *) 88 | Hint Constructors Permutation. 89 | 90 | Example sample_permutation': 91 | Permutation [1; 2; 3; 4; 5] [4; 1; 5; 2; 3]. 92 | Proof. 93 | (* Note how `Hint Constructors` simplifies the proof! When a constructor 94 | applies, `auto` applies it. *) 95 | apply perm_trans with (ys:=[1; 4; 5; 2; 3]); auto. 96 | constructor. (* Applies the first constructor that works. *) 97 | apply perm_trans with (ys:=[4; 2; 5; 3]); auto. 98 | apply perm_trans with (ys:=[2; 4; 5; 3]); auto. 99 | constructor. 100 | apply perm_trans with (ys:=[4; 3; 5]); auto. 101 | Qed. 102 | End Pset2PermutationEx. 103 | 104 | 105 | Section Pset2PermutationLemmas. 106 | Hint Constructors Permutation. 107 | Context {A: Type}. 108 | 109 | (* We can perform induction on permutation hypotheses. The induction 110 | cases are just the induction constructors. Here are three simple 111 | lemmas to show how that works. *) 112 | 113 | Lemma Permutation_nil (xs:list A): 114 | Permutation xs [] -> xs = []. 115 | Proof. 116 | intros. 117 | (* need to `remember` the empty set or induction will fail! *) 118 | remember nil as ys. 119 | induction H. 120 | - auto. 121 | - discriminate. 122 | - discriminate. 123 | - pose (IHPermutation2 Heqys); rewrite e in *. 
124 | pose (IHPermutation1 Heqys). auto. 125 | 126 | (* With a bit of care, a shorter proof appears. *) 127 | Restart. 128 | intros; remember nil as ys in H. (* NB remember ONLY in H! *) 129 | induction H; try discriminate; auto. 130 | Qed. 131 | 132 | 133 | Lemma Permutation_refl (xs:list A): 134 | Permutation xs xs. 135 | Proof. 136 | (* Sometimes it’s easier to induct on the list. *) 137 | induction xs; auto. 138 | Qed. 139 | Local Hint Resolve Permutation_refl. 140 | 141 | 142 | Lemma Permutation_sym (xs ys:list A): 143 | Permutation xs ys -> Permutation ys xs. 144 | Proof. 145 | intros; induction H; auto. 146 | apply (perm_trans _ _ _ IHPermutation2); auto. 147 | Qed. 148 | 149 | 150 | (* Now it’s your turn! Complete these lemmas. You may complete 151 | the proofs in any order, but don’t use later lemmas in earlier 152 | proofs. *) 153 | 154 | Theorem Permutation_in (xs ys:list A) a: 155 | Permutation xs ys -> In a xs -> In a ys. 156 | Admitted. 157 | 158 | Lemma Permutation_app_tail (xs ys zs:list A) : 159 | Permutation xs ys -> Permutation (xs ++ zs) (ys ++ zs). 160 | Admitted. 161 | 162 | Lemma Permutation_app_head (xs ys zs:list A) : 163 | Permutation ys zs -> Permutation (xs ++ ys) (xs ++ zs). 164 | Admitted. 165 | 166 | Lemma Permutation_app (xs1 ys1 xs2 ys2:list A) : 167 | Permutation xs1 ys1 -> 168 | Permutation xs2 ys2 -> 169 | Permutation (xs1 ++ xs2) (ys1 ++ ys2). 170 | Admitted. 171 | 172 | Lemma Permutation_ends a (xs:list A): 173 | Permutation (a :: xs) (xs ++ [a]). 174 | Admitted. 175 | 176 | Lemma Permutation_app_comm (xs ys:list A) : 177 | Permutation (xs ++ ys) (ys ++ xs). 178 | Admitted. 179 | Local Hint Resolve Permutation_app_comm. 180 | 181 | Lemma Permutation_middle (xs ys:list A) a: 182 | Permutation (a :: xs ++ ys) (xs ++ a :: ys). 183 | Admitted. 184 | Local Hint Resolve Permutation_middle. 185 | 186 | 187 | 188 | Lemma Permutation_add a (xs ys:list A): 189 | Permutation xs ys -> 190 | forall xsm ysm, Add a xsm xs -> 191 | Add a ysm ys -> 192 | Permutation xsm ysm. 193 | (* ************************************* *) 194 | (* YOU DO NOT NEED TO PROVE THIS LEMMA!! *) 195 | (* ************************************* *) 196 | 197 | (* First, check out and understand the `Add` inductive type... *) 198 | Print Add. 199 | (* ...and some example `Add` facts. *) 200 | Check Add_split. 201 | Check Add_in. 202 | 203 | (* EXPLANATION 204 | 205 | This lemma statement is equivalent to 206 | `Permutation (xs1 ++ a :: xs2) (ys1 ++ a :: ys2) -> 207 | Permutation (xs1 ++ xs2) (ys1 ++ ys2)`. 208 | Why should that be hard to prove? Seems obvious! 209 | 210 | But while it’s easy to make larger Permutations from smaller ones, 211 | it’s quite hard to make *smaller* permutations from *larger* ones. 212 | 213 | The reason is that two lists can be `Permutation`s in 214 | *many different ways,* complicating the task of reversing 215 | a given Permutation. 216 | 217 | For instance, assume `P : Permutation (a :: xs) (a :: ys)`. 218 | `P` could have been created by `perm_skip` from 219 | `Permutation xs ys`, but it might have been created in many other 220 | ways too! For instance, perhaps it was created by `perm_trans` 221 | from `Permutation (a :: b :: xs') (c :: a :: ys')` and 222 | `Permutation (c :: a :: ys') (a :: c :: ys')`. So you can’t 223 | just apply `inversion` to `P` to reverse it: the inversion 224 | spins off more and more cases. Induction doesn’t help either, 225 | and for the same reason. 226 | 227 | A secondary problem is that `a` might be in the lists multiple 228 | times. 
So it’s not enough to prove that *some* `a`s can be removed 229 | from each list, you need to prove that the `a`s *in the 230 | hypothesized positions* can be removed. (The `Add` inductive 231 | type eases this problem somewhat.) 232 | 233 | But the lemma *is* true, and for advanced optional work, you may 234 | try to prove it. 235 | 236 | The library proves this lemma by first stating, and then 237 | proving, that a more general induction scheme for Permutations 238 | is equivalent to the natural scheme generated from the 239 | definition. Thought exercise: What might such a scheme be? *) 240 | Admitted. 241 | Local Hint Constructors Add. 242 | 243 | 244 | (* Finish off by proving these. *) 245 | 246 | Lemma Permutation_cons_inv (xs ys:list A) a : 247 | Permutation (a :: xs) (a :: ys) -> 248 | Permutation xs ys. 249 | Admitted. 250 | 251 | Lemma Permutation_rev (xs:list A): 252 | Permutation xs (rev xs). 253 | Admitted. 254 | 255 | Lemma Permutation_length (xs ys:list A): 256 | Permutation xs ys -> length xs = length ys. 257 | Admitted. 258 | 259 | Lemma Permutation_length_1_inv a (xs:list A): 260 | Permutation [a] xs -> xs = [a]. 261 | Admitted. 262 | 263 | 264 | (* The library has many more lemmas: 265 | https://coq.inria.fr/library/Coq.Sorting.Permutation.html 266 | Try to prove one or two others if you want! *) 267 | End Pset2PermutationLemmas. 268 | 269 | 270 | Section Pset2AlternatePermutation. 271 | Hint Constructors Permutation. 272 | Local Hint Resolve Permutation_app_comm. 273 | Local Hint Resolve Permutation_middle. 274 | 275 | 276 | (* In this section you’ll experiment with alternate definitions 277 | of permutations. We’ll start out with an expert’s definition 278 | of permutation from 2014. This definition is INCORRECT. *) 279 | 280 | Definition sublist {A} (xs ys:list A) : Prop := 281 | forall x, In x xs -> In x ys. 282 | 283 | Definition pseudopermutation {A} (xs ys:list A) : Prop := 284 | sublist xs ys /\ length xs = length ys. 285 | 286 | (* YOUR CODE HERE: Provide a pseudopermutation that is 287 | not a permutation. *) 288 | Example bad_pseudopermutation: 289 | exists (xs ys:list nat), 290 | pseudopermutation xs ys /\ ~ Permutation xs ys. 291 | Admitted. 292 | 293 | 294 | (* Now write an alternate permutation definition that *is* correct. 295 | 296 | Your definition can be either computational (e.g., a Function 297 | Fixpoint) or inductive, but it should differ substantially from 298 | Coq’s. In particular, your definition should NOT assume 299 | transitivity, the way Coq’s does. (Your equivalent of `perm_trans` 300 | should be a lemma, not an inductive case.) 301 | 302 | It’s recommended to start with an inductive definition. If 303 | you choose a computational definition, you’ll need the `eq_dec` 304 | hypothesis, which says that variables of type A can be 305 | distinguished. *) 306 | 307 | Context {A:Type}. 308 | 309 | Hypothesis eq_dec : forall x y : A, {x = y} + {x <> y}. 310 | (* An example theorem with this type: Nat.eq_dec. *) 311 | 312 | 313 | (* YOUR CODE HERE *) 314 | 315 | 316 | (* Now prove that your permutation definition and `Permutation` mean 317 | the same thing. Write the lemma statements and finish the proofs. 318 | 319 | NOTE! You will almost certainly need helper lemmas. One very 320 | helpful helper will be a version of `Permutation_add`. You may 321 | leave your version of `Permutation_add` as `Admitted` (as long as 322 | it’s actually true!), but all other lemmas should be proven. 
*) 323 | 324 | (* YOUR CODE HERE *) 325 | 326 | End Pset2AlternatePermutation. 327 | -------------------------------------------------------------------------------- /pset3/README.md: -------------------------------------------------------------------------------- 1 | Problem set 3 2 | ------------- 3 | 4 | Merge this problem set with your code with 5 | 6 | git pull git://github.com/readablesystems/cs260r-17 master 7 | 8 | (or do a pull or merge using GitHub). 9 | 10 | Then finish the marked exercises in the `pset3sort.v` and `pset3regex.v` files. 11 | The sort exercises are probably easier. 12 | 13 | This problem set gives you a lot of rope. Do not hang yourself. 14 | 15 | Due Wednesday 2/22 at midnight. 16 | -------------------------------------------------------------------------------- /pset3/pset3regex.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import String. 5 | Require Import List. 6 | Require Import Program.Tactics. 7 | Require Import Relation_Operators. 8 | Require Import Ascii. 9 | Require Import Program. 10 | Require Import Bool. 11 | Require Import Sorting.Permutation. 12 | 13 | Section CharacterFacts. 14 | (* Perhaps useful in your regular-expression development. *) 15 | 16 | (* `ascii_eq` is an executable equality test for characters. *) 17 | Definition ascii_eq (a b:ascii) : bool := 18 | match (a, b) with 19 | | (Ascii a1 a2 a3 a4 a5 a6 a7 a8, 20 | Ascii b1 b2 b3 b4 b5 b6 b7 b8) => 21 | eqb a1 b1 && eqb a2 b2 && eqb a3 b3 && eqb a4 b4 && 22 | eqb a5 b5 && eqb a6 b6 && eqb a7 b7 && eqb a8 b8 23 | end. 24 | 25 | Lemma ascii_eq_refl (a:ascii): 26 | ascii_eq a a = true. 27 | Proof. 28 | destruct a; unfold ascii_eq; repeat rewrite eqb_reflx; simpl; auto. 29 | Qed. 30 | 31 | (* `ascii_eq` is equivalent to character equality. *) 32 | Lemma ascii_eq_iff a b: 33 | ascii_eq a b = true <-> a = b. 34 | Proof. 35 | split; intros. 36 | - unfold ascii_eq in H; destruct a; destruct b; 37 | repeat rewrite andb_true_iff in *; destruct_pairs; 38 | rewrite eqb_true_iff in *; congruence. 39 | - rewrite H; apply ascii_eq_refl. 40 | Qed. 41 | End CharacterFacts. 42 | 43 | 44 | Section StringFacts. 45 | (* Perhaps useful in your regular-expression development. *) 46 | 47 | (* Coq `string`s are constructed from characters using a 48 | `list`-like cons procedure. The two `string` constructors 49 | are `EmptyString` (also written ` ""%string `; like `nil`) 50 | and `String ch s` (which is like `ch :: s`). *) 51 | 52 | 53 | (* `append` is the string version of `app` (list append). 54 | The Coq library has fewer `append` lemmas; we provide them. *) 55 | 56 | Lemma append_nil_l s: 57 | ("" ++ s)%string = s. 58 | Proof. 59 | simpl; auto. 60 | Qed. 61 | 62 | Lemma append_nil_r s: 63 | (s ++ "")%string = s. 64 | Proof. 65 | induction s; simpl; try rewrite IHs; auto. 66 | Qed. 67 | 68 | Lemma append_assoc s1 s2 s3: 69 | (s1 ++ s2 ++ s3)%string = ((s1 ++ s2) ++ s3)%string. 70 | Proof. 71 | induction s1; simpl; try rewrite IHs1; auto. 72 | Qed. 73 | 74 | Lemma append_comm_cons a s1 s2: 75 | (String a s1 ++ s2)%string = String a (s1 ++ s2)%string. 76 | Proof. 77 | induction s1; simpl; auto. 78 | Qed. 79 | 80 | 81 | Definition strlen := String.length. 82 | 83 | Lemma append_strlen_l s1 s2: 84 | strlen s1 <= strlen (s1 ++ s2). 85 | Proof. 86 | induction s1; simpl; try rewrite IHs1; omega. 87 | Qed. 88 | 89 | Lemma append_strlen_r s1 s2: 90 | strlen s2 <= strlen (s1 ++ s2). 
91 | Proof. 92 | induction s1; simpl; try rewrite IHs1; omega. 93 | Qed. 94 | End StringFacts. 95 | 96 | 97 | (* Define a type that represents regular expressions. 98 | 99 | Support the following grammar (but you don’t need to 100 | use this concrete syntax): 101 | 102 | regex ::= 'ε' // matches the empty string 103 | | 'ANY' // matches any character 104 | | CHAR // matches a specific character 105 | | regex '++' regex // concatenation 106 | | regex '|' regex // or 107 | | regex '*' // Kleene star 108 | 109 | This type *represents* regular expressions in memory, 110 | so it is a `Set` (not a `Prop`). *) 111 | 112 | (* YOUR CODE HERE *) 113 | 114 | 115 | 116 | (* Now define an inductive proposition with two parameters, 117 | a regex and a string, that holds iff the regex matches 118 | the string. 119 | 120 | (This type *defines* the semantics of regular expressions, 121 | so you can’t prove it correct---it just needs to match 122 | the expected semantics.) *) 123 | 124 | (* YOUR CODE HERE *) 125 | 126 | 127 | 128 | (* Write a couple `Example`s that show that specific regular 129 | expressions match specific strings, and prove them. *) 130 | 131 | (* YOUR CODE HERE *) 132 | 133 | 134 | 135 | (* State and prove a theorem that relates Kleene-star and 136 | concatenation. Your theorem should say that if `r*` matches 137 | a string `s`, then there exists a regex `r ++ r ++ ... ++ r` 138 | that matches `s`, and vice versa. (Probably easiest to 139 | introduce a helper function that produces `r ++ ... ++ r` 140 | for a given count, and then to prove two lemmas, one per 141 | direction. *) 142 | 143 | (* YOUR CODE HERE *) 144 | 145 | 146 | 147 | (* Now, write a regular expression IMPLEMENTATION and prove 148 | it at least partially correct. 149 | 150 | Q. What does “implementation” mean? 151 | A. Here are some possibilities. 152 | - COMPUTATIONAL MATCHER: You could write a `Function` 153 | or `Fixpoint` that takes a regex as an argument and 154 | returns `true` if the regex matches. 155 | - NFA MATCHER: You could design an inductive type for 156 | nondeterministic finite automata. This would include 157 | a `Function` or `Fixpoint` that builds the NFA for 158 | a regex using Thompson’s construction, and an 159 | inductive proposition that implements NFA matching. 160 | - DERIVATIVES MATCHER: 161 | http://www.ccs.neu.edu/home/turon/re-deriv.pdf 162 | - OTHER: Check with me! 163 | 164 | Q. Any general gotchas? 165 | A. DON’T PREMATURELY OPTIMIZE. 166 | 167 | Q. Any gotchas with a computational matcher? 168 | A. Kleene-star will cause problems since your recursion 169 | might never terminate. It’s OK to add an extra “fuel” 170 | argument to force termination. Each recursive call 171 | will consume a unit of fuel, and the match may terminate 172 | prematurely (i.e., return `false`) if it runs out of 173 | fuel. But given enough fuel, your matcher should succeed 174 | on Kleene-star arguments (it’s not OK to return `false` 175 | for every Kleene-star regex). 176 | 177 | Q. Any gotchas with an NFA matcher? 178 | A. I found it far easier to work with a “nested” NFA 179 | design, in which an NFA node may contain another NFA as 180 | a subgraph, than a “flat” NFA design, in which the 181 | regex-to-NFA function flattens the regex as it goes. 182 | 183 | Q. What does “partially correct” mean? 184 | A. 
Full correctness requires that (1) every string that 185 | matches the spec also matches the implementation (perhaps 186 | with a “given enough fuel” clause), and (2) every string 187 | that matches the implementation also matches the spec. 188 | Please prove AT LEAST ONE of these directions. 189 | 190 | Q. Any general hints? 191 | A. I found the NFA matcher easier to work with because of 192 | the absence of fuel. 193 | A. You will likely need some additional type definitions. 194 | A. `inversion` is your friend. 195 | A. You will almost certainly want `Hint Constructors` for 196 | your types. `auto` goes a lot further when `Hint 197 | Constructors` is active. Consider writing your own 198 | tactics too; I benefited enormously from a tactic that 199 | can tell when an edge (such as one created by 200 | `inversion` or `destruct`) is not actually in an NFA. 201 | 202 | Q. This is fun. How can I go further? 203 | A. You’re crazy. But, for example, given an NFA matcher, 204 | you could implement an NFA-to-DFA converter and prove it 205 | preserved the NFA’s semantics. I wrote an NFA flattener, 206 | which de-nests a recursive NFA into a flat graph, and 207 | proved that flattening preserves the NFA’s semantics. 208 | That was hard, but fun. 209 | 210 | Q. How can I not get stuck? 211 | A. Share your inductive definitions with me before you get 212 | too far down the proof route. *) 213 | 214 | (* YOUR CODE HERE *) 215 | -------------------------------------------------------------------------------- /pset3/pset3sort.v: -------------------------------------------------------------------------------- 1 | Require Import Arith. 2 | Require Import Omega. 3 | Require Import Recdef. 4 | Require Import List. 5 | Require Import Program.Tactics. 6 | Require Import Program.Equality. 7 | Require Import Sorting.Permutation. 8 | Require Import Sorting.Sorted. 9 | Import ListNotations. 10 | 11 | (* Write a function that sorts a list of `nat`s into increasing 12 | order. Your function must take O(N log N) time in the average 13 | case. This means no insertion-sort or bubble-sort. (Remember 14 | that `++` takes O(N) time, so if you go crazy with it, your 15 | function will be slower than it looks.) 16 | 17 | Then test your function, using `Eval compute` or `Extraction`. 18 | 19 | Then prove it correct. Write a specification for sorting 20 | and show that your function obeys that specification. 21 | You may use parts of the standard library in your spec; I 22 | definitely recommend using `Permutation`. Your function code 23 | and proof code should be your own, however. *) 24 | 25 | (* YOUR CODE HERE *) 26 | -------------------------------------------------------------------------------- /pset4/README.md: -------------------------------------------------------------------------------- 1 | Problem set 4 2 | ------------- 3 | 4 | Merge this problem set with your code with 5 | 6 | git pull git://github.com/readablesystems/cs260r-17 master 7 | 8 | (or do a pull or merge using GitHub). 9 | 10 | Then finish the marked exercises in the `pset4osview.v` file. 11 | 12 | There’s a lot of library stuff to absorb in this pset, and since we are 13 | working with a whole OS model (rather than something with a simple spec, like 14 | mergesort or regular expressions), the lemma statements are tricky. But the 15 | proofs themselves are not ridiculously long. My solutions file is <200 lines 16 | longer than the problem set file. 17 | 18 | More exercises may be released. 19 | 20 | Due Sunday 3/26 at midnight. 
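(* Editor's aside (not part of any pset file): the "fuel" idiom recommended in
   pset3regex.v above, shown on a toy function.  Each recursive call consumes
   one unit of fuel, so the Fixpoint is structurally decreasing on `fuel`
   even when the "real" recursion (here, on `n`) is what we care about.
   The name `even_fuel` is made up. *)
Fixpoint even_fuel (fuel n:nat) : bool :=
  match fuel with
  | O => false                     (* out of fuel: give up and return false *)
  | S fuel' =>
    match n with
    | O => true
    | S O => false
    | S (S n') => even_fuel fuel' n'
    end
  end.

Eval compute in even_fuel 10 6.    (* = true  : enough fuel *)
Eval compute in even_fuel 1 6.     (* = false : ran out of fuel *)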
21 | -------------------------------------------------------------------------------- /pset4/pset4osview.v: -------------------------------------------------------------------------------- 1 | Require Import Bool Arith List Omega. 2 | Require Import Recdef Morphisms. 3 | Require Import Program.Tactics. 4 | Require Import Relation_Operators. 5 | Require FMapList. 6 | Require FMapFacts. 7 | Require Import Classical. 8 | Require Import Coq.Classes.RelationClasses. 9 | Require Import OrderedType OrderedTypeEx DecidableType. 10 | Require Import Sorting.Permutation. 11 | Import ListNotations. 12 | 13 | (** * Process isolation 14 | 15 | This problem set contains a model of a multi-process 16 | operating system with a single ramdisk. You show that process 17 | isolation holds across system calls. 18 | 19 | The problem set has two purposes. First, it shows one way to model 20 | a real system -- it’s much more like seL4 than like mergesort. 21 | Second, we delve deeper into the standard library. Most large Coq 22 | developments use complex data structures like finite maps; we do 23 | too. *) 24 | 25 | 26 | (** ** Memory 27 | 28 | A _memory_ is a partial function from addresses to “byte” values (we use 29 | [nat] because why not). An address is a [nat]; a value is an [option nat], 30 | where [None] indicates unmapped memory. We use the [memory] type to 31 | represent both process memory and the ramdisk. *) 32 | 33 | Section Memory. 34 | 35 | (** We represent a memory as a function. *) 36 | Definition memory : Set := nat -> option nat. 37 | 38 | 39 | Implicit Types m : memory. 40 | Implicit Types addr len : nat. 41 | Implicit Types vs : list nat. 42 | 43 | (** The empty memory *) 44 | Definition memnil : memory := 45 | fun a => None. 46 | 47 | (** Returns [True] iff the [len] addresses starting at [addr] are all 48 | mapped. *) 49 | Definition memmapped m addr len : Prop := 50 | forall i, i < len -> m (addr + i) <> None. 51 | 52 | 53 | (** These functions create a new memory based on [m]. *) 54 | 55 | (** Add new values [vs] to [m], starting at [addr]. *) 56 | Definition memmap m addr vs : memory := 57 | fun a => 58 | if (addr <=? a) && (a 65 | if (addr <=? a) && (a bool) l1 l2: 88 | filter f (l1 ++ l2) = filter f l1 ++ filter f l2. 89 | Proof. 90 | (* YOUR CODE HERE *) 91 | Admitted. 92 | 93 | Lemma Forall_app (P:A -> Prop) l1 l2: 94 | Forall P (l1 ++ l2) <-> Forall P l1 /\ Forall P l2. 95 | Proof. 96 | (* YOUR CODE HERE *) 97 | Admitted. 98 | End ListFacts. 99 | 100 | 101 | (** ** Process definitions 102 | 103 | Main definitions: 104 | 105 | - [process]: The internal configuration of a process, including 106 | its memory, its current execution mode, and what system call 107 | it’s executing (if any). 108 | 109 | - [event]: A history variable that records execution steps for 110 | proofs. 111 | 112 | - [procview]: A process’s view of the system. Comprises a 113 | [process], the kernel’s ramdisk, and a trace (a list of 114 | [event]s). 115 | 116 | - [procstep1]: The transition relation for [procview]s. *) 117 | 118 | Section Process. 119 | 120 | (** A [process] is computing on its current memory ([PComp]), 121 | blocked in a system call ([PSys]), or dead ([PDead]). 122 | The system-call constructor [PSys] also specifies the system 123 | call name and arguments. Not all arguments are used for all 124 | system calls. *) 125 | 126 | Inductive syscall : Set := Map | Unmap | Read | Write | Kill. 
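(* EDITOR'S NOTE: the bodies of [memmap] and [memunmap] in the Memory section
   above were garbled when this dump was produced (the surviving fragment also
   suggests the list lemma below it began [Lemma filter_app (f:A -> bool) l1 l2:]).
   Here is a minimal sketch of what the two definitions plausibly look like,
   inferred from their comments and from how [procstep1] uses them below; the
   [_sketch] names are hypothetical and the real bodies may differ. *)
Definition memmap_sketch (m:memory) (addr:nat) (vs:list nat) : memory :=
  fun a =>
    if (addr <=? a) && (a <? addr + length vs)
    then nth_error vs (a - addr)   (* addresses covered by [vs] get the new values *)
    else m a.

Definition memunmap_sketch (m:memory) (addr len:nat) : memory :=
  fun a =>
    if (addr <=? a) && (a <? addr + len)
    then None                      (* these addresses become unmapped *)
    else m a.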
127 | 128 | Inductive process : Set := 129 | | PComp: forall (pm:memory), process 130 | | PSys: forall (pm:memory) (sysc:syscall) (addr len offset:nat), process 131 | | PDead. 132 | 133 | 134 | Inductive event : Set := 135 | | EC (* Computation event *) 136 | | ES (* system call Start event *) 137 | | ER (* system call Return event *) 138 | | EK (* system call in-Kernel event *) 139 | | EKW: forall (offset len:nat), event 140 | (* system call in-Kernel Write event: records what offsets written *) 141 | | ED. (* Die event *) 142 | 143 | (** Is event [e] not a write? *) 144 | Definition event_nonwrite e : Prop := 145 | match e with 146 | | EKW _ _ => False 147 | | _ => True 148 | end. 149 | 150 | (** Is event [e] not an in-kernel event? *) 151 | Definition event_nonkernel e : Prop := 152 | match e with 153 | | EK => False 154 | | EKW _ _ => False 155 | | _ => True 156 | end. 157 | 158 | 159 | Inductive procview : Set := 160 | | PS: forall (p:process) (km:memory) (tr:list event), procview. 161 | 162 | 163 | (** The [procstep1 : procview -> procview -> Prop] relation 164 | determines how the OS works. The basic ideas are that processes 165 | alternate between computing and system calls, and that system 166 | calls execute one byte at a time. 167 | 168 | - [PS_compute]: A computing process can change its memory (but 169 | it can only change memory that is already mapped). 170 | - [PS_syscall]: A computing process can make a new system call. 171 | - [PS_sysdone]: When a system call is done, the process starts 172 | computing again. 173 | - [PS_kill]: A process can be killed at any time. 174 | 175 | Then there are four constructors to execute the four system 176 | calls. The system calls take the same arguments: [addr] is an 177 | address in the process’s memory, [len] is a number of bytes, 178 | and [offset] (not always used) is an offset into the ramdisk. 179 | 180 | - [Map addr len]: Like malloc or mmap: adds [len] bytes to this 181 | process’s address space. The new bytes are zeroed out. 182 | - [Unmap addr len]: Like munmap: removes [len] bytes from this 183 | process’s address space. 184 | - [Read addr len offset]: Reads [len] bytes from the ramdisk 185 | starting at [offset]. 186 | - [Write addr len offset]: Writes [len] bytes to the ramdisk 187 | starting at [offset]. 188 | 189 | The [Read] and [Write] system calls require that the relevant 190 | part of the process’s memory is mapped. 191 | 192 | Each step records its function in the [procview]’s [tr] trace 193 | history variable. 
*) 194 | 195 | Inductive procstep1 : relation procview := 196 | | PS_compute: forall pm km tr addr vs, 197 | memmapped pm addr (length vs) -> 198 | procstep1 (PS (PComp pm) km tr) 199 | (PS (PComp (memmap pm addr vs)) km (tr ++ [EC])) 200 | | PS_syscall: forall pm km tr sysc addr len offset, 201 | procstep1 (PS (PComp pm) km tr) 202 | (PS (PSys pm sysc addr len offset) km (tr ++ [ES])) 203 | | PS_sysdone: forall pm km tr sysc addr offset, 204 | procstep1 (PS (PSys pm sysc addr 0 offset) km tr) 205 | (PS (PComp pm) km (tr ++ [ER])) 206 | | PS_map: forall pm km tr addr len offset, 207 | procstep1 (PS (PSys pm Map addr (S len) offset) km tr) 208 | (PS (PSys (memmap pm addr [0]) Map (S addr) len (S offset)) 209 | km (tr ++ [EK])) 210 | | PS_unmap: forall pm km tr addr len offset, 211 | procstep1 (PS (PSys pm Unmap addr (S len) offset) km tr) 212 | (PS (PSys (memunmap pm addr 1) Unmap (S addr) len (S offset)) 213 | km (tr ++ [EK])) 214 | | PS_read: forall pm km tr addr len offset v, 215 | km offset = Some v -> 216 | procstep1 (PS (PSys pm Read addr (S len) offset) km tr) 217 | (PS (PSys (memmap pm addr [v]) Read (S addr) len (S offset)) 218 | km (tr ++ [EK])) 219 | | PS_write: forall pm km tr addr len offset v, 220 | pm addr = Some v -> 221 | memmapped km offset 1 -> 222 | procstep1 (PS (PSys pm Write addr (S len) offset) km tr) 223 | (PS (PSys pm Write (S addr) len (S offset)) 224 | (memmap km offset [v]) (tr ++ [EKW offset 1])) 225 | | PS_kill: forall p km tr, 226 | procstep1 (PS p km tr) 227 | (PS PDead km (tr ++ [ED])). 228 | 229 | (** [procsteps : procview -> procview -> Prop] is the reflexive, 230 | transitive closure of [procstep1]. So [procsteps a b] holds if 231 | there is some chain [a0, a1, a2, ..., an] of zero or more steps, 232 | so that [a0 = a], [an = b], and 233 | [forall i, i < n -> procstep1 ai a{i+1}]. 234 | 235 | [clos_refl_trans_n1] is defined in Coq’s standard library. *) 236 | 237 | Definition procsteps := clos_refl_trans_n1 procview procstep1. 238 | 239 | End Process. 240 | 241 | 242 | (** ** Finite maps 243 | 244 | We now examine Coq’s [FMap] module, which defines a dictionary-like data type. 245 | 246 | [FMap] functions are defined in the [FMapInterface] module, which also has 247 | many lemmas. Many other lemmas are defined in [FMapFacts]. 248 | 249 | [FMap]s are instantiated in three steps. First, we choose an underlying 250 | representation, such as an AVL tree. We, though, use sorted lists, via 251 | the [FMapList] type, since they print nicer. 252 | 253 | Second, we provide the type of keys, which must be an ordered type. 254 | This gives us a new module specialized for a kind of key. For example: 255 | << 256 | Module NatMap := FMapList.Make Nat_as_OT. 257 | >> 258 | Now [NatMap] defines [FMap] types, functions, and lemmas specialized 259 | for [nat] keys. ([Nat_as_OT] is defined in the Coq library.) We can refer 260 | to those types, functions, and lemmas as [NatMap.whatever]. 261 | 262 | Finally, we provide the type of values, which can be anything. 263 | << 264 | Definition Map_from_nat_to_bool := NatMap.t bool. 265 | Definition an_empty_map := NatMap.empty bool. 266 | >> 267 | Now [Map_from_nat_to_bool] is a map type. Its representation is hidden, 268 | meaning you can only access its contents via [FMap] functions and lemmas 269 | (and things you build on top). 270 | 271 | The most important [Map] accessors are as follows. In the below, [key] 272 | is the map’s key type, [elt] is its value type, and [t elt] is the map 273 | type. 
In arguments, [x] and [y] are keys, [e] and [e'] are values, and 274 | [m] and [m'] are maps. 275 | 276 | - [Map.empty elt] is an empty map storing [elt] values. 277 | - [Map.add x e m: t elt] sets [x]’s value to [e] in [m]. Coq is functional, 278 | of course, so this involves making a copy of [m]. 279 | - [Map.find x m: option elt] returns [x]’s value in [m], if it exists. 280 | - [Map.In x m: Prop] is [True] iff [x] has a value in [m]. 281 | - [Map.MapsTo x e m: Prop] is [True] iff [x]’s value in [m] is [e]. 282 | 283 | Three lemmas in [Map] show how [MapsTo] and [add] relate. [add_1] is about 284 | the value of the added key, [add_2] and [add_3] are about other keys. 285 | These lemmas use [E.eq] to test keys for equality; for [nat]s, [E.eq] 286 | is just [=]. 287 | << 288 | Lemma Map.add_1 (x y:key) (e:elt) (m:t elt) : 289 | E.eq x y -> Map.MapsTo y e (Map.add x e m). 290 | Lemma Map.add_2 (x y:key) (e e':elt) (m:t elt) : 291 | ~ E.eq x y -> Map.MapsTo y e m -> Map.MapsTo y e (Map.add x e' m). 292 | Lemma Map.add_3 (x y:key) (e e':elt) (m:t elt) : 293 | ~ E.eq x y -> Map.MapsTo y e (Map.add x e' m) -> Map.MapsTo y e m. 294 | >> 295 | 296 | Then there are some useful lemmas in [MapFacts]. [add_mapsto_iff] 297 | completely specifies the behavior of [Map.add]; it’s like a combination 298 | of [add_1], [add_2], and [add_3]. 299 | << 300 | Lemma MapFacts.add_mapsto_iff (m:t elt) (x y:key) (e e':elt) : 301 | Map.MapsTo y e' (Map.add x e m) <-> 302 | (E.eq x y /\ e = e') \/ 303 | (~ E.eq x y /\ Map.MapsTo y e' m). 304 | >> 305 | 306 | [MapsTo_fun] says that [MapsTo] is a (partial) function: if the 307 | same key maps to two values, then those values are equal. 308 | << 309 | Lemma MapFacts.MapsTo_fun (m:t elt) (x:key) (e e':elt) : 310 | Map.MapsTo x e m -> Map.MapsTo x e' m -> e = e'. 311 | >> 312 | 313 | And there are others, such as [add_in_iff]. 314 | 315 | 316 | *** The problem of equality 317 | 318 | Maps (and other complicated data structures) force us to think more 319 | carefully about equality. Intuitively, we think two maps are equal if 320 | they contain the same elements. This is captured by the [Map.Equal] 321 | relation: 322 | 323 | << 324 | Definition Map.Equal m m' := forall x, Map.find x m = Map.find x m'. 325 | >> 326 | 327 | Unfortunately, it is _not_ true that [Map.Equal m m' -> m = m']. 328 | “Different” [Map]s, in terms of Leibniz equality (Coq’s primitive 329 | equality), can have the same contents. For example, the [Map]s might 330 | be balanced trees with different layouts. :( 331 | 332 | This makes life more difficult! It’s very nice to use [rewrite] 333 | tactics, and usually [rewrite] requires Leibniz equality. 334 | 335 | To use [rewrite] on [Map]s, or data types containing [Map]s, we must 336 | provide separate lemmas showing that rewriting in specific situations 337 | is OK. We’ll see some such lemmas below. But the Coq library already 338 | provides many of these lemmas for [Map] operations, so this proof 339 | works: 340 | 341 | << 342 | Lemma MapsTo_Equal x e m m': 343 | Map.Equal m m' -> 344 | Map.MapsTo x e m -> 345 | Map.MapsTo x e m'. 346 | 347 | intros EQ H. now rewrite <- EQ. 348 | Qed. 349 | >> 350 | 351 | The [rewrite] succeeds because the Coq library has proved that 352 | [Equal] [Map]s can be substituted inside a [MapsTo]’s arguments. 353 | 354 | A useful fact about [Equal]: 355 | 356 | << 357 | Lemma MapFacts.Equal_mapsto_iff m m': 358 | Equal m m' <-> (forall x e, MapsTo x e m <-> MapsTo x e m').
359 | >> 360 | 361 | 362 | *** Your work 363 | 364 | Get started by proving some useful facts about [Map]s in general. We 365 | start a new module for these facts, called [WXFacts_fun]. 366 | 367 | *) 368 | 369 | Module WXFacts_fun (E:DecidableType) (Import Map:FMapInterface.WSfun E). 370 | Module MapFacts := FMapFacts.WFacts_fun E Map. 371 | Section XFacts. 372 | Notation eq_dec := E.eq_dec. 373 | Variable elt: Type. 374 | Implicit Types m: t elt. 375 | Implicit Types x y z: key. 376 | Implicit Types e: elt. 377 | 378 | (** A map doesn’t change if you [add] the same element to it. *) 379 | Lemma add_same_Equal: forall m x e, 380 | MapsTo x e m -> 381 | Equal m (add x e m). 382 | Proof. 383 | intros; rewrite MapFacts.Equal_mapsto_iff; split; intros. 384 | - destruct (eq_dec x k); subst. 385 | + rewrite <- e1 in H0. 386 | rewrite (MapFacts.MapsTo_fun H H0). 387 | now eapply Map.add_1. 388 | + now eapply Map.add_2. 389 | - rewrite MapFacts.add_mapsto_iff in H0; 390 | destruct H0; destruct_conjs. 391 | + rewrite <- H0; now rewrite <- H1. 392 | + auto. 393 | Qed. 394 | 395 | (** Now you prove this: [add]ing an element overrides the previous 396 | element at that key. *) 397 | Lemma add_redundant_Equal: forall m x e e', 398 | Equal (add x e m) (add x e (add x e' m)). 399 | Proof. 400 | (* YOUR CODE HERE *) 401 | Admitted. 402 | End XFacts. 403 | End WXFacts_fun. 404 | 405 | (** Our [Map]s have [nat] as keys. We instantiate the three useful modules: 406 | the [Map] itself, its [Facts], and its [XFacts] (that we just wrote). *) 407 | Module NatMap := FMapList.Make Nat_as_OT. 408 | Module NatMapFacts := FMapFacts.WFacts_fun Nat_as_OT NatMap. 409 | Module NatMapXFacts := WXFacts_fun Nat_as_OT NatMap. 410 | 411 | 412 | 413 | (** ** Operating system view 414 | 415 | An [osview] is the analogy to [procview], but for the whole operating system, 416 | not just a single process. An [osview] contains a [NatMap] of [process]es, 417 | mapped by process ID; a ramdisk; and a trace history variable, where this time 418 | each trace element is a pair of process ID and event. 419 | 420 | Since [osview]s contain [Map]s, Leibniz equality isn’t the right notion of 421 | equality. We provide our own equality definition, prove that it is reflexive, 422 | symmetric, and transitive, and finally show that it is an equivalence relation, 423 | allowing it to be used by tactics like [reflexivity], [symmetry], and 424 | [transitivity]. *) 425 | 426 | Section OSView. 427 | 428 | Definition processes := NatMap.t process. 429 | 430 | Definition trace := list (nat * event). 431 | 432 | Inductive osview := 433 | | OS: forall (ps:processes) (km:memory) (tr:trace), osview. 434 | 435 | Definition osv_ps os := match os with OS ps _ _ => ps end. 436 | Definition osv_km os := match os with OS _ km _ => km end. 437 | Definition osv_tr os := match os with OS _ _ tr => tr end. 438 | 439 | 440 | Definition osv_equal os1 os2 := 441 | match os1, os2 with 442 | | OS ps1 km1 tr1, OS ps2 km2 tr2 => 443 | NatMap.Equal ps1 ps2 /\ km1 = km2 /\ tr1 = tr2 444 | end. 445 | 446 | Global Instance osv_equal_refl : Reflexive osv_equal. 447 | intro os1. (* unpack the [Reflexive] definition *) 448 | destruct os1; simpl; intuition. 449 | Qed. 450 | 451 | Global Instance osv_equal_sym : Symmetric osv_equal. 452 | intros os1 os2. 453 | destruct os1, os2; simpl; intuition. 454 | Qed. 455 | 456 | Global Instance osv_equal_trans : Transitive osv_equal. 457 | intros os1 os2 os3. 458 | destruct os1, os2, os3; simpl; intros; destruct_conjs; repeat split. 
459 | - now transitivity ps0. 460 | - now subst. 461 | - now subst. 462 | Qed. 463 | 464 | Global Instance : Equivalence osv_equal. 465 | split; 466 | [ apply osv_equal_refl | apply osv_equal_sym | apply osv_equal_trans ]. 467 | Qed. 468 | 469 | End OSView. 470 | 471 | 472 | 473 | (** ** Trace facts 474 | 475 | Some helper functions and lemmas about traces. *) 476 | 477 | Section TraceFacts. 478 | Implicit Types tr:trace. 479 | Implicit Types pid:nat. 480 | 481 | (** Returns a trace that _only_ contains events from process [pid]. *) 482 | Definition trace_filter_pid pid tr : trace := 483 | filter (fun pr => fst pr =? pid) tr. 484 | 485 | (** Returns a trace that _strips out_ events from process [pid]. *) 486 | Definition trace_remove_pid pid tr : trace := 487 | filter (fun pr => negb (fst pr =? pid)) tr. 488 | 489 | (** [True] iff none of the events in [tr] are kernel writes. *) 490 | Definition trace_nowrites tr : Prop := 491 | Forall event_nonwrite (map snd tr). 492 | 493 | 494 | Lemma trace_filter_pid_app pid tr1 tr2: 495 | trace_filter_pid pid (tr1 ++ tr2) = 496 | trace_filter_pid pid tr1 ++ trace_filter_pid pid tr2. 497 | Proof. 498 | (* YOUR CODE HERE -- remember ListFacts above! *) 499 | Admitted. 500 | 501 | Lemma trace_remove_pid_app pid tr1 tr2: 502 | trace_remove_pid pid (tr1 ++ tr2) = 503 | trace_remove_pid pid tr1 ++ trace_remove_pid pid tr2. 504 | Proof. 505 | (* YOUR CODE HERE *) 506 | Admitted. 507 | 508 | Lemma trace_nowrites_app tr1 tr2: 509 | trace_nowrites (tr1 ++ tr2) <-> trace_nowrites tr1 /\ trace_nowrites tr2. 510 | Proof. 511 | (* YOUR CODE HERE *) 512 | Admitted. 513 | 514 | 515 | (** These lemmas relate the [trace_filter_pid] and [trace_remove_pid] 516 | operations to [map]s over event lists. *) 517 | Lemma trace_filter_pid_map_eq pid es: 518 | trace_filter_pid pid (map (pair pid) es) = map (pair pid) es. 519 | Proof. 520 | (* YOUR CODE HERE *) 521 | Admitted. 522 | 523 | Lemma trace_filter_pid_map_neq pid pid': 524 | pid <> pid' -> 525 | forall es, trace_filter_pid pid (map (pair pid') es) = []. 526 | Proof. 527 | (* YOUR CODE HERE *) 528 | Admitted. 529 | 530 | Lemma trace_remove_pid_map_neq pid pid': 531 | pid <> pid' -> 532 | forall es, trace_remove_pid pid (map (pair pid') es) = map (pair pid') es. 533 | Proof. 534 | (* YOUR CODE HERE *) 535 | Admitted. 536 | 537 | End TraceFacts. 538 | 539 | 540 | (** ** Operating system step relation 541 | 542 | We are now ready to state the [osstep1 : osview -> osview -> Prop] 543 | step relation: the OS as a whole takes a step when one of its component 544 | processes takes a step. And we define [ossteps], which is the reflexive, 545 | transitive closure of [osstep1]. 546 | 547 | Issues involving equality affect both of these definitions. In [OS_step], 548 | we don’t require that the new OS contain _exactly_ [NatMap.add pid p' ps]; 549 | instead, we require that it contain some _equivalent_ process map. 550 | Similarly, the [os_refl] rule applies to _equivalent_ OSs, not merely 551 | _equal_ ones. *) 552 | 553 | Section OSStep. 554 | 555 | Inductive osstep1 : relation osview := 556 | | OS_step : forall ps km tr ps' km' tr' pid p p', 557 | NatMap.MapsTo pid p ps -> 558 | procstep1 (PS p km []) (PS p' km' tr') -> 559 | NatMap.Equal ps' (NatMap.add pid p' ps) -> 560 | osstep1 (OS ps km tr) 561 | (OS ps' km' (tr ++ (map (pair pid) tr'))). 
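(* Editor's aside (a sketch, not part of the pset): what the trace helpers
   above compute on a tiny concrete trace.  The example trace and the name
   `trace_helpers_demo` are made up. *)
Example trace_helpers_demo:
  trace_filter_pid 1 [(1, EC); (2, ES); (1, ED)] = [(1, EC); (1, ED)] /\
  trace_remove_pid 1 [(1, EC); (2, ES); (1, ED)] = [(2, ES)].
Proof.
  split; reflexivity.
Qed.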
562 | 563 | Inductive ossteps : relation osview := 564 | | os_refl: forall os1 os2, 565 | osv_equal os1 os2 -> 566 | ossteps os1 os2 567 | | os_trans: forall os1 os2 os3, 568 | ossteps os1 os2 -> 569 | osstep1 os2 os3 -> 570 | ossteps os1 os3. 571 | 572 | End OSStep. 573 | 574 | 575 | (** ** Rewriting for [osstep1] and [ossteps] 576 | 577 | In this section you prove that [osstep1] and [ossteps] hold true up to 578 | the [osv_equal] equivalence relation. This lets us show the [Proper] 579 | instances that will let us [rewrite] the arguments of [osstep1] and 580 | [ossteps]. *) 581 | 582 | Section OSStepRewriting. 583 | 584 | Lemma osstep1_equal os1 os1' os2 os2': 585 | osv_equal os1 os1' -> 586 | osv_equal os2 os2' -> 587 | osstep1 os1 os2 -> 588 | osstep1 os1' os2'. 589 | Proof. 590 | (* YOUR CODE HERE *) 591 | Admitted. 592 | 593 | Global Instance: Proper (osv_equal ==> osv_equal ==> iff) osstep1. 594 | Proof. 595 | intros os1 os1' H1 os2 os2' H2. 596 | split; intros. 597 | - now apply osstep1_equal with (os1:=os1) (os2:=os2). 598 | - apply osstep1_equal with (os1:=os1') (os2:=os2'); intuition. 599 | Qed. 600 | 601 | 602 | Lemma ossteps_equal os1 os1' os2 os2': 603 | osv_equal os1 os1' -> 604 | osv_equal os2 os2' -> 605 | ossteps os1 os2 -> 606 | ossteps os1' os2'. 607 | Proof. 608 | (* YOUR CODE HERE *) 609 | Admitted. 610 | 611 | Global Instance: Proper (osv_equal ==> osv_equal ==> iff) ossteps. 612 | Proof. 613 | intros os1 os1' H1 os2 os2' H2. 614 | split; intros. 615 | - now apply ossteps_equal with (os1:=os1) (os2:=os2). 616 | - now apply ossteps_equal with (os1:=os1') (os2:=os2'). 617 | Qed. 618 | 619 | End OSStepRewriting. 620 | 621 | 622 | (** ** Process isolation 623 | 624 | You’re finally there: you’re going to prove two process isolation lemmas. 625 | 626 | The first theorem, [process_isolation], says that if a process takes no steps 627 | in some trace, then that process’s internal state remains unchanged. 628 | 629 | The second theorem, [write_isolation], says that processes only communicate 630 | through [Write]s to the ramdisk. Specifically, we consider a trace in which 631 | no process but [p] writes to the ramdisk. Given that trace, we show that 632 | [p] could take the exact same steps, with the same results, in a different 633 | trace where _no other process takes any steps at all_. 634 | 635 | You can imagine many other theorems. For example, [read_isolation] would show 636 | that if a process performs no [Read]s from the ramdisk, then it could take 637 | the same steps in a different trace where no other process takes steps. 638 | Or [reverse_write_isolation] would show that _adding_ non-write actions to a 639 | trace containing [p] won’t affect [p]’s actions. *) 640 | 641 | Section ProcessIsolation. 642 | 643 | (* Helper lemma: processes never disappear. *) 644 | Lemma ossteps_In_ps pid os1 os2: 645 | NatMap.In pid (osv_ps os1) -> 646 | ossteps os1 os2 -> 647 | NatMap.In pid (osv_ps os2). 648 | Proof. 649 | (* YOUR CODE HERE *) 650 | Admitted. 651 | 652 | 653 | Theorem process_isolation os1 os2 pid p: 654 | ossteps os1 os2 -> 655 | NatMap.MapsTo pid p (osv_ps os1) -> 656 | osv_tr os1 = [] -> 657 | ~ In pid (map fst (osv_tr os2)) -> 658 | NatMap.MapsTo pid p (osv_ps os2). 659 | Proof. 660 | (* YOUR CODE HERE *) 661 | Admitted.
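(* Editor's aside (a sketch, not part of the pset): the [Proper] instances
   from the previous section are exactly what lets [rewrite] act on the
   arguments of [ossteps].  The lemma name is made up. *)
Lemma ossteps_respects_equal_l os1 os1' os2:
  osv_equal os1 os1' ->
  ossteps os1 os2 ->
  ossteps os1' os2.
Proof.
  intros EQ H.
  now rewrite <- EQ.
Qed.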
662 | 663 | 664 | Theorem write_isolation os1 os2 pid p p': 665 | ossteps os1 os2 -> 666 | NatMap.MapsTo pid p (osv_ps os1) -> 667 | NatMap.MapsTo pid p' (osv_ps os2) -> 668 | osv_tr os1 = [] -> 669 | trace_nowrites (trace_remove_pid pid (osv_tr os2)) -> 670 | ossteps os1 (OS (NatMap.add pid p' (osv_ps os1)) 671 | (osv_km os2) 672 | (trace_filter_pid pid (osv_tr os2))). 673 | Proof. 674 | (* YOUR CODE HERE *) 675 | Admitted. 676 | 677 | End ProcessIsolation. 678 | -------------------------------------------------------------------------------- /sep-sol.v: -------------------------------------------------------------------------------- 1 | Require Import Bool Arith List Omega. 2 | Require Import Recdef Morphisms. 3 | Require Import Program.Tactics. 4 | Require Import Relation_Operators. 5 | Require FMapList. 6 | Require FMapFacts. 7 | Require Import Classical. 8 | Require Import Coq.Classes.RelationClasses. 9 | Require Import OrderedType OrderedTypeEx DecidableType. 10 | Require Import Sorting.Permutation. 11 | Import ListNotations. 12 | 13 | Module WXFacts_fun (E:DecidableType) (Import Map:FMapInterface.WSfun E). 14 | Module MapF := FMapFacts.WFacts_fun E Map. 15 | Module MapProperties := FMapFacts.WProperties_fun E Map. 16 | Section XFacts. 17 | Notation eq_dec := E.eq_dec. 18 | Context {elt: Type}. 19 | Implicit Types m: t elt. 20 | Implicit Types x y z: key. 21 | Implicit Types e: elt. 22 | Notation Partition := MapProperties.Partition. 23 | Notation Disjoint := MapProperties.Disjoint. 24 | Notation update := MapProperties.update. 25 | 26 | 27 | Definition Submap m1 m2 := 28 | forall k e, MapsTo k e m1 -> MapsTo k e m2. 29 | 30 | Lemma Submap_in: 31 | forall {m1 m2}, Submap m1 m2 -> 32 | forall k, In k m1 -> In k m2. 33 | Proof. 34 | intros m1 m2 S k I. 35 | destruct I. 36 | exists x. 37 | now apply S. 38 | Qed. 39 | 40 | 41 | (* Pull in the library’s facts on Disjoint and Partition. *) 42 | Lemma Disjoint_alt: 43 | forall m m', Disjoint m m' <-> 44 | (forall k e e', MapsTo k e m -> 45 | MapsTo k e' m' -> 46 | False). 47 | Proof. 48 | apply MapProperties.Disjoint_alt. 49 | Qed. 50 | 51 | Lemma Disjoint_empty_r: 52 | forall {m}, Disjoint m (Map.empty elt). 53 | Proof. 54 | intros; rewrite Disjoint_alt; intros. 55 | now rewrite MapF.empty_mapsto_iff in H0. 56 | Qed. 57 | 58 | Lemma Disjoint_sym: 59 | forall {m1 m2}, Disjoint m1 m2 -> Disjoint m2 m1. 60 | Proof. 61 | apply MapProperties.Disjoint_sym. 62 | Qed. 63 | 64 | Lemma Disjoint_in_nin: 65 | forall {m1 m2}, Disjoint m1 m2 -> 66 | forall k, In k m1 -> ~ In k m2. 67 | Proof. 68 | intros m1 m2 D k I1 I2; apply (D k); intuition. 69 | Qed. 70 | 71 | Lemma Disjoint_mapsto_nin: 72 | forall {m1 m2}, Disjoint m1 m2 -> 73 | forall k e, MapsTo k e m1 -> ~ In k m2. 74 | Proof. 75 | intros m1 m2 D k e M1 I2; apply (D k); intuition; now exists e. 76 | Qed. 77 | 78 | Lemma Disjoint_submap_r: 79 | forall m1 m2 m3, Disjoint m1 m2 -> 80 | Submap m3 m2 -> Disjoint m1 m3. 81 | Proof. 82 | intros m1 m2 m3 D S k I; destruct I as [I1 I2]. 83 | apply (Submap_in S) in I2; now apply (D k). 84 | Qed. 85 | 86 | 87 | Lemma update_in_iff: 88 | forall m1 m2 k, In k (update m1 m2) <-> In k m1 \/ In k m2. 89 | Proof. 90 | apply MapProperties.update_in_iff. 91 | Qed. 92 | 93 | Lemma update_mapsto_iff: 94 | forall m1 m2 k e, MapsTo k e (update m1 m2) <-> 95 | (MapsTo k e m2 \/ 96 | (MapsTo k e m1 /\ ~ In k m2)). 97 | Proof. 98 | apply MapProperties.update_mapsto_iff. 99 | Qed. 
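(* Editor's aside (a sketch, not in the original file): two immediate
   consequences of the [Submap] definition above; the lemma names are made up. *)
Lemma Submap_refl:
  forall m, Submap m m.
Proof.
  intros m; unfold Submap; intros k e M; exact M.
Qed.

Lemma Submap_trans:
  forall m1 m2 m3, Submap m1 m2 -> Submap m2 m3 -> Submap m1 m3.
Proof.
  intros m1 m2 m3 S12 S23; unfold Submap; intros k e M.
  apply S23; apply S12; exact M.
Qed.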
100 | 101 | Lemma disjoint_update_mapsto_iff: 102 | forall {m1 m2}, Disjoint m1 m2 -> 103 | forall k e, MapsTo k e (update m1 m2) <-> 104 | MapsTo k e m1 \/ MapsTo k e m2. 105 | Proof. 106 | intros m1 m2 D k e; rewrite update_mapsto_iff. 107 | generalize (Disjoint_mapsto_nin D k e); intros G; intuition. 108 | Qed. 109 | 110 | Lemma disjoint_update_comm: 111 | forall {m1 m2}, Disjoint m1 m2 -> 112 | Map.Equal (update m1 m2) (update m2 m1). 113 | Proof. 114 | intros m1 m2 D; rewrite MapF.Equal_mapsto_iff; intros. 115 | rewrite (disjoint_update_mapsto_iff D). 116 | rewrite (disjoint_update_mapsto_iff (Disjoint_sym D)). 117 | intuition. 118 | Qed. 119 | 120 | Lemma update_submap_r: 121 | forall m1 m2, Submap m2 (update m1 m2). 122 | Proof. 123 | intros m1 m2 k e M; apply update_mapsto_iff; now left. 124 | Qed. 125 | 126 | Lemma disjoint_update_submap_l: 127 | forall {m1 m2}, Disjoint m1 m2 -> 128 | Submap m1 (update m1 m2). 129 | Proof. 130 | intros m1 m2 D k e M; apply disjoint_update_mapsto_iff; intuition. 131 | Qed. 132 | 133 | 134 | Lemma Partition_disjoint: 135 | forall {m m1 m2}, Partition m m1 m2 -> Disjoint m1 m2. 136 | Proof. 137 | unfold Partition; intuition. 138 | Qed. 139 | 140 | Lemma Partition_mapsto_iff: 141 | forall {m m1 m2}, Partition m m1 m2 -> 142 | forall k e, MapsTo k e m <-> 143 | MapsTo k e m1 \/ MapsTo k e m2. 144 | Proof. 145 | unfold Partition; intuition. 146 | Qed. 147 | 148 | Lemma Partition_mapsto_l: 149 | forall {m m1 m2}, Partition m m1 m2 -> 150 | forall k e, MapsTo k e m1 -> MapsTo k e m. 151 | Proof. 152 | intros; rewrite (Partition_mapsto_iff H); intuition. 153 | Qed. 154 | 155 | Lemma Partition_mapsto_r: 156 | forall {m m1 m2}, Partition m m1 m2 -> 157 | forall k e, MapsTo k e m2 -> MapsTo k e m. 158 | Proof. 159 | intros; rewrite (Partition_mapsto_iff H); intuition. 160 | Qed. 161 | 162 | Lemma Partition_submap_l: 163 | forall {m m1 m2}, Partition m m1 m2 -> Submap m1 m. 164 | Proof. 165 | intros m m1 m2 P k e M; rewrite (Partition_mapsto_iff P); intuition. 166 | Qed. 167 | 168 | Lemma Partition_submap_r: 169 | forall {m m1 m2}, Partition m m1 m2 -> Submap m2 m. 170 | Proof. 171 | intros m m1 m2 P k e M; rewrite (Partition_mapsto_iff P); intuition. 172 | Qed. 173 | 174 | Lemma Partition_in_iff: 175 | forall {m m1 m2}, Partition m m1 m2 -> 176 | forall k, In k m <-> In k m1 \/ In k m2. 177 | Proof. 178 | intros; generalize (Partition_mapsto_iff H); split; intros. 179 | - destruct H1; rewrite H0 in H1; destruct or H1; 180 | [ left | right ]; now exists x. 181 | - destruct or H1; destruct H1; exists x; rewrite H0; intuition. 182 | Qed. 183 | 184 | Lemma Partition_in_l: 185 | forall {m m1 m2}, Partition m m1 m2 -> 186 | forall k, In k m1 -> In k m. 187 | Proof. 188 | intros; rewrite (Partition_in_iff H); intuition. 189 | Qed. 190 | 191 | Lemma Partition_in_r: 192 | forall {m m1 m2}, Partition m m1 m2 -> 193 | forall k, In k m2 -> In k m. 194 | Proof. 195 | intros; rewrite (Partition_in_iff H); intuition. 196 | Qed. 197 | 198 | 199 | Lemma Partition_refl: 200 | forall m, Partition m m (Map.empty elt). 201 | Proof. 202 | intros; unfold Partition; split. 203 | - apply Disjoint_empty_r. 204 | - intros; rewrite MapF.empty_mapsto_iff; intuition. 205 | Qed. 206 | 207 | Lemma Partition_sym: 208 | forall m m1 m2, Partition m m1 m2 -> Partition m m2 m1. 209 | Proof. 210 | apply MapProperties.Partition_sym. 211 | Qed. 212 | 213 | Lemma Partition_empty_r: 214 | forall m m', Partition m m' (Map.empty elt) -> 215 | Map.Equal m m'. 216 | Proof. 
217 | intros; apply MapF.Equal_mapsto_iff; intros. 218 | unfold Partition in *; destruct_conjs. 219 | generalize (H0 k e); rewrite MapF.empty_mapsto_iff; intuition. 220 | Qed. 221 | 222 | Lemma Partition_update: 223 | forall m m1 m2, Partition m m1 m2 -> 224 | Map.Equal m (update m1 m2). 225 | Proof. 226 | intros; unfold MapProperties.Partition in *; destruct_conjs. 227 | apply MapF.Equal_mapsto_iff; intros. 228 | rewrite H0. 229 | rewrite (disjoint_update_mapsto_iff H). 230 | intuition. 231 | Qed. 232 | 233 | Lemma disjoint_update_partition: 234 | forall m1 m2, Disjoint m1 m2 -> 235 | Partition (update m1 m2) m1 m2. 236 | Proof. 237 | intros; unfold Partition; split; auto; intros. 238 | rewrite (disjoint_update_mapsto_iff H); intuition. 239 | Qed. 240 | 241 | Lemma Partition_assoc: 242 | forall m m1 m2 m2a m2b, 243 | Partition m m1 m2 -> 244 | Partition m2 m2a m2b -> 245 | Partition m (update m1 m2a) m2b. 246 | Proof. 247 | intros; unfold Partition; split. 248 | - unfold Disjoint; intros k I; destruct I as [Ia Ib]. 249 | apply update_in_iff in Ia; destruct or Ia. 250 | + apply (Partition_disjoint H k). 251 | now apply (Partition_in_r H0) in Ib. 252 | + now apply (Partition_disjoint H0 k). 253 | - intros; rewrite disjoint_update_mapsto_iff. 254 | rewrite (Partition_mapsto_iff H). 255 | rewrite (Partition_mapsto_iff H0). 256 | split; intuition. 257 | apply Disjoint_submap_r with (m2:=m2). 258 | apply (Partition_disjoint H). 259 | apply (Partition_submap_l H0). 260 | Qed. 261 | 262 | Lemma Partition_add_1: 263 | forall m m1 m2 k v v1, 264 | Partition m (Map.add k v1 m1) m2 -> 265 | Partition (Map.add k v m) (Map.add k v m1) m2. 266 | Proof. 267 | intros m m1 m2 k v v1 P. 268 | unfold Partition; split. 269 | - intros kk I; destruct I; apply (Partition_disjoint P kk). 270 | rewrite MapF.add_in_iff in *; intuition. 271 | - intros kk e. 272 | rewrite MapF.add_mapsto_iff. 273 | rewrite (Partition_mapsto_iff P). 274 | repeat rewrite MapF.add_mapsto_iff. 275 | intuition. 276 | right; split; intuition. 277 | apply (Partition_disjoint P kk). 278 | rewrite H in *; rewrite MapF.add_in_iff; intuition; now exists e. 279 | Qed. 280 | 281 | End XFacts. 282 | End WXFacts_fun. 283 | 284 | 285 | Module Separation. 286 | Definition ptr := Z. 287 | Definition ptr_eq := Z.eq_dec. 288 | Definition value := Z. 289 | Implicit Types v: value. 290 | 291 | Module Heap := FMapList.Make Z_as_OT. 292 | Module HeapF := FMapFacts.WFacts_fun Z_as_OT Heap. 293 | Module HeapP := FMapFacts.WProperties_fun Z_as_OT Heap. 294 | Module HeapX := WXFacts_fun Z_as_OT Heap. 295 | 296 | Definition heap := Heap.t value. 297 | Implicit Types h : heap. 298 | Definition empty_heap := Heap.empty value. 299 | 300 | Notation heap_Equal := Heap.Equal. 301 | 302 | 303 | (* Assertions, aka heap propositions *) 304 | Definition weak_assertion := heap -> Prop. 305 | 306 | Definition assertion_wf (wa:weak_assertion) := 307 | forall h1 h2, 308 | heap_Equal h1 h2 -> wa h1 -> wa h2. 309 | 310 | Lemma assertion_wf_iff: 311 | forall wa:weak_assertion, 312 | forall wawf:assertion_wf wa, 313 | forall h1 h2, 314 | heap_Equal h1 h2 -> wa h1 <-> wa h2. 315 | Proof. 316 | split; intros; unfold assertion_wf in *. 317 | - now apply wawf with (h1:=h1). 318 | - apply wawf with (h1:=h2); [ symmetry | ]; auto. 319 | Qed. 320 | 321 | Inductive assertion : Type := 322 | | Assert : forall wa:weak_assertion, 323 | forall wawf:assertion_wf wa, 324 | assertion. 325 | Hint Constructors assertion. 
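(* Editor's aside (a sketch, not in the original file): any plain [Prop] can
   be lifted to an [assertion], since a heap-independent predicate is
   trivially well-formed.  The name [pure] is made up. *)
Definition pure (P:Prop) : assertion.
  refine (Assert (fun _ => P) _).
  unfold assertion_wf; intros h1 h2 _ HP; exact HP.
Defined.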
326 | 327 | Definition asserts : assertion -> heap -> Prop := 328 | fun a h => 329 | match a with 330 | | Assert wa _ => wa h 331 | end. 332 | 333 | Definition assertion_imp : assertion -> assertion -> Prop := 334 | fun a1 a2 => 335 | forall h, asserts a1 h -> asserts a2 h. 336 | 337 | Definition assertion_iff : assertion -> assertion -> Prop := 338 | fun a1 a2 => 339 | forall h, asserts a1 h <-> asserts a2 h. 340 | 341 | Infix "===>" := assertion_imp (at level 90) : sep_scope. 342 | Infix "<===>" := assertion_iff (at level 90) : sep_scope. 343 | Local Open Scope sep_scope. 344 | 345 | Global Instance: Proper (eq ==> heap_Equal ==> iff) asserts. 346 | Proof. 347 | intros a1 a2 aeq h1 h2 heq. 348 | subst; destruct a2. 349 | unfold asserts; split; intros. 350 | - now apply (wawf _ _ heq). 351 | - symmetry in heq; now apply (wawf _ _ heq). 352 | Qed. 353 | 354 | 355 | 356 | Definition emp : assertion. 357 | refine (Assert (fun h => heap_Equal h empty_heap) _). 358 | unfold assertion_wf; intros. 359 | now rewrite <- H. 360 | Defined. 361 | 362 | Definition pointsto : ptr -> value -> assertion. 363 | intros p v. 364 | refine (Assert (fun h => heap_Equal h 365 | (Heap.add p v empty_heap)) _). 366 | unfold assertion_wf; intros; now rewrite <- H. 367 | Defined. 368 | Infix "|>" := pointsto (no associativity, at level 75) : sep_scope. 369 | 370 | Definition points : ptr -> assertion. 371 | intros p. 372 | refine (Assert (fun h => exists v, 373 | heap_Equal h (Heap.add p v empty_heap)) _). 374 | unfold assertion_wf; intros. 375 | destruct H0; exists x; now rewrite <- H. 376 | Defined. 377 | Notation "p |>?" := (points p) (no associativity, at level 75) : sep_scope. 378 | 379 | Definition weak_star : assertion -> assertion -> 380 | weak_assertion := 381 | fun a1 a2 => 382 | fun h => 383 | exists h1 h2, HeapP.Partition h h1 h2 /\ 384 | asserts a1 h1 /\ 385 | asserts a2 h2. 386 | 387 | Definition star : assertion -> assertion -> 388 | assertion. 389 | intros a1 a2. 390 | refine (Assert (weak_star a1 a2) _). 391 | unfold assertion_wf; unfold weak_star; intros; destruct_conjs. 392 | exists H0, H1; rewrite <- H; intuition. 393 | Defined. 394 | Infix "**" := star (right associativity, at level 80) : sep_scope. 395 | 396 | Definition heq : heap -> assertion. 397 | intros h. 398 | refine (Assert (fun h' => Heap.Equal h h') _). 399 | unfold assertion_wf; intros; transitivity h1; auto. 400 | Defined. 401 | 402 | Definition weak_imp : assertion -> assertion -> 403 | weak_assertion := 404 | fun a1 a2 => 405 | fun h => asserts a1 h -> asserts a2 h. 406 | 407 | Definition imp : assertion -> assertion -> 408 | assertion. 409 | intros a1 a2. 410 | refine (Assert (weak_imp a1 a2) _). 411 | unfold assertion_wf. unfold weak_imp. intros. 412 | rewrite H in *. auto. 413 | Qed. 414 | 415 | Definition weak_magic_wand : assertion -> assertion -> 416 | weak_assertion := 417 | fun a1 a2 => 418 | fun h => 419 | forall h', HeapP.Disjoint h h' -> 420 | asserts a1 h' -> 421 | asserts a2 (HeapP.update h h'). 422 | 423 | Definition magic_wand : assertion -> assertion -> 424 | assertion. 425 | intros a1 a2. 426 | refine (Assert (weak_magic_wand a1 a2) _). 427 | unfold assertion_wf. unfold weak_magic_wand. intros. 428 | assert (HeapP.Disjoint h1 h') by (now rewrite H). 429 | apply (H0 _ H3) in H2. 430 | assert (heap_Equal (HeapP.update h2 h') (HeapP.update h1 h')) 431 | by (now rewrite H). 432 | now rewrite H4. 433 | Qed. 434 | 435 | 436 | Lemma emp_empty_heap: 437 | asserts emp empty_heap. 438 | Proof. 
439 | unfold asserts; unfold emp; reflexivity. 440 | Qed. 441 | 442 | Lemma emp_equals_iff h: 443 | asserts emp h <-> heap_Equal h empty_heap. 444 | split; intros. 445 | - auto. 446 | - rewrite H; apply emp_empty_heap. 447 | Qed. 448 | 449 | Lemma heq_equals_iff h1 h: 450 | asserts (heq h1) h <-> heap_Equal h1 h. 451 | Proof. 452 | split; intros. 453 | - auto. 454 | - unfold asserts; unfold heq; auto. 455 | Qed. 456 | 457 | Lemma pointsto_equals_iff a v h: 458 | asserts (pointsto a v) h <-> 459 | heap_Equal h (Heap.add a v empty_heap). 460 | Proof. 461 | split; intros. 462 | - auto. 463 | - unfold asserts; unfold pointsto; auto. 464 | Qed. 465 | 466 | Lemma points_equals_iff a h: 467 | asserts (points a) h <-> exists v, heap_Equal h (Heap.add a v empty_heap). 468 | Proof. 469 | split; intros. 470 | - auto. 471 | - unfold asserts; unfold points; auto. 472 | Qed. 473 | 474 | Lemma star_iff : 475 | forall a1 a2 h, asserts (a1 ** a2) h <-> 476 | exists h1 h2, HeapP.Partition h h1 h2 477 | /\ asserts a1 h1 /\ asserts a2 h2. 478 | Proof. 479 | unfold asserts at 1; unfold star; unfold weak_star; intuition. 480 | Qed. 481 | 482 | Ltac destruct_star := 483 | match goal with 484 | | [ |- context [ asserts (?a1 ** ?a2) ?h ] ] => rewrite (star_iff a1 a2 h) 485 | end. 486 | 487 | Ltac destruct_star_in H := 488 | match goal with 489 | | [ H : context [ asserts (?a1 ** ?a2) ?h ] |- _ ] => rewrite (star_iff a1 a2) in H 490 | end. 491 | 492 | Lemma add_emp_r : 493 | forall a, a <===> a ** emp. 494 | Proof. 495 | split; intros. 496 | - destruct_star. 497 | exists h, empty_heap; split; [ | split]. 498 | + apply HeapX.Partition_refl. 499 | + auto. 500 | + apply emp_empty_heap. 501 | - destruct_star_in H. 502 | destruct_conjs. 503 | rewrite emp_equals_iff in H3. 504 | rewrite H3 in H1. 505 | apply HeapX.Partition_empty_r in H1. 506 | now rewrite H1. 507 | Qed. 508 | 509 | Lemma star_comm : 510 | forall a1 a2, a1 ** a2 <===> a2 ** a1. 511 | Proof. 512 | intros a1 a2 h; split; intros I. 513 | all: rewrite star_iff in *; destruct_conjs. 514 | all: exists H, I; intuition. 515 | all: now apply HeapX.Partition_sym. 516 | Qed. 517 | 518 | Lemma star_assoc : 519 | forall a1 a2 a3 h, asserts (a1 ** a2 ** a3) h <-> 520 | asserts ((a1 ** a2) ** a3) h. 521 | Proof. 522 | split; intros. 523 | - destruct_star_in H; destruct_conjs. 524 | destruct_star_in H3; destruct_conjs. 525 | destruct_star; exists (HeapP.update H H3), H4. 526 | split; [ | split ]. 527 | + now apply HeapX.Partition_assoc with (m2:=H0). 528 | + destruct_star; exists H, H3; split; auto. 529 | apply HeapX.disjoint_update_partition. 530 | unfold HeapP.Partition in *; destruct_conjs. 531 | rewrite HeapP.Disjoint_alt in *; intros. 532 | assert (Heap.MapsTo k e' H0). rewrite H8; now left. 533 | apply (H1 _ _ _ H10 H12). 534 | + auto. 535 | - destruct_star_in H; destruct_conjs. 536 | destruct_star_in H2; destruct_conjs. 537 | destruct_star; exists H2, (HeapP.update H4 H0). 538 | assert (HeapP.Disjoint H0 H4). { 539 | unfold HeapP.Partition in *; destruct_conjs. 540 | intros k I; destruct_conjs. 541 | destruct H10, H11. 542 | assert (Heap.MapsTo k x0 H). 543 | rewrite H8; now right. 544 | apply (H1 k); split; [now exists x0 | now exists x]. 545 | } 546 | split; [ | split ]. 547 | + apply HeapP.Partition_sym. 548 | rewrite HeapX.disjoint_update_comm. 549 | apply HeapX.Partition_assoc with (m2:=H). 550 | now apply HeapP.Partition_sym. 551 | now apply HeapP.Partition_sym. 552 | now apply HeapP.Disjoint_sym. 553 | + auto. 
554 | + destruct_star; exists H4, H0; split; [ | split ]; auto. 555 | apply HeapX.disjoint_update_partition. 556 | now apply HeapP.Disjoint_sym. 557 | Qed. 558 | 559 | Lemma star_imp: 560 | forall {a1 a2}, a1 ===> a2 -> 561 | forall x, a1 ** x ===> a2 ** x. 562 | Proof. 563 | intros a1 a2 I x h L. 564 | unfold asserts in *; unfold star in *; unfold weak_star in *. 565 | destruct_conjs. 566 | apply I in H1. 567 | exists L, H; intuition. 568 | Qed. 569 | 570 | Lemma pointsto_find: 571 | forall {p v h}, asserts (p |> v) h -> Heap.find p h = Some v. 572 | Proof. 573 | intros p v h A. 574 | unfold asserts in *; unfold pointsto in *. 575 | rewrite A. 576 | apply Heap.find_1; now apply Heap.add_1. 577 | Qed. 578 | 579 | Lemma star_pointsto_find: 580 | forall {p v a h}, asserts (p |> v ** a) h -> Heap.find p h = Some v. 581 | Proof. 582 | intros p v a h A. 583 | rewrite star_iff in A; destruct_conjs. 584 | apply pointsto_find in H1. 585 | apply Heap.find_1; apply (HeapX.Partition_mapsto_l H0); now apply Heap.find_2. 586 | Qed. 587 | 588 | Lemma points_find: 589 | forall {p h}, asserts (p |>?) h -> exists v, Heap.find p h = Some v. 590 | Proof. 591 | intros p h A. 592 | unfold asserts in *; unfold points in *. 593 | destruct A. 594 | exists x; rewrite H; apply Heap.find_1; now apply Heap.add_1. 595 | Qed. 596 | 597 | Lemma star_points_find: 598 | forall {p a h}, asserts (p |>? ** a) h -> exists v, Heap.find p h = Some v. 599 | Proof. 600 | intros p a h A. 601 | rewrite star_iff in A; destruct_conjs. 602 | apply points_find in H1; destruct H1; exists x. 603 | apply Heap.find_1; apply (HeapX.Partition_mapsto_l H0); now apply Heap.find_2. 604 | Qed. 605 | 606 | 607 | 608 | 609 | Definition Cmd := heap -> option heap. 610 | 611 | Definition sep_triple (p:assertion) (c:Cmd) (q:assertion) := 612 | forall rest h, 613 | asserts (p ** heq rest) h -> 614 | exists h', c h = Some h' /\ asserts (q ** heq rest) h'. (* NB stuff about termination *) 615 | Notation "{{ P }} c {{ Q }}" := (sep_triple P c Q) (at level 90) : sep_scope. 616 | 617 | 618 | Lemma consequence: 619 | forall p p' c q q', 620 | {{p}} c {{q}} -> 621 | p' ===> p -> 622 | q ===> q' -> 623 | {{p'}} c {{q'}}. 624 | Proof. 625 | unfold sep_triple; intros. 626 | apply star_imp with (x:=heq rest) in H0. 627 | apply star_imp with (x:=heq rest) in H1. 628 | apply H0 in H2. 629 | apply H in H2. 630 | destruct_conjs. 631 | apply H1 in H4. 632 | exists H2; intuition. 633 | Qed. 634 | 635 | 636 | Lemma frame: 637 | forall p c q r, 638 | {{p}} c {{q}} -> 639 | {{p ** r}} c {{q ** r}}. 640 | Proof. 641 | unfold sep_triple; intros. 642 | rewrite <- star_assoc in H0. 643 | rewrite star_iff in H0. 644 | destruct_conjs. 645 | assert (asserts (p ** heq H1) h). 646 | rewrite star_iff. 647 | exists H0, H1; intuition. 648 | unfold asserts; unfold heq; reflexivity. 649 | apply H in H5. 650 | destruct_conjs. 651 | exists H5. 652 | intuition. 653 | rewrite <- star_assoc. 654 | rewrite star_iff in H7; destruct_conjs. 655 | rewrite star_iff. 656 | exists H7, H1. 657 | rewrite heq_equals_iff in H11. 658 | repeat rewrite H11 in *. 659 | intuition. 660 | Qed. 661 | 662 | 663 | Inductive expr := 664 | | Ev : Z -> expr 665 | | Ebin : expr -> expr -> (Z -> Z -> Z) -> expr 666 | | Eread : expr -> expr 667 | | Ewrite : expr -> expr -> expr. 
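(* A hypothetical example term (`example_expr` is not in the original file),
   showing how the `expr` constructors combine: it reads pointer 0 and adds 1
   to the value found there.  The step function that evaluates such terms
   follows. *)
Definition example_expr : expr :=
  Ebin (Eread (Ev 0%Z)) (Ev 1%Z) Z.add.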
668 | 669 | Fixpoint estepf (e:expr) (h:heap) : option (expr * heap) := 670 | match e with 671 | | Ebin (Ev v1) (Ev v2) op => Some (Ev (op v1 v2), h) 672 | | Ebin (Ev v1) e2 op => match estepf e2 h with 673 | | Some (e2',h') => Some (Ebin (Ev v1) e2' op, h') 674 | | _ => None 675 | end 676 | | Ebin e1 e2 op => match estepf e1 h with 677 | | Some (e1',h') => Some (Ebin e1' e2 op, h') 678 | | _ => None 679 | end 680 | | Eread (Ev a) => match Heap.find a h with 681 | | Some v => Some (Ev v, h) 682 | | _ => None 683 | end 684 | | Eread e => match estepf e h with 685 | | Some (e',h') => Some (Eread e', h') 686 | | _ => None 687 | end 688 | | Ewrite (Ev a) (Ev v) => match Heap.find a h with 689 | | Some _ => Some (Ev v, Heap.add a v h) 690 | | _ => None 691 | end 692 | | Ewrite (Ev a) e => match estepf e h with 693 | | Some (e',h') => Some (Ewrite (Ev a) e', h') 694 | | _ => None 695 | end 696 | | Ewrite a e => match estepf a h with 697 | | Some (a',h') => Some (Ewrite a' e, h') 698 | | _ => None 699 | end 700 | | _ => None 701 | end. 702 | 703 | Definition ecmd : expr -> Cmd := 704 | fun e h => 705 | match estepf e h with 706 | | Some (e', h') => Some h' 707 | | _ => None 708 | end. 709 | Hint Resolve ecmd. 710 | 711 | Lemma ecmd_estepf_iff e h h': 712 | ecmd e h = Some h' <-> exists e', estepf e h = Some (e', h'). 713 | Proof. 714 | split; intros. 715 | unfold ecmd in *; remember (estepf e h) as s; destruct s. 716 | destruct p; exists e0; inversion H; intuition. 717 | discriminate. 718 | unfold ecmd in *; destruct H as [e' H]; rewrite H; auto. 719 | Qed. 720 | 721 | 722 | Lemma readv_tc: 723 | forall a v, 724 | {{ a |> v }} ecmd (Eread (Ev a)) {{ a |> v }}. 725 | Proof. 726 | intros a v rest h A; exists h. 727 | unfold ecmd; simpl. 728 | rewrite (star_pointsto_find A). 729 | unfold asserts in A; unfold star in A. 730 | intuition. 731 | Qed. 732 | 733 | Lemma writev_tc: 734 | forall a v, 735 | {{ a |>? }} ecmd (Ewrite (Ev a) (Ev v)) {{ a |> v }}. 736 | intros a v rest h A; exists (Heap.add a v h). 737 | unfold ecmd; simpl. 738 | generalize (star_points_find A); intros G; destruct G. 739 | rewrite H; intuition. 740 | exists (Heap.add a v empty_heap), rest; split; [ | split ]. 741 | - rewrite star_iff in A; destruct_conjs. 742 | rewrite points_equals_iff in H2; destruct H2 as [xx H2]; rewrite H2 in H1. 743 | apply HeapX.Partition_add_1 with (v1:=xx). 744 | rewrite heq_equals_iff in H3; now rewrite H3. 745 | - rewrite pointsto_equals_iff; reflexivity. 746 | - rewrite heq_equals_iff; reflexivity. 747 | Qed. 748 | 749 | Lemma reads_tc: 750 | forall a1 a2 e1, 751 | {{a1}} ecmd e1 {{a2}} -> 752 | {{a1}} ecmd (Eread e1) {{a2}}. 753 | Proof. 754 | intros a1 a2 e1 H rest h A. 755 | generalize (H rest h A); intro G; destruct G as [hg [G1 G2]]. 756 | exists hg; split; auto. 757 | rewrite ecmd_estepf_iff in G1; destruct G1 as [e' G1]. 758 | destruct e1; try discriminate. 759 | all: rewrite ecmd_estepf_iff; exists (Eread e'). 760 | all: simpl in *; rewrite G1; auto. 761 | Qed. 762 | 763 | End Separation. 764 | -------------------------------------------------------------------------------- /sep.v: -------------------------------------------------------------------------------- 1 | Require Import Bool Arith List Omega. 2 | Require Import Recdef Morphisms. 3 | Require Import Program.Tactics. 4 | Require Import Relation_Operators. 5 | Require FMapList. 6 | Require FMapFacts. 7 | Require Import Classical. 8 | Require Import Coq.Classes.RelationClasses. 9 | Require Import OrderedType OrderedTypeEx DecidableType. 
10 | Require Import Sorting.Permutation. 11 | Import ListNotations. 12 | 13 | Module WXFacts_fun (E:DecidableType) (Import Map:FMapInterface.WSfun E). 14 | Module MapF := FMapFacts.WFacts_fun E Map. 15 | Module MapProperties := FMapFacts.WProperties_fun E Map. 16 | Section XFacts. 17 | Notation eq_dec := E.eq_dec. 18 | Context {elt: Type}. 19 | Implicit Types m: t elt. 20 | Implicit Types x y z: key. 21 | Implicit Types e: elt. 22 | Notation Partition := MapProperties.Partition. 23 | Notation Disjoint := MapProperties.Disjoint. 24 | Notation update := MapProperties.update. 25 | 26 | 27 | Definition Submap m1 m2 := 28 | forall k e, MapsTo k e m1 -> MapsTo k e m2. 29 | 30 | Lemma Submap_in: 31 | forall {m1 m2}, Submap m1 m2 -> 32 | forall k, In k m1 -> In k m2. 33 | Admitted. 34 | 35 | 36 | (* Pull in the library’s facts on Disjoint and Partition. *) 37 | Lemma Disjoint_alt: 38 | forall m m', Disjoint m m' <-> 39 | (forall k e e', MapsTo k e m -> MapsTo k e' m' -> False). 40 | Admitted. 41 | 42 | Lemma Disjoint_empty_r: 43 | forall {m}, Disjoint m (Map.empty elt). 44 | Admitted. 45 | 46 | Lemma Disjoint_sym: 47 | forall {m1 m2}, Disjoint m1 m2 -> Disjoint m2 m1. 48 | Admitted. 49 | 50 | Lemma Disjoint_in_nin: 51 | forall {m1 m2}, Disjoint m1 m2 -> 52 | forall k, In k m1 -> ~ In k m2. 53 | Admitted. 54 | 55 | Lemma Disjoint_mapsto_nin: 56 | forall {m1 m2}, Disjoint m1 m2 -> 57 | forall k e, MapsTo k e m1 -> ~ In k m2. 58 | Admitted. 59 | 60 | Lemma Disjoint_submap_r: 61 | forall m1 m2 m3, Disjoint m1 m2 -> 62 | Submap m3 m2 -> Disjoint m1 m3. 63 | Admitted. 64 | 65 | 66 | Lemma update_in_iff: 67 | forall m1 m2 k, In k (update m1 m2) <-> In k m1 \/ In k m2. 68 | Admitted. 69 | 70 | Lemma update_mapsto_iff: 71 | forall m1 m2 k e, MapsTo k e (update m1 m2) <-> 72 | (MapsTo k e m2 \/ (MapsTo k e m1 /\ ~ In k m2)). 73 | Admitted. 74 | 75 | Lemma disjoint_update_mapsto_iff: 76 | forall {m1 m2}, Disjoint m1 m2 -> 77 | forall k e, MapsTo k e (update m1 m2) <-> 78 | MapsTo k e m1 \/ MapsTo k e m2. 79 | Admitted. 80 | 81 | Lemma disjoint_update_comm: 82 | forall {m1 m2}, Disjoint m1 m2 -> 83 | Map.Equal (update m1 m2) (update m2 m1). 84 | Admitted. 85 | 86 | Lemma update_submap_r: 87 | forall m1 m2, Submap m2 (update m1 m2). 88 | Admitted. 89 | 90 | Lemma disjoint_update_submap_l: 91 | forall {m1 m2}, Disjoint m1 m2 -> 92 | Submap m1 (update m1 m2). 93 | Admitted. 94 | 95 | 96 | Lemma Partition_disjoint: 97 | forall {m m1 m2}, Partition m m1 m2 -> Disjoint m1 m2. 98 | Admitted. 99 | 100 | Lemma Partition_mapsto_iff: 101 | forall {m m1 m2}, Partition m m1 m2 -> 102 | forall k e, MapsTo k e m <-> 103 | MapsTo k e m1 \/ MapsTo k e m2. 104 | Admitted. 105 | 106 | Lemma Partition_mapsto_l: 107 | forall {m m1 m2}, Partition m m1 m2 -> 108 | forall k e, MapsTo k e m1 -> MapsTo k e m. 109 | Admitted. 110 | 111 | Lemma Partition_mapsto_r: 112 | forall {m m1 m2}, Partition m m1 m2 -> 113 | forall k e, MapsTo k e m2 -> MapsTo k e m. 114 | Admitted. 115 | 116 | Lemma Partition_submap_l: 117 | forall {m m1 m2}, Partition m m1 m2 -> Submap m1 m. 118 | Admitted. 119 | 120 | Lemma Partition_submap_r: 121 | forall {m m1 m2}, Partition m m1 m2 -> Submap m2 m. 122 | Admitted. 123 | 124 | Lemma Partition_in_iff: 125 | forall {m m1 m2}, Partition m m1 m2 -> 126 | forall k, In k m <-> In k m1 \/ In k m2. 127 | Admitted. 128 | 129 | Lemma Partition_in_l: 130 | forall {m m1 m2}, Partition m m1 m2 -> 131 | forall k, In k m1 -> In k m. 132 | Admitted. 
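(* A hedged proof sketch, not part of the exercise skeleton: `Submap_in_sketch`
   is a hypothetical name that redoes `Submap_in` above, only to show how the
   `Submap` definition unfolds; the course solutions live in sep-sol.v. *)
Lemma Submap_in_sketch:
  forall {m1 m2}, Submap m1 m2 ->
  forall k, In k m1 -> In k m2.
Proof.
  intros m1 m2 SM k I.
  destruct I as [e M].       (* In k m1 gives some e with MapsTo k e m1 *)
  exists e; apply SM; exact M.
Qed.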
133 | 134 | Lemma Partition_in_r: 135 | forall {m m1 m2}, Partition m m1 m2 -> 136 | forall k, In k m2 -> In k m. 137 | Admitted. 138 | 139 | Lemma Partition_refl: 140 | forall m, Partition m m (Map.empty elt). 141 | Admitted. 142 | 143 | Lemma Partition_sym: 144 | forall m m1 m2, Partition m m1 m2 -> Partition m m2 m1. 145 | Admitted. 146 | 147 | Lemma Partition_empty_r: 148 | forall m m', Partition m m' (Map.empty elt) -> Map.Equal m m'. 149 | Admitted. 150 | 151 | Lemma Partition_update: 152 | forall m m1 m2, Partition m m1 m2 -> Map.Equal m (update m1 m2). 153 | Admitted. 154 | 155 | Lemma disjoint_update_partition: 156 | forall m1 m2, Disjoint m1 m2 -> Partition (update m1 m2) m1 m2. 157 | Admitted. 158 | 159 | Lemma Partition_assoc: 160 | forall m m1 m2 m2a m2b, 161 | Partition m m1 m2 -> 162 | Partition m2 m2a m2b -> 163 | Partition m (update m1 m2a) m2b. 164 | Admitted. 165 | 166 | Lemma Partition_add_1: 167 | forall m m1 m2 k v v1, 168 | Partition m (Map.add k v1 m1) m2 -> 169 | Partition (Map.add k v m) (Map.add k v m1) m2. 170 | Admitted. 171 | 172 | End XFacts. 173 | End WXFacts_fun. 174 | 175 | 176 | Module Separation. 177 | Definition ptr := Z. 178 | Definition ptr_eq := Z.eq_dec. 179 | Definition value := Z. 180 | Implicit Types v: value. 181 | 182 | Module Heap := FMapList.Make Z_as_OT. 183 | Module HeapF := FMapFacts.WFacts_fun Z_as_OT Heap. 184 | Module HeapP := FMapFacts.WProperties_fun Z_as_OT Heap. 185 | Module HeapX := WXFacts_fun Z_as_OT Heap. 186 | 187 | Definition heap := Heap.t value. 188 | Implicit Types h : heap. 189 | Definition empty_heap := Heap.empty value. 190 | 191 | Notation heap_Equal := Heap.Equal. 192 | 193 | 194 | (* Assertions, aka heap propositions *) 195 | Definition weak_assertion := heap -> Prop. 196 | 197 | Definition assertion_wf (wa:weak_assertion) := 198 | forall h1 h2, 199 | heap_Equal h1 h2 -> wa h1 -> wa h2. 200 | 201 | Lemma assertion_wf_iff: 202 | forall wa:weak_assertion, 203 | forall wawf:assertion_wf wa, 204 | forall h1 h2, 205 | heap_Equal h1 h2 -> wa h1 <-> wa h2. 206 | Proof. 207 | split; intros; unfold assertion_wf in *. 208 | - now apply wawf with (h1:=h1). 209 | - apply wawf with (h1:=h2); [ symmetry | ]; auto. 210 | Qed. 211 | 212 | Inductive assertion : Type := 213 | | Assert : forall wa:weak_assertion, 214 | forall wawf:assertion_wf wa, 215 | assertion. 216 | Hint Constructors assertion. 217 | 218 | Definition asserts : assertion -> heap -> Prop := 219 | fun a h => 220 | match a with 221 | | Assert wa _ => wa h 222 | end. 223 | 224 | Global Instance: Proper (eq ==> heap_Equal ==> iff) asserts. 225 | Proof. 226 | intros a1 a2 aeq h1 h2 heq; subst; destruct a2. 227 | unfold asserts; split; intros. 228 | - now apply (wawf _ _ heq). 229 | - symmetry in heq; now apply (wawf _ _ heq). 230 | Qed. 231 | 232 | 233 | Definition assertion_imp : assertion -> assertion -> Prop := 234 | fun a1 a2 => 235 | forall h, asserts a1 h -> asserts a2 h. 236 | 237 | Definition assertion_iff : assertion -> assertion -> Prop := 238 | fun a1 a2 => 239 | forall h, asserts a1 h <-> asserts a2 h. 240 | 241 | Infix "===>" := assertion_imp (at level 90) : sep_scope. 242 | Infix "<===>" := assertion_iff (at level 90) : sep_scope. 243 | Local Open Scope sep_scope. 244 | 245 | 246 | Definition emp : assertion. 247 | refine (Assert (fun h => heap_Equal h empty_heap) _). 248 | unfold assertion_wf; intros; now rewrite <- H. 249 | Defined. 250 | 251 | Definition pointsto : ptr -> value -> assertion. 252 | intros p v. 
253 | refine (Assert (fun h => heap_Equal h (Heap.add p v empty_heap)) _). 254 | unfold assertion_wf; intros; now rewrite <- H. 255 | Defined. 256 | Infix "|>" := pointsto (no associativity, at level 75) : sep_scope. 257 | 258 | Definition points : ptr -> assertion. 259 | intros p. 260 | refine (Assert (fun h => exists v, heap_Equal h (Heap.add p v empty_heap)) _). 261 | unfold assertion_wf; intros. 262 | destruct H0; exists x; now rewrite <- H. 263 | Defined. 264 | Notation "p |>?" := (points p) (no associativity, at level 75) : sep_scope. 265 | 266 | Definition weak_star : assertion -> assertion -> weak_assertion := 267 | fun a1 a2 => 268 | fun h => 269 | exists h1 h2, HeapP.Partition h h1 h2 /\ asserts a1 h1 /\ asserts a2 h2. 270 | 271 | Definition star : assertion -> assertion -> assertion. 272 | intros a1 a2. 273 | refine (Assert (weak_star a1 a2) _). 274 | unfold assertion_wf; unfold weak_star; intros; destruct_conjs. 275 | exists H0, H1; rewrite <- H; intuition. 276 | Defined. 277 | Infix "**" := star (right associativity, at level 80) : sep_scope. 278 | 279 | Definition heq : heap -> assertion. 280 | intros h. 281 | refine (Assert (fun h' => Heap.Equal h h') _). 282 | unfold assertion_wf; intros; transitivity h1; auto. 283 | Defined. 284 | 285 | Definition weak_imp : assertion -> assertion -> weak_assertion := 286 | fun a1 a2 => 287 | fun h => asserts a1 h -> asserts a2 h. 288 | 289 | Definition imp : assertion -> assertion -> assertion. 290 | intros a1 a2. 291 | refine (Assert (weak_imp a1 a2) _). 292 | unfold assertion_wf. unfold weak_imp. intros. 293 | rewrite H in *. auto. 294 | Qed. 295 | 296 | Definition weak_magic_wand : assertion -> assertion -> weak_assertion := 297 | fun a1 a2 => 298 | fun h => forall h', HeapP.Disjoint h h' -> asserts a1 h' -> 299 | asserts a2 (HeapP.update h h'). 300 | 301 | Definition magic_wand : assertion -> assertion -> assertion. 302 | intros a1 a2. 303 | refine (Assert (weak_magic_wand a1 a2) _). 304 | unfold assertion_wf. unfold weak_magic_wand. intros. 305 | assert (HeapP.Disjoint h1 h') by (now rewrite H). 306 | apply (H0 _ H3) in H2. 307 | assert (heap_Equal (HeapP.update h2 h') (HeapP.update h1 h')) 308 | by (now rewrite H). 309 | now rewrite H4. 310 | Qed. 311 | 312 | 313 | Lemma emp_empty_heap: 314 | asserts emp empty_heap. 315 | Admitted. 316 | 317 | Lemma emp_equals_iff h: 318 | asserts emp h <-> heap_Equal h empty_heap. 319 | Admitted. 320 | 321 | Lemma heq_equals_iff h1 h: 322 | asserts (heq h1) h <-> heap_Equal h1 h. 323 | Admitted. 324 | 325 | Lemma pointsto_equals_iff a v h: 326 | asserts (pointsto a v) h <-> heap_Equal h (Heap.add a v empty_heap). 327 | Admitted. 328 | 329 | Lemma points_equals_iff a h: 330 | asserts (points a) h <-> exists v, heap_Equal h (Heap.add a v empty_heap). 331 | Admitted. 332 | 333 | Lemma star_iff : 334 | forall a1 a2 h, asserts (a1 ** a2) h <-> 335 | exists h1 h2, HeapP.Partition h h1 h2 336 | /\ asserts a1 h1 /\ asserts a2 h2. 337 | Admitted. 338 | 339 | Ltac destruct_star := 340 | match goal with 341 | | [ |- context [ asserts (?a1 ** ?a2) ?h ] ] => rewrite (star_iff a1 a2 h) 342 | end. 343 | 344 | Ltac destruct_star_in H := 345 | match goal with 346 | | [ H : context [ asserts (?a1 ** ?a2) ?h ] |- _ ] => rewrite (star_iff a1 a2) in H 347 | end. 348 | 349 | Lemma add_emp_r: 350 | forall a, a <===> a ** emp. 351 | Admitted. 352 | 353 | Lemma star_comm: 354 | forall a1 a2, a1 ** a2 ===> a2 ** a1. 355 | Admitted. 
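(* An illustrative sanity check (the name `pointsto_example` is hypothetical,
   not part of the exercise): the singleton heap mapping 17 to 42 satisfies
   the points-to assertion 17 |> 42, directly from the definition of
   `pointsto`, in the same style as the solution's `emp_empty_heap`. *)
Lemma pointsto_example:
  asserts (17%Z |> 42%Z) (Heap.add 17%Z 42%Z empty_heap).
Proof.
  unfold asserts; unfold pointsto; reflexivity.
Qed.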
356 | 357 | Lemma star_assoc : 358 | forall a1 a2 a3 h, asserts (a1 ** a2 ** a3) h <-> asserts ((a1 ** a2) ** a3) h. 359 | Admitted. 360 | 361 | Lemma star_imp: 362 | forall {a1 a2}, a1 ===> a2 -> forall x, a1 ** x ===> a2 ** x. 363 | Admitted. 364 | 365 | Lemma pointsto_find: 366 | forall {p v h}, asserts (p |> v) h -> Heap.find p h = Some v. 367 | Admitted. 368 | 369 | Lemma star_pointsto_find: 370 | forall {p v a h}, asserts (p |> v ** a) h -> Heap.find p h = Some v. 371 | Admitted. 372 | 373 | Lemma points_find: 374 | forall {p h}, asserts (p |>?) h -> exists v, Heap.find p h = Some v. 375 | Admitted. 376 | 377 | Lemma star_points_find: 378 | forall {p a h}, asserts (p |>? ** a) h -> exists v, Heap.find p h = Some v. 379 | Admitted. 380 | 381 | 382 | 383 | 384 | Definition Cmd := heap -> option heap. 385 | 386 | Definition sep_triple (p:assertion) (c:Cmd) (q:assertion) := 387 | forall rest h, 388 | asserts (p ** heq rest) h -> 389 | exists h', c h = Some h' /\ asserts (q ** heq rest) h'. (* NB stuff about termination *) 390 | Notation "{{ P }} c {{ Q }}" := (sep_triple P c Q) (at level 90) : sep_scope. 391 | 392 | 393 | Lemma consequence: 394 | forall p p' c q q', 395 | {{p}} c {{q}} -> 396 | p' ===> p -> 397 | q ===> q' -> 398 | {{p'}} c {{q'}}. 399 | Proof. 400 | unfold sep_triple; intros. 401 | apply star_imp with (x:=heq rest) in H0. 402 | apply star_imp with (x:=heq rest) in H1. 403 | apply H0 in H2. 404 | apply H in H2. 405 | destruct_conjs. 406 | apply H1 in H4. 407 | exists H2; intuition. 408 | Qed. 409 | 410 | 411 | Lemma frame: 412 | forall p c q r, 413 | {{p}} c {{q}} -> 414 | {{p ** r}} c {{q ** r}}. 415 | Proof. 416 | unfold sep_triple; intros. 417 | rewrite <- star_assoc in H0. 418 | rewrite star_iff in H0. 419 | destruct_conjs. 420 | assert (asserts (p ** heq H1) h). 421 | rewrite star_iff. 422 | exists H0, H1; intuition. 423 | unfold asserts; unfold heq; reflexivity. 424 | apply H in H5. 425 | destruct_conjs. 426 | exists H5. 427 | intuition. 428 | rewrite <- star_assoc. 429 | rewrite star_iff in H7; destruct_conjs. 430 | rewrite star_iff. 431 | exists H7, H1. 432 | rewrite heq_equals_iff in H11. 433 | repeat rewrite H11 in *. 434 | intuition. 435 | Qed. 436 | 437 | 438 | Inductive expr := 439 | | Ev : Z -> expr 440 | | Ebin : expr -> expr -> (Z -> Z -> Z) -> expr 441 | | Eread : expr -> expr 442 | | Ewrite : expr -> expr -> expr. 443 | 444 | Fixpoint estepf (e:expr) (h:heap) : option (expr * heap) := 445 | match e with 446 | | Ebin (Ev v1) (Ev v2) op => Some (Ev (op v1 v2), h) 447 | | Ebin (Ev v1) e2 op => match estepf e2 h with 448 | | Some (e2',h') => Some (Ebin (Ev v1) e2' op, h') 449 | | _ => None 450 | end 451 | | Ebin e1 e2 op => match estepf e1 h with 452 | | Some (e1',h') => Some (Ebin e1' e2 op, h') 453 | | _ => None 454 | end 455 | | Eread (Ev a) => match Heap.find a h with 456 | | Some v => Some (Ev v, h) 457 | | _ => None 458 | end 459 | | Eread e => match estepf e h with 460 | | Some (e',h') => Some (Eread e', h') 461 | | _ => None 462 | end 463 | | Ewrite (Ev a) (Ev v) => match Heap.find a h with 464 | | Some _ => Some (Ev v, Heap.add a v h) 465 | | _ => None 466 | end 467 | | Ewrite (Ev a) e => match estepf e h with 468 | | Some (e',h') => Some (Ewrite (Ev a) e', h') 469 | | _ => None 470 | end 471 | | Ewrite a e => match estepf a h with 472 | | Some (a',h') => Some (Ewrite a' e, h') 473 | | _ => None 474 | end 475 | | _ => None 476 | end. 
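(* A quick illustration of `estepf` (the name `estepf_example` is hypothetical,
   not part of the exercise skeleton): a binary operation on two values steps
   in one move to the resulting value, leaving the heap unchanged. *)
Lemma estepf_example:
  estepf (Ebin (Ev 1%Z) (Ev 2%Z) Z.add) empty_heap
  = Some (Ev 3%Z, empty_heap).
Proof.
  reflexivity.
Qed.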
477 | 478 | Definition ecmd : expr -> Cmd := 479 | fun e h => 480 | match estepf e h with 481 | | Some (e', h') => Some h' 482 | | _ => None 483 | end. 484 | Hint Resolve ecmd. 485 | 486 | Lemma ecmd_estepf_iff e h h': 487 | ecmd e h = Some h' <-> exists e', estepf e h = Some (e', h'). 488 | Proof. 489 | split; intros. 490 | unfold ecmd in *; remember (estepf e h) as s; destruct s. 491 | destruct p; exists e0; inversion H; intuition. 492 | discriminate. 493 | unfold ecmd in *; destruct H as [e' H]; rewrite H; auto. 494 | Qed. 495 | 496 | 497 | Lemma readv_tc: 498 | forall a v, 499 | {{ a |> v }} ecmd (Eread (Ev a)) {{ a |> v }}. 500 | Admitted. 501 | 502 | Lemma writev_tc: 503 | forall a v, 504 | {{ a |>? }} ecmd (Ewrite (Ev a) (Ev v)) {{ a |> v }}. 505 | Admitted. 506 | 507 | Lemma reads_tc: 508 | forall a1 a2 e1, 509 | {{a1}} ecmd e1 {{a2}} -> 510 | {{a1}} ecmd (Eread e1) {{a2}}. 511 | Proof. 512 | intros a1 a2 e1 H rest h A. 513 | generalize (H rest h A); intro G; destruct G as [hg [G1 G2]]. 514 | exists hg; split; auto. 515 | rewrite ecmd_estepf_iff in G1; destruct G1 as [e' G1]. 516 | Admitted. 517 | 518 | End Separation. 519 | --------------------------------------------------------------------------------