├── examples ├── 000-Universe.ma ├── 000-Universe.na ├── ConstSharing.ma ├── 010-Lam.ma ├── datatype.ma ├── 010-Eq.ma ├── 010-Lam.na ├── SubTyping.na ├── 032-Nisse.ma ├── datatype.na ├── EtaEq.ma ├── EtaEq2.ma ├── nat.ma ├── 020-Finite.ma ├── 030-Ulf.ma ├── 031-TripleF.ma ├── 033-IfSwap.ma └── 040-Integers.ma ├── report-radanne ├── poster │ ├── Lam.agda │ ├── case.agda │ ├── code.agda │ ├── tangocolors.sty │ ├── beamerthemeI6pd2.sty │ └── poster.tex ├── martin-lof.png ├── mathpartir.sty ├── slides │ ├── seq.ml │ └── slides.tex ├── Makefile ├── Nisse.agda ├── TripleF.agda ├── Ulf.agda ├── lst.tex ├── Common.hs └── Report.hs ├── src ├── Setup.hs ├── FeInterface.hs ├── Examples.hs ├── Nano │ ├── Frontend.hs │ └── Resolve.hs ├── Micro │ ├── Frontend.hs │ └── Resolve.hs ├── nanoAgda.cabal ├── Nano.cf ├── Micro.cf ├── RM.hs ├── Ident.hs ├── Options.hs ├── Main.hs ├── Display.hs ├── TexnAgda.hs ├── TCM.hs ├── Fresh.hs ├── Terms.hs ├── Eq.hs ├── Eval.hs ├── Heap.hs └── TypeCheck.hs ├── .gitignore ├── Talk.org ├── src-Andreas ├── Check.hs └── Value.hs ├── Radanne-Internship.md ├── Outline-Draft.md └── core.org /examples/000-Universe.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | *1 3 | TYPE 4 | *2 5 | -------------------------------------------------------------------------------- /report-radanne/poster/Lam.agda: -------------------------------------------------------------------------------- 1 | id : (a : Set) -> a -> a 2 | id _ x = x 3 | -------------------------------------------------------------------------------- /examples/000-Universe.na: -------------------------------------------------------------------------------- 1 | TERM 2 | s0 = *0 ; 3 | s0 4 | TYPE 5 | s1 = *1 ; 6 | s1 -------------------------------------------------------------------------------- /examples/ConstSharing.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | x := *0 ; 3 | y := *0 ; 4 | x 5 | TYPE 6 | *1 -------------------------------------------------------------------------------- /examples/010-Lam.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | \a -> \x -> x 3 | 4 | TYPE 5 | (a : *1) -> (x : a) -> a 6 | 7 | -------------------------------------------------------------------------------- /report-radanne/martin-lof.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jyp/sctt/HEAD/report-radanne/martin-lof.png -------------------------------------------------------------------------------- /report-radanne/mathpartir.sty: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jyp/sctt/HEAD/report-radanne/mathpartir.sty -------------------------------------------------------------------------------- /report-radanne/slides/seq.ml: -------------------------------------------------------------------------------- 1 | let x = 3 in 2 | let y = 2 in 3 | let f' = (+) x in 4 | let z = f' y in 5 | z 6 | -------------------------------------------------------------------------------- /src/Setup.hs: -------------------------------------------------------------------------------- 1 | import System.Cmd 2 | import Distribution.Simple 3 | 4 | main = do 5 | system "bnfc -d --haskell Nano.cf" 6 | system "bnfc -d --haskell Micro.cf" 7 | defaultMain 8 | -------------------------------------------------------------------------------- /report-radanne/poster/case.agda: 
-------------------------------------------------------------------------------- 1 | myFun x with f x 2 | ... | Foo = !\color{Gray}( No knowledge that! f x == Foo !\color{Gray})! 3 | ... | Bar = !\color{Gray}( No knowledge that! f x == Bar !\color{Gray})! 4 | -------------------------------------------------------------------------------- /examples/datatype.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | Unit = { 'unit } : *0 ; 3 | \s -> ( c : { 'Foo , 'Bar } ) * 4 | ( case c of { 5 | 'Foo -> s. 6 | 'Bar -> Unit. 7 | } ) 8 | TYPE 9 | *0 -> *0 -------------------------------------------------------------------------------- /report-radanne/Makefile: -------------------------------------------------------------------------------- 1 | 2 | all: Report.pdf 3 | 4 | %.pdf: %.tex 5 | pdflatex $< 6 | pdflatex $< 7 | 8 | %.tex: %.hs 9 | ghc --make $< 10 | ./$* 11 | 12 | %.hspp: %.hs 13 | ghc -E $< 14 | 15 | -------------------------------------------------------------------------------- /examples/010-Eq.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | Eq = (\A -> \x -> \y -> (P : A -> *0) -> P x -> P y) 3 | : (A : *0) -> A -> A -> *1; 4 | refl = (\A -> \x -> \P -> \p -> p) 5 | : (A : *0) -> (x:A) -> Eq A x x; 6 | *0 7 | TYPE 8 | *1 9 | -------------------------------------------------------------------------------- /examples/010-Lam.na: -------------------------------------------------------------------------------- 1 | TERM 2 | f = \a -> ( f' = \x -> (r=x; r); 3 | f' ) ; 4 | f 5 | TYPE 6 | s1 = *0 ; 7 | f_ty = (a : s1) -> ( a' = a ; 8 | a2a = (x : a') -> a'; 9 | a2a ) ; 10 | f_ty -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .cake 2 | Nano/ 3 | Micro/ 4 | *_flymake.hs 5 | *~ 6 | dist/ 7 | *.o 8 | *.hi 9 | *.tex 10 | *.mp 11 | *.mpx 12 | *.pdf 13 | *.aux 14 | *.log 15 | PaperTools/ 16 | /report-radanne/Report 17 | /report-radanne/Report.out 18 | /report-radanne/mpboxes.txt 19 | -------------------------------------------------------------------------------- /report-radanne/Nisse.agda: -------------------------------------------------------------------------------- 1 | module Nisse where 2 | 3 | open import Data.Product 4 | 5 | nisse : (A : Set) -> (B : Set) -> (P : A → B → Set) -> (p : A × B) -> 6 | let (u1 , u2) = p 7 | v = P u1 u2 8 | in v -> v 9 | nisse A B P (u1' , u2') = 10 | let v' = P u1' u2' in \(x : v') -> x 11 | -------------------------------------------------------------------------------- /examples/SubTyping.na: -------------------------------------------------------------------------------- 1 | TERM 2 | s1 = *1 ; 3 | s0 = *0 ; 4 | t1 = (x' : s1) -> s0 ; 5 | t2 = (x' : s0) -> s1 ; -- we have t1 ≤ t2 but not t2 ≤ t1 6 | f = \y -> (y = y ; y) ; 7 | f_ty = (x : t1) -> t2 ; 8 | -- f_ty = (x : t2) -> t1 ; -- this should fail. 
9 | x = f : f_ty ; 10 | s0 11 | 12 | TYPE 13 | s1 = *1 ; 14 | s1 -------------------------------------------------------------------------------- /examples/032-Nisse.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | A = *0 : *1; B = *0 : *1; 3 | Sharing = 4 | (\P -> \p -> (u1,u2) = split p; 5 | v = P u1 u2; 6 | \x -> (y = x : v ; y) ) 7 | : (P : A -> B -> *0) -> 8 | (p : (a : A) * B) -> 9 | (u1,u2) = split p; 10 | v = P u1 u2 ; 11 | v -> v ; 12 | *0 13 | TYPE 14 | *1 15 | -------------------------------------------------------------------------------- /examples/datatype.na: -------------------------------------------------------------------------------- 1 | TERM 2 | Unit_t = { 'unit } ; 3 | Unit_ty = *0 ; 4 | Unit = Unit_t : Unit_ty ; 5 | f = \s -> ( 6 | tag = { 'Foo , 'Bar } ; 7 | f' = (c : tag) X 8 | (case c of { 9 | 'Foo -> s' = s ; s' . 10 | 'Bar -> Unit' = Unit ; Unit' 11 | }) ; 12 | f') ; 13 | f 14 | TYPE 15 | star0 = *0 ; 16 | f_ty = ( s : star0) -> star0 ; 17 | f_ty -------------------------------------------------------------------------------- /src/FeInterface.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, ExistentialQuantification, RecordWildCards #-} 2 | module FeInterface where 3 | 4 | import Display 5 | import TCM 6 | 7 | data FrontEnd = forall token modul. Pretty token => FE { 8 | myLLexer :: String -> [token], 9 | pModule :: [token] -> Either String modul, 10 | resolveModule :: modul -> Either String (Term',Term') 11 | } 12 | -------------------------------------------------------------------------------- /report-radanne/TripleF.agda: -------------------------------------------------------------------------------- 1 | module TripleF where 2 | 3 | open import Data.Bool 4 | 5 | data _==_ {A : Set} (x : A) : A -> Set where 6 | refl : x == x 7 | 8 | tripleF : (f : Bool -> Bool) -> (x : Bool) -> 9 | (f x) == (f (f (f x))) 10 | tripleF f x with x | f x 11 | ... | true | true = refl 12 | ... | true | false = refl 13 | ... | false | true = refl 14 | ... 
| false | false = refl 15 | -------------------------------------------------------------------------------- /src/Examples.hs: -------------------------------------------------------------------------------- 1 | module Examples where 2 | 3 | import Terms 4 | import Ident 5 | import Fresh 6 | import Eq 7 | 8 | test_refl x = run emptyHeap $ testTerm x x 9 | 10 | type TermId = Term Id Id 11 | 12 | tt,ff,case_t :: Term String String 13 | ff = Constr "x" (Tag "False") (Conc "x") 14 | tt = Constr "y" (Tag "True") (Conc "y") 15 | case_t = Case "x" [Br "True" $ Conc $ "y", Br "False" $ Conc $ "z"] 16 | 17 | -------------------------------------------------------------------------------- /examples/EtaEq.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | Eq = (\A -> \x -> \y -> (P : A -> *0) -> P x -> P y) 3 | : (A : *0) -> A -> A -> *1; 4 | refl = (\A -> \x -> \P -> \p -> p) 5 | : (A : *0) -> (x:A) -> Eq A x x; 6 | 7 | EtaEq = (\A -> \B -> \f -> ( 8 | foo = (refl (A -> B) f) 9 | : Eq (A -> B) f (\x -> f x) ; 10 | *0 11 | )) 12 | : (A : *0) -> (B : *0) -> (f : A -> B) -> *1 13 | ; 14 | *0 15 | TYPE 16 | *1 -------------------------------------------------------------------------------- /examples/EtaEq2.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | Eq = (\A -> \x -> \y -> (P : A -> *0) -> P x -> P y) 3 | : (A : *0) -> A -> A -> *1; 4 | refl = (\A -> \x -> \P -> \p -> p) 5 | : (A : *0) -> (x:A) -> Eq A x x; 6 | 7 | EtaEq = (\A -> \B -> \p -> 8 | (refl ((a : A) * B) p)) 9 | : (A : *0) -> (B : *0) -> (p : (a : A) * B) -> 10 | (p' = p ; 11 | (x,y) = split p' ; 12 | Eq ((a : A) * B) p' (x,y)) 13 | ; 14 | *0 15 | TYPE 16 | *1 -------------------------------------------------------------------------------- /report-radanne/poster/code.agda: -------------------------------------------------------------------------------- 1 | data Nat : Set where 2 | Zero : Nat 3 | Succ : Nat -> Nat 4 | 5 | _+_ : Nat -> Nat -> Nat 6 | Zero + n = n 7 | (Succ n) + m = Succ (n + m) 8 | 9 | data Vec (A : Set) : Nat -> Set where 10 | Nil : Vec A Zero 11 | Cons : {n : Nat} -> A -> Vec A n -> Vec A (Succ n) 12 | 13 | append : forall { n m A } -> Vec A n -> Vec A m 14 | -> Vec A (n + m) 15 | append Nil ys = ys 16 | append (Cons x xs) ys = Cons x (append xs ys) 17 | -------------------------------------------------------------------------------- /report-radanne/Ulf.agda: -------------------------------------------------------------------------------- 1 | module Ulf where 2 | 3 | open import Data.Bool 4 | 5 | data _≡_ {A : Set} (x : A) : A → Set where 6 | refl : x ≡ x 7 | 8 | Ulf : (A : Set) -> (A -> Bool) -> (A -> Bool) -> A -> Bool 9 | Ulf A f g x = h' y 10 | where h : Bool -> A -> Bool 11 | h true = f 12 | h false = g 13 | 14 | x0 = x 15 | y = f x0 16 | 17 | h' : Bool -> Bool 18 | h' true = let z : (h y x0) ≡ y 19 | z = refl 20 | in true 21 | h' false = false 22 | -------------------------------------------------------------------------------- /examples/nat.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | Top := {'nil}; 3 | Tag := {'zer, 'suc}; 4 | nat = (rec n -> (t : Tag) * (case t of {'zer -> Top. 'suc -> n})) 5 | : *0; 6 | zero = ('zer, 'nil) : nat; 7 | succ = (\n -> ('suc, n)) : nat -> nat; 8 | natElim = (rec el -> \P -> \z -> \s -> \m -> 9 | (m1,m2) = split m; 10 | case m1 of 11 | {'zer -> case m2 of {'nil -> z.}. 
12 | 'suc -> s m2 (el P z s m2).}) 13 | : (P : nat -> *0) -> P zero -> ((n : nat) -> P n -> P (succ n)) -> (m : nat) -> P m; 14 | nat 15 | TYPE 16 | *0 -------------------------------------------------------------------------------- /examples/020-Finite.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | Three = {'t0, 't1, 't2} : *0; 3 | Three_elim = (\P -> \a -> \b -> \c -> \x -> case x of { 4 | 't0 -> a. 5 | 't1 -> b. 6 | 't2 -> c. 7 | }) 8 | : (P : Three -> *0) -> P 't0 -> P 't1 -> P 't2 -> (x : Three) -> P x; 9 | inc0 = (\x -> x) : Three -> Three; 10 | inc1 = Three_elim (\x -> Three) 't1 't2 't0 : Three -> Three; 11 | inc2 = Three_elim (\x -> Three) 't2 't0 't1 : Three -> Three; 12 | plus = Three_elim (\y -> Three -> Three) inc0 inc1 inc2 13 | : Three -> Three -> Three; 14 | Three 15 | TYPE 16 | *0 -------------------------------------------------------------------------------- /src/Nano/Frontend.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, ExistentialQuantification, RecordWildCards #-} 2 | module Nano.Frontend where 3 | 4 | import qualified Nano.Abs as A 5 | import Nano.Par 6 | import Nano.Lex 7 | import Nano.Layout 8 | import Nano.ErrM 9 | import Nano.Resolve 10 | import Display 11 | import FeInterface 12 | 13 | instance Pretty Token where 14 | pretty = text . show 15 | 16 | errToEither (Ok x) = Right x 17 | errToEither (Bad x) = Left x 18 | 19 | fe = FE { pModule = errToEither . Nano.Par.pModule, 20 | resolveModule = Nano.Resolve.resolve, 21 | myLLexer = resolveLayout True . myLexer} 22 | -------------------------------------------------------------------------------- /src/Micro/Frontend.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, ExistentialQuantification, RecordWildCards #-} 2 | module Micro.Frontend where 3 | 4 | import qualified Micro.Abs as A 5 | import Micro.Par 6 | import Micro.Lex 7 | import Micro.Layout 8 | import Micro.ErrM 9 | import Micro.Resolve 10 | import Display 11 | import FeInterface 12 | 13 | instance Pretty Token where 14 | pretty = text . show 15 | 16 | errToEither (Ok x) = Right x 17 | errToEither (Bad x) = Left x 18 | 19 | fe = FE { pModule = errToEither . Micro.Par.pModule, 20 | resolveModule = Micro.Resolve.resolve, 21 | myLLexer = resolveLayout True . myLexer} 22 | -------------------------------------------------------------------------------- /examples/030-Ulf.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | Eq = (\A -> \x -> \y -> (P : A -> *0) -> P x -> P y) 3 | : (A : *0) -> A -> A -> *1; 4 | refl = (\A -> \x -> \P -> \p -> p) 5 | : (A : *0) -> (x:A) -> Eq A x x; 6 | Bool = { 'true, 'false } : *0; 7 | A = *0 : *1; 8 | Ulf = (\f -> \g -> \x -> 9 | ( h = \b -> case b of { 10 | 'true -> f. 11 | 'false -> g. } 12 | : (b : Bool) -> A -> Bool; 13 | y = f x; 14 | case y of { 15 | 'true -> z = (refl Bool y) : Eq Bool (h y x) y; 16 | 'true. 
17 | 'false -> 'false.} 18 | )) 19 | : (f : A -> Bool) -> (g : A -> Bool) -> A -> Bool; 20 | *0 21 | TYPE 22 | *1 23 | -------------------------------------------------------------------------------- /examples/031-TripleF.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | Eq = (\A -> \x -> \y -> (P : A -> *0) -> P x -> P y) 3 | : (A : *0) -> A -> A -> *1; 4 | refl = (\A -> \x -> \P -> \p -> p) 5 | : (A : *0) -> (x:A) -> Eq A x x; 6 | Bool = { 'true, 'false } : *0; 7 | tripleF = (\f -> \x -> ( 8 | case x of { 9 | 'true -> case f x of { 10 | 'true -> refl Bool 'true. 11 | 'false -> refl Bool 'false. 12 | }. 13 | 'false -> case f x of { 14 | 'true -> refl Bool 'true. 15 | 'false -> refl Bool 'false. 16 | }. 17 | })) 18 | : (f: Bool -> Bool) -> (x : Bool) -> 19 | Eq Bool (f x) (f (f (f x))); 20 | *0 21 | TYPE 22 | *1 23 | -------------------------------------------------------------------------------- /Talk.org: -------------------------------------------------------------------------------- 1 | * A scientific goal 2 | We aim to construct a type-theory which can be used as a backend for 3 | dependently-typed languages such as Agda or Coq. Such a language 4 | should be 5 | 6 | - a type-theory: correctness should be expressible via types 7 | - low-level: one should be able to translate various high-level 8 | languages into this language while retaining properties such as 9 | run-time behaviour, etc. 10 | - minimal: the type-checking program should be amenable to 11 | verification. 12 | 13 | 14 | * A SC presentation of TT: Motivation 15 | - Evaluate SC as a presentation of TT 16 | - ND to SC is "mechanical"... but what does the result look like in practice? 17 | - Debug PiSigma 18 | - Sharing is expressible (J. Launchbury -- A Natural Semantics for Lazy Evaluation) 19 | - Natural syntax for case, etc 20 | - Integration of with 21 | - Low-level; as in other compilers. 
22 | 23 | * Syntax 24 | 25 | * Support for 'with' 26 | make sure there is a functional dependency form d to x in the environment entries 27 | x = d 28 | -------------------------------------------------------------------------------- /report-radanne/lst.tex: -------------------------------------------------------------------------------- 1 | \lstset{ 2 | tabsize=4, 3 | aboveskip={0.4\baselineskip}, 4 | belowcaptionskip=0.4\baselineskip, 5 | columns=fixed, 6 | showstringspaces=false, 7 | extendedchars=true, 8 | breaklines=true, 9 | frame=none, 10 | xleftmargin=\parindent, 11 | basicstyle=\footnotesize\ttfamily, 12 | keywordstyle=\bfseries\color{green!60!black}, 13 | keywordstyle=[2]\bfseries\color{red!50!white}, 14 | commentstyle=\itshape\color{purple!60!black}, 15 | identifierstyle=\color{blue!30!black}, 16 | stringstyle=\color{orange}, 17 | } 18 | 19 | \lstdefinelanguage{Agda}{ 20 | keywords=[2]{Nil, Cons, Zero, Succ, Foo, Bar}, 21 | morekeywords={data,where,case,let,in,with,forall}, 22 | literate={->}{{$\to{}$}}1 {xx}{{$\times$}}1 {==}{{$\equiv$}}1 {*}{{$\times$}}1, 23 | } 24 | 25 | \lstdefinelanguage{nanoAgda}{ 26 | keywords=[2]{Foo, Bar}, 27 | morekeywords={case,of,split,rec,TERM, TYPE}, 28 | literate={->}{{$\to$}}1 {\\}{{$\lambda$}}1 {'}{`}1 29 | {*}{{$\times$}}1 {*0}{{$\star_0$}}2 {*1}{{$\star_1$}}2 {*2}{{$\star_2$}}2, 30 | } -------------------------------------------------------------------------------- /src/nanoAgda.cabal: -------------------------------------------------------------------------------- 1 | name: nanoAgda 2 | version: 0.1.0.0 3 | synopsis: nanoAgda: a core language for type theory 4 | -- description: 5 | -- license: 6 | -- license-file: 7 | author: Jean-Philippe Bernardy 8 | -- copyright: 9 | category: Language 10 | build-type: Custom 11 | cabal-version: >=1.8 12 | 13 | executable nac 14 | main-is: Main.hs 15 | hs-source-dirs: . 16 | ghc-options: -Wall 17 | build-tools: alex, happy 18 | other-modules: Nano.Lex, Nano.Par, Micro.Lex, Micro.Par 19 | build-depends: base >= 4, 20 | geniplate ==0.6.*, 21 | bifunctors, 22 | genifunctors, 23 | mtl ==2.1.*, 24 | pretty ==1.1.*, 25 | containers >=0.4, 26 | -- pretty-show, 27 | array ==0.4.*, 28 | filepath, 29 | -- process, 30 | cmdargs 31 | -------------------------------------------------------------------------------- /examples/033-IfSwap.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | 3 | Eq = (\A -> \x -> \y -> (P : A -> *0) -> P x -> P y) 4 | : (A : *0) -> A -> A -> *1; 5 | refl = (\A -> \x -> \P -> \p -> p) 6 | : (A : *0) -> (x:A) -> Eq A x x; 7 | 8 | Bool := { 'true, 'false } ; 9 | If = (\ x -> \ A -> \ B -> case x of 10 | { 'true -> A. 11 | 'false -> B. 12 | }) 13 | : (x : Bool) -> (A : *0) -> (B : *0) -> *0 ; 14 | 15 | PBool = ((x : Bool) -> *0) 16 | : *1 ; 17 | if = (\ P -> \ x -> \ y -> \ z -> case x of 18 | { 'true -> y. 19 | 'false -> z. 20 | }) 21 | : (P : PBool) -> (x : Bool) -> (y : P 'true) -> (z : P 'false) -> P x ; 22 | not = (\ x -> if (\ x -> Bool) x 'false 'true) 23 | : (x : Bool) -> Bool ; 24 | 25 | thm = (\ P -> \ x -> \ y -> \ z -> case z of 26 | { 'true -> refl (P 'true) y. 27 | 'false -> refl (P 'false) z. 
28 | }) 29 | : (P : PBool) -> (x : Bool) -> (y : P 'true) -> (z : P 'false) -> 30 | Eq (If x (P x) (P (not x))) (if P x y z) (if P (not x) z y) ; 31 | 32 | *0 33 | TYPE 34 | *1 35 | -------------------------------------------------------------------------------- /src/Nano.cf: -------------------------------------------------------------------------------- 1 | entrypoints Module ; 2 | 3 | layout "of" ; 4 | 5 | Module. Module ::= "TERM" Term "TYPE" Term ; 6 | 7 | First. Proj ::= ".1" ; 8 | Second. Proj ::= ".2" ; 9 | 10 | separator Tag ","; 11 | T. Tag ::= "'" Var; 12 | Br. Branch ::= Tag "->" Term; 13 | 14 | separator Branch "."; 15 | Concl. Term2 ::= Var ; 16 | Case. Term2 ::= "case" Var "of" "{" [Branch] "}"; 17 | 18 | Destr. Term1 ::= Var "=" Destr ";" Term1; 19 | Constr. Term1 ::= Var "=" Constr ";" Term1; 20 | 21 | coercions Term 2; 22 | 23 | Appl. Destr ::= Var Var; 24 | Proj. Destr ::= Var Proj; 25 | Cut. Destr ::= Var ":" Var; 26 | 27 | Hyp. Constr ::= Var; 28 | Lam. Constr ::= "\\" Var "->" Term2; 29 | Pi. Constr ::= "(" Var ":" Var ")" "->" Term2; 30 | Pair. Constr ::= "(" Var "," Var ")"; 31 | Sigma. Constr ::= "(" Var ":" Var ")" "X" Term2; 32 | Tag. Constr ::= Tag ; 33 | Fin. Constr ::= "{" [Tag] "}" ; 34 | Univ. Constr ::= "*" Nat; 35 | 36 | position token Var (letter)((letter|digit|'-'|'_'|'\'')*) ; 37 | position token Nat digit+ ; 38 | 39 | comment "{-" "-}" ; 40 | comment "--" ; 41 | 42 | -------------------------------------------------------------------------------- /src/Micro.cf: -------------------------------------------------------------------------------- 1 | 2 | entrypoints Module ; 3 | 4 | layout "of" ; 5 | 6 | Module. Module ::= "TERM" Term "TYPE" Term ; 7 | 8 | separator Tag ","; 9 | T. Tag ::= "'" Var; 10 | Br. Branch ::= Tag "->" Term; 11 | 12 | separator Branch "."; 13 | Concl. Term2 ::= DC ; 14 | Case. Term2 ::= "case" DC "of" "{" [Branch] "}"; 15 | 16 | Split. Term2 ::= "(" Var "," Var ")" "=" "split" DC ";" Term1; 17 | Destr. Term1 ::= Var "=" DC ";" Term1; 18 | Constr. Term1 ::= Var ":=" DC ";" Term1; 19 | 20 | coercions Term 2; 21 | 22 | 23 | Tag. DC3 ::= Tag ; 24 | V. DC3 ::= Var; 25 | Pair. DC3 ::= "(" DC "," DC ")"; 26 | Lam. DC3 ::= "\\" Var "->" Term2; 27 | Rec. DC3 ::= "rec" Var "->" Term2; 28 | Fin. DC3 ::= "{" [Tag] "}" ; 29 | Univ. DC3 ::= "*" Nat; 30 | Appl. DC2 ::= DC2 DC3; 31 | Fun. DC1 ::= DC2 "->" Term2; 32 | Pi. DC1 ::= "(" Var ":" DC ")" "->" Term2; 33 | Sigma. DC1 ::= "(" Var ":" DC ")" "*" Term2; 34 | Cut. DC ::= DC1 ":" DC; 35 | 36 | coercions DC 4; 37 | 38 | position token Var (letter)((letter|digit|'-'|'_'|'\'')*) ; 39 | position token Nat digit+ ; 40 | 41 | comment "{-" "-}" ; 42 | comment "--" ; 43 | -------------------------------------------------------------------------------- /src/RM.hs: -------------------------------------------------------------------------------- 1 | {-#LANGUAGE NamedFieldPuns, RecordWildCards, 2 | GeneralizedNewtypeDeriving, GADTs, ScopedTypeVariables, RankNTypes, 3 | DeriveFunctor #-} 4 | 5 | module RM where 6 | import Fresh 7 | import Ident 8 | import qualified Data.Map as M 9 | import Data.Map (Map) 10 | import Control.Monad.Reader 11 | import Control.Applicative 12 | 13 | newtype K k a = K {fromK :: k} deriving Functor 14 | newtype I a = I {fromI :: a} deriving Functor 15 | data Env = Env {envHyp :: Map String Id, 16 | envCon :: Map String Id} 17 | deriving Show 18 | 19 | emptyEnv = Env M.empty M.empty 20 | 21 | newtype Lens a b = L {fromLens :: forall f. 
Functor f => (b -> f b) -> (a -> f a)} 22 | view :: Lens a b -> a -> b 23 | view (L g) r = fromK (g K r) 24 | 25 | set :: forall a b. Lens a b -> b -> a -> a 26 | set (L l) x r = runReader (l ((\_ -> ask) :: b -> Reader b b) r) x 27 | 28 | upd :: Lens a b -> (b -> b) -> (a -> a) 29 | upd (L l) f a = fromI $ l (I . f) a 30 | 31 | hyp,con :: Lens Env (Map String Id) 32 | con = L $ \f (Env h c) -> fmap (Env h) (f c) 33 | hyp = L $ \f (Env h c) -> fmap (flip Env c) (f h) 34 | 35 | newtype R a = R {fromR :: ReaderT Env FreshM a} 36 | deriving (Functor, Applicative, Monad, MonadReader Env) 37 | 38 | 39 | -------------------------------------------------------------------------------- /src/Ident.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 2 | module Ident where 3 | 4 | import Data.Function (on) 5 | import Data.Char 6 | import Numeric 7 | import Display 8 | import qualified Text.PrettyPrint.HughesPJ as P 9 | 10 | instance Pretty Id where 11 | pretty (Id n (Unique i) _) = text n P.<> subscriptPretty i 12 | 13 | type Name = String 14 | 15 | newtype Unique = Unique Int deriving (Eq,Ord,Enum) 16 | 17 | instance Show Unique where 18 | show (Unique (-1)) = "" 19 | show (Unique i) = showIntAtBase n at i "" 20 | where 21 | alpha = map intToDigit [0..9] ++ ['a'..'z'] ++ ['A'..'Z'] ++ "'" 22 | n = length alpha 23 | at = (alpha !!) 24 | 25 | data Id = Id 26 | { id_name :: Name 27 | , id_unique :: Unique 28 | , id_m_loc :: Maybe (Int,Int) 29 | -- ^ Maybe a source location this identifier appeared at 30 | } 31 | 32 | instance Show Id where 33 | show (Id n i _) = n ++ "_" ++ show i 34 | 35 | mkId :: Name -> Unique -> Id 36 | mkId n u = Id n u Nothing 37 | 38 | unsafeId :: Name -> Id 39 | unsafeId n = Id n (Unique (error "unsafeId")) Nothing 40 | 41 | instance Eq Id where 42 | (==) = (==) `on` id_unique 43 | 44 | instance Ord Id where 45 | compare = compare `on` id_unique 46 | -------------------------------------------------------------------------------- /src/Options.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable #-} 2 | module Options (Args(..),TypeSystem(..),options) where 3 | 4 | import System.Console.CmdArgs.Implicit 5 | import System.IO.Unsafe 6 | 7 | data TypeSystem 8 | = CCω 9 | | Predicative 10 | deriving (Show,Data,Typeable) 11 | 12 | data Args = Args 13 | { verb :: Int 14 | , typeSystem :: TypeSystem 15 | -- , showRelevance :: Bool 16 | -- , collapseRelevance :: Bool 17 | -- , ignoreBinder :: Bool 18 | , files :: [String] 19 | } 20 | deriving (Show,Data,Typeable) 21 | 22 | sample :: Mode (CmdArgs Args) 23 | sample = cmdArgsMode Args 24 | { verb = 0 &= help "verbosity" &= opt (0 :: Int) 25 | , typeSystem = enum [ CCω &= name "I" &= help "Impredicative" 26 | , Predicative &= name "P" &= help "Predicative" 27 | ] 28 | -- , showRelevance = False &= help "display more irrelevance annotations in normal forms" 29 | -- , collapseRelevance = False &= help "! (param) does not generate new relevance levels." 30 | -- , ignoreBinder = False &= help "ignore binder annotations." 31 | , files = [] &= args &= typFile 32 | } 33 | 34 | options :: Args 35 | options = unsafePerformIO (cmdArgsRun sample) 36 | 37 | -------------------------------------------------------------------------------- /src-Andreas/Check.hs: -------------------------------------------------------------------------------- 1 | -- | Double checking values. 
2 | 3 | module Check where 4 | 5 | import Data.Set (Set) 6 | import qualified Data.Set as Set 7 | 8 | import Value 9 | 10 | -- * Contexts and heaps 11 | 12 | data CxtEntry = CxtEntry { cxtHyp :: Hyp, cxtTyp :: Conc } -- ^ @x : Y_@. 13 | 14 | newtype Cxt = Cxt { cxt :: [CxtEntry] } 15 | 16 | newtype Heap = Heap { heap :: Binds } 17 | 18 | -- * Constraints 19 | 20 | data Constraint = Constraint { cTag :: Tag, cHyp :: Hyp } -- ^ @'t = x@. 21 | 22 | newtype Constraints = Constraints { constraints :: [Constraint] } 23 | 24 | -- * Type-checking monad 25 | 26 | -- | Type-checking environment. 27 | data Env = Env 28 | { envCxt :: Cxt 29 | , envHeap :: Heap 30 | , envConstraints :: Constraints 31 | } 32 | 33 | -- | Type-checking error. 34 | type TCErr = String 35 | 36 | -- | Type-checking monad. 37 | newtype CheckM a = CheckM { checkM :: ReaderT Env (Either TCErr a) } 38 | 39 | -- * Type-checking algorithm 40 | 41 | -- | Look up a hypothesis in the heap. 42 | lookHyp :: Hyp -> CheckM 43 | 44 | whnf :: Term -> CheckM RTerm 45 | 46 | shouldBePi :: Type -> CheckM PiType 47 | 48 | infer :: LTerm -> CheckM Type 49 | infer d = 50 | case d of 51 | LHyp x -> lookHyp x >>= \case 52 | IsHyp a -> return a 53 | IsAlias y -> infer y 54 | App x y_ -> infer x >>= shouldBePi >>= \ (PiType a b) -> do 55 | check y_ a 56 | apply b x 57 | -------------------------------------------------------------------------------- /src-Andreas/Value.hs: -------------------------------------------------------------------------------- 1 | -- | Values are sequent-calculus terms where each intermediate result is named 2 | 3 | module Value where 4 | 5 | import Data.Set (Set) 6 | import qualified Data.Set as Set 7 | 8 | newtype Name = Name { name :: String } -- TODO: Integer 9 | newtype Tag = MkTag { tag :: String } -- TODO: Int 10 | 11 | type Hyp = Name -- ^ Hypothesis variable @x@. 12 | type Conc = Name -- ^ Conclusion variable @y_@. 13 | 14 | -- | Projections from Sigma-type. 15 | data Proj 16 | = L -- ^ Left / first projection. 17 | | R -- ^ Right / second projection. 18 | 19 | data PiSig 20 | = Pi -- ^ Dependent function type. 21 | | Sigma -- ^ Dependent pair type. 22 | 23 | -- | Left-hand side terms aka. elimination/destruction/inferable terms. 24 | data LTerm 25 | = LHyp Hyp -- ^ Hypothesis @x@ (indirection/alias). 26 | | App Hyp Conc -- ^ Application @x y_@. 27 | | Proj Hyp Proj -- ^ Projection @x.p@. 28 | | Ann RTerm Type -- ^ Introduction @c : A@ (this is the cut!). 29 | 30 | data RTerm 31 | = Conc Conc -- ^ Conclusion var @y_@ (indirection/alias). 32 | | Hyp Hyp -- ^ Hypothesis var @x@ 33 | | Tag Tag -- ^ Tag @'t@. 34 | | Pair Conc Conc -- ^ Pair @(y1_, y2_)@. 35 | | Lam Hyp Term -- ^ Lambda-abstr. @\ x -> v@. 36 | | Quant PiSig Conc RTerm -- ^ Quantifier Sigma @(x : Y_) & C@ or Pi @(x : Y_) -> C@. 37 | | Tags Tags -- ^ Tagset @{'t1,...,'tn}@. 38 | | Univ -- ^ Universe @Type@. 39 | 40 | type Type = Term 41 | data Term 42 | = Let Binds Term -- ^ Let binding @let bs in v@. 43 | | Case Hyp Branches -- ^ Case distinction @case x of brs@. 44 | | Core RTerm -- ^ Leaf. 45 | 46 | type Branches = [(Tag,Term)] -- ^ @{ 't1 -> v1; ...; 'tn -> vn }@. 
47 | 48 | data Bind 49 | = LBind Hyp LTerm -- ^ @x = d@ 50 | | RBind Conc RTerm -- ^ @y_ = c@ 51 | 52 | type Binds = [Bind] 53 | type Tags = Set Tag 54 | -------------------------------------------------------------------------------- /src/Main.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -fno-warn-orphans #-} 2 | {-# LANGUAGE OverloadedStrings, ExistentialQuantification, RecordWildCards, TupleSections #-} 3 | module Main where 4 | 5 | import Data.Either 6 | {-# LANGUAGE TupleSections #-} 7 | import Options 8 | import TypeCheck 9 | import Control.Monad.Error 10 | import Control.Applicative 11 | import Display 12 | import TCM 13 | import FeInterface 14 | import qualified Micro.Frontend 15 | import qualified Nano.Frontend 16 | import System.FilePath 17 | 18 | 19 | chooseFrontEnd :: FilePath -> FrontEnd 20 | chooseFrontEnd p = case takeExtension p of 21 | ".na" -> Nano.Frontend.fe 22 | ".ma" -> Micro.Frontend.fe 23 | 24 | type Verbosity = Int 25 | 26 | putStrV :: Verbosity -> Doc -> Checker () 27 | putStrV v s = if verb options >= v then liftIO $ putStrLn (render s) else return () 28 | 29 | runFile :: FilePath -> Checker () 30 | runFile f = do 31 | let fe = chooseFrontEnd f 32 | putStrV 1 $ "Processing file:" <+> text f 33 | contents <- liftIO $ readFile f 34 | run fe contents f 35 | 36 | type Checker a = ErrorT Doc IO a 37 | 38 | run :: FrontEnd -> String -> FilePath -> Checker () 39 | run FE{..} s fname = let ts = myLLexer s in case pModule ts of 40 | Left err -> do 41 | putStrV 1 $ "Tokens:" <+> pretty ts 42 | throwError $ text $ fname ++ ": parse failed: " ++ err 43 | Right tree -> do 44 | let Right (rVal,rTyp) = resolveModule tree 45 | putStrV 1 $ "[Resolved value]" $$ pretty rVal 46 | putStrV 1 $ "[Resolved type]" $$ pretty rTyp 47 | let (res,info) = typeCheck rVal rTyp 48 | mapM_ (putStrV 2) info 49 | case res of 50 | Left err -> throwError err 51 | Right _ -> return () 52 | 53 | main :: IO () 54 | main = do 55 | results <- forM (files options) $ \f -> (f,) <$> (runErrorT $ runFile f) 56 | let oks = [f | (f, Right ()) <- results] 57 | errs = [(f,e) | (f, Left e) <- results] 58 | mapM_ (putStrLn . render . snd) errs 59 | putStrLn $ show (length oks) ++ "/" ++ show (length results) ++ " files typecheck." 60 | putStrLn $ "failing: " ++ show (map fst errs) 61 | -------------------------------------------------------------------------------- /report-radanne/poster/tangocolors.sty: -------------------------------------------------------------------------------- 1 | % Defines the tango palette for use with LaTeX. 2 | % 3 | % Copyright 2006 by Patrick Pletscher 4 | % 5 | % This program can be redistributed and/or modified under the terms 6 | % of the GNU Public License, version 2. 
7 | 8 | % butter (yellowish) 9 | \definecolor{tabutter}{rgb}{0.98824, 0.91373, 0.30980} % #fce94f 10 | \definecolor{ta2butter}{rgb}{0.92941, 0.83137, 0} % #edd400 11 | \definecolor{ta3butter}{rgb}{0.76863, 0.62745, 0} % #c4a000 12 | 13 | % orange 14 | \definecolor{taorange}{rgb}{0.98824, 0.68627, 0.24314} % #fcaf3e 15 | \definecolor{ta2orange}{rgb}{0.96078, 0.47451, 0} % #f57900 16 | \definecolor{ta3orange}{rgb}{0.80784, 0.36078, 0} % #ce5c00 17 | 18 | % chocolate (brownish) 19 | \definecolor{tachocolate}{rgb}{0.91373, 0.72549, 0.43137} % #e9b96e 20 | \definecolor{ta2chocolate}{rgb}{0.75686, 0.49020, 0.066667} % #c17d11 21 | \definecolor{ta3chocolate}{rgb}{0.56078, 0.34902, 0.0078431} % #8f5902 22 | 23 | % chameleon (greenish) 24 | \definecolor{tachameleon}{rgb}{0.54118, 0.88627, 0.20392} % #8ae234 25 | \definecolor{ta2chameleon}{rgb}{0.45098, 0.82353, 0.086275} % #73d216 26 | \definecolor{ta3chameleon}{rgb}{0.30588, 0.60392, 0.023529} % #4e9a06 27 | 28 | % sky blue 29 | \definecolor{taskyblue}{rgb}{0.44706, 0.56078, 0.81176} % #728fcf 30 | \definecolor{ta2skyblue}{rgb}{0.20392, 0.39608, 0.64314} % #3465a4 31 | \definecolor{ta3skyblue}{rgb}{0.12549, 0.29020, 0.52941} % #204a87 32 | 33 | % plum (violettish) 34 | \definecolor{taplum}{rgb}{0.67843, 0.49804, 0.65882} % #ad7fa8 35 | \definecolor{ta2plum}{rgb}{0.45882, 0.31373, 0.48235} % #75507b 36 | \definecolor{ta3plum}{rgb}{0.36078, 0.20784, 0.4} % #5c3566 37 | 38 | % scarlet red 39 | \definecolor{tascarletred}{rgb}{0.93725, 0.16078, 0.16078} % #ef2929 40 | \definecolor{ta2scarletred}{rgb}{0.8, 0, 0} % #cc0000 41 | \definecolor{ta3scarletred}{rgb}{0.64314, 0, 0} % #a40000 42 | 43 | % aluminium 44 | \definecolor{taaluminium}{rgb}{0.93333, 0.93333, 0.92549} % #eeeeec 45 | \definecolor{ta2aluminium}{rgb}{0.82745, 0.84314, 0.81176} % #d3d7cf 46 | \definecolor{ta3aluminium}{rgb}{0.72941, 0.74118, 0.71373} % #babdb6 47 | 48 | % gray 49 | \definecolor{tagray}{rgb}{0.53333, 0.54118, 0.52157} % #888a85 50 | \definecolor{ta2gray}{rgb}{0.33333, 0.34118, 0.32549} % #555753 51 | \definecolor{ta3gray}{rgb}{0.18039, 0.20392, 0.21176} % #2e3436 52 | -------------------------------------------------------------------------------- /src/Display.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE PackageImports, GADTs, KindSignatures, StandaloneDeriving, EmptyDataDecls, FlexibleInstances, OverloadedStrings, OverlappingInstances #-} 2 | 3 | module Display (Pretty(..), Doc, ($$), ($+$), (<+>), ($$+), ($$++), text, hang, vcat, parensIf, sep, comma, nest, parens, braces, int, 4 | subscriptPretty, superscriptPretty, subscriptShow, render, punctuate) where 5 | 6 | import Prelude hiding (length, reverse) 7 | import Text.PrettyPrint.HughesPJ 8 | import Numeric (showIntAtBase) 9 | import qualified Data.Map as M 10 | class Pretty a where 11 | pretty :: a -> Doc 12 | 13 | instance Pretty x => Pretty [x] where 14 | pretty x = brackets $ sep $ punctuate comma (map pretty x) 15 | 16 | instance Pretty Int where 17 | pretty = int 18 | 19 | instance Pretty Bool where 20 | pretty = text . 
show 21 | 22 | instance (Pretty a, Pretty b) => Pretty (Either a b) where 23 | pretty (Left a) = "◂ " <> pretty a 24 | pretty (Right a) = "▸ " <> pretty a 25 | 26 | instance (Pretty a, Pretty b) => Pretty (a,b) where 27 | pretty (a,b) = "(" <> pretty a <> "," <> pretty b <> ")" 28 | 29 | instance (Pretty k, Pretty v) => Pretty (M.Map k v) where 30 | pretty m = sep $ punctuate ";" [pretty k <> " ↦ " <> pretty v | (k,v) <- M.toList m] 31 | 32 | instance Pretty a => (Pretty (Maybe a)) where 33 | pretty Nothing = "¿" 34 | pretty (Just x) = "¡" <> pretty x 35 | instance Pretty String where 36 | pretty = text 37 | 38 | scriptPretty :: String -> Int -> Doc 39 | scriptPretty s = text . scriptShow s 40 | 41 | scriptShow :: (Integral a, Show a) => [Char] -> a -> [Char] 42 | scriptShow [] _ = error "scriptShow on empty list" 43 | scriptShow (minus:digits) x = if x < 0 then minus : sho (negate x) else sho x 44 | where sho z = showIntAtBase 10 (\i -> digits !! i) z [] 45 | 46 | superscriptPretty :: Int -> Doc 47 | superscriptPretty = scriptPretty "⁻⁰¹²³⁴⁵⁶⁷⁸⁹" 48 | 49 | subscriptPretty :: Int -> Doc 50 | subscriptPretty = scriptPretty "-₀₁₂₃₄₅₆₇₈₉" 51 | 52 | subscriptShow :: Int -> String 53 | subscriptShow = scriptShow "-₀₁₂₃₄₅₆₇₈₉" 54 | 55 | parensIf :: Bool -> Doc -> Doc 56 | parensIf True = parens 57 | parensIf False = id 58 | 59 | -- default indentation 60 | indentation :: Int 61 | indentation = 2 62 | 63 | -- Usefull hang operators 64 | ($$+) :: Doc -> Doc -> Doc 65 | d $$+ d' = hang d indentation d' 66 | 67 | ($$++) :: Doc -> Doc -> Doc 68 | d $$++ d' = vcat [d, nest indentation d'] 69 | 70 | infixl 9 $$+ 71 | infixl 9 $$++ 72 | -------------------------------------------------------------------------------- /src/TexnAgda.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | module TexnAgda where 3 | 4 | import Terms 5 | import Ident 6 | 7 | import MarXup 8 | import MarXup.Tex 9 | import MarXup.Latex 10 | import MarXup.Latex.Math 11 | import MarXup.DerivationTrees 12 | 13 | import Data.List 14 | import Data.Monoid 15 | 16 | 17 | punctuate :: Monoid c => c -> [c] -> c 18 | punctuate p = mconcat . intersperse p 19 | 20 | sep :: [TeX] -> TeX 21 | sep = punctuate " " 22 | 23 | sepp :: [TeX] -> TeX 24 | sepp = punctuate space 25 | 26 | keyword :: String -> TeX 27 | keyword = math . mathsf . tex 28 | 29 | (<+>) :: TeX -> TeX -> TeX 30 | a <+> b = a <> " " <> b 31 | 32 | (<++>) :: TeX -> TeX -> TeX 33 | a <++> b = a <> space <> b 34 | 35 | symi :: String -> TeX -> TeX -> TeX 36 | symi s a b = a <> keyword s <> b 37 | 38 | (<=>) :: TeX -> TeX -> TeX 39 | (<=>) = symi "=" 40 | 41 | () :: TeX -> TeX -> TeX 42 | () = symi ";" 43 | 44 | (<:>) :: TeX -> TeX -> TeX 45 | (<:>) = symi ":" 46 | 47 | () :: TeX -> TeX -> TeX 48 | a b = a <> newline <> b 49 | 50 | (→) :: TeX -> TeX -> TeX 51 | a → b = a <+> cmd0 "to" <+> b 52 | 53 | (×) :: TeX -> TeX -> TeX 54 | a × b = a <+> cmd0 "cross" <+> b 55 | 56 | texTerm :: Term Id Id -> TeX 57 | texTerm term = 58 | case term of 59 | Destr x v t -> texHyp x <=> texDestr v texTerm t 60 | Case x brs -> 61 | keyword "case" <+> texHyp x <+> keyword "of" <+> 62 | brackets (sep $ map texBranch brs) 63 | Constr x c t -> texConc x <=> texConstr c texTerm t 64 | Concl x -> texConc x 65 | 66 | 67 | texBranch :: Branch Id Id -> TeX 68 | texBranch (Br tag t) = 69 | textual tag → texTerm t <+> keyword "." 
70 | 71 | texDestr :: Destr Id -> TeX 72 | texDestr d = 73 | case d of 74 | App f x -> texHyp f <+> texConc x 75 | Cut t ty -> texConc t <+> keyword ":" <+> texConc ty 76 | 77 | texConstr :: Constr Id Id -> TeX 78 | texConstr constr = 79 | case constr of 80 | Hyp x -> texHyp x 81 | Lam x t -> keyword "λ" <> texHyp x <> keyword "." <> texTerm t 82 | Rec x t -> keyword "rec" <+> texHyp x <> keyword "." <> texTerm t 83 | Pi x tyA tyB -> paren (texHyp x <:> texConc tyA) → texTerm tyB 84 | Sigma x tyA tyB -> paren (texHyp x <:> texConc tyA) × texTerm tyB 85 | Pair x y -> paren (texConc x <+> keyword "," <+> texConc y) 86 | Tag t -> textual t 87 | Fin ts -> brackets (punctuate "; " $ map textual ts) 88 | Universe i -> cmd0 "star" <> textual "_" <> (textual $ show i) 89 | 90 | texHyp :: Id -> TeX 91 | texHyp x = textual $ show x 92 | 93 | texConc :: Conc Id -> TeX 94 | texConc (Conc x) = cmd "overline" (textual $ show x) 95 | -------------------------------------------------------------------------------- /src/TCM.hs: -------------------------------------------------------------------------------- 1 | {-#LANGUAGE NamedFieldPuns, RecordWildCards, GeneralizedNewtypeDeriving, GADTs, ScopedTypeVariables, OverloadedStrings, TypeSynonymInstances, FlexibleInstances, FlexibleContexts, MultiParamTypeClasses #-} 2 | module TCM where 3 | 4 | import Terms 5 | import Ident 6 | import Fresh 7 | import Display 8 | import Control.Monad.Reader 9 | import Control.Monad.Writer 10 | import Control.Monad.RWS 11 | import Control.Monad.Error 12 | import Control.Applicative 13 | import Data.Map (Map) 14 | 15 | type Term' = Term Id Id 16 | type Constr' = Constr Id Id 17 | type Destr' = Destr Id 18 | type Heap' = Heap Id Id 19 | type Branch' = Branch Id Id 20 | 21 | 22 | type DeCo r = Either (Destr r) (Conc r) 23 | 24 | data Heap n r = Heap { dbgDepth :: Int 25 | , heapConstr :: Map (Conc n) (Constr n r) 26 | , heapRevConstr :: Map (Constr n r) (Conc n) 27 | , heapDestr :: Map (Hyp n) (DeCo r) 28 | , heapRevDestr :: Map (Destr r) (Hyp n) 29 | , heapAlias :: Map r r 30 | , context :: Map n (Variance, Conc r) -- ^ types 31 | } 32 | 33 | instance (Pretty r, Pretty n) => Pretty (Heap n r) where 34 | pretty (Heap {..}) = sep [hang lab 2 v 35 | | (lab,v) <- [("constr" ,pretty heapConstr) 36 | ,("revconstr" ,pretty heapRevConstr) 37 | ,("destr" ,pretty heapDestr) 38 | ,("revdestr" ,pretty heapRevDestr) 39 | ,("alias" ,pretty heapAlias) 40 | ,("context",pretty context)] 41 | ] 42 | 43 | newtype TC a = TC {fromTC :: ErrorT Doc (RWST Heap' [Doc] () FreshM) a} 44 | deriving (Functor, Applicative, Monad, MonadReader Heap', MonadWriter [Doc], MonadError Doc) 45 | 46 | instance Error Doc where 47 | noMsg = "unknown error" 48 | strMsg = text 49 | 50 | runTC :: Unique -> Heap' -> TC a -> (Either Doc a,[Doc]) 51 | runTC u h0 (TC x) = runFreshMFromUnique u $ evalRWST (runErrorT x) h0 () 52 | 53 | liftTC :: FreshM a -> TC a 54 | liftTC x = TC $ lift $ lift x 55 | 56 | substTC xx a_ bb = liftTC (subst xx a_ bb) 57 | 58 | substByDestr :: (r~Id,n~Id) => Hyp r -> Destr r -> Term n r -> TC (Term n r) 59 | substByDestr h d t = do 60 | x' <- liftTC $ refreshId h 61 | t' <- substTC h x' t 62 | return $ Destr x' d t' 63 | 64 | terr :: Doc -> TC a 65 | terr msg = do 66 | h <- ask 67 | throwError $ sep [hang "heap" 2 (pretty h), msg] 68 | 69 | report :: Doc -> TC () 70 | report msg = do 71 | lvl <- dbgDepth <$> ask 72 | tell [text (replicate lvl ' ') <> msg] 73 | -------------------------------------------------------------------------------- 
/Radanne-Internship.md: --------------------------------------------------------------------------------
1 | An ongoing quest in the mathematical logic community is to find a
2 | suitable logical foundation for mathematics. One of the strong
3 | candidates is the intuitionistic type-theory of Per Martin-Löf (MLTT).
4 | A natural question then is: on what foundations does MLTT rest? One of
5 | the answers is that it corresponds to an intuitive notion of
6 | computation. A related, but different type of answer is to produce a
7 | system which checks proofs written in MLTT. (By the Curry-Howard
8 | isomorphism, MLTT corresponds to a programming language with dependent
9 | types, and the proof checker corresponds to a type-checker.) It is
10 | then important that the type checker is as simple as possible, in
11 | order to be sure of its correctness.
12 | 
13 | This work falls into a line of research which attempts to design the
14 | smallest type-theory equivalent to MLTT, together with type-checkers
15 | for them. Achievements include Agda Light (Norell), Mini TT (Coquand
16 | et al.), or PiSigma (Altenkirch et al.). In our view, PiSigma is
17 | especially attractive because it rests upon only three simple
18 | primitive constructions: finite types (Fin), dependent products (Pi)
19 | and dependent sums (Sigma). Another interesting aspect of PiSigma is
20 | that the eliminators for Sigma and Fin are to be written in sequent
21 | calculus style. However, because Pi is treated differently, the
22 | language suffers serious problems (lack of subject reduction).
23 | 
24 | Traditionally, implementations of minimalistic type-theories are based
25 | on natural deduction. Using a sequent-calculus style is interesting,
26 | because it corresponds to naming every intermediate result (as is
27 | usual in intermediate representations of traditional programming
28 | languages). Naming intermediate results means that they can be shared
29 | in subsequent parts of the term, which potentially solves two
30 | problems:
31 | 
32 | 1. The size of certain terms becomes so large that type-checking them
33 | becomes prohibitively expensive.
34 | 
35 | 2. Even for moderately-sized programs, the normalized output given by
36 | the type-checker is too big to be verified by users.
37 | 
38 | The goal of this project is to solve the above issues by developing a
39 | minimalistic type-theory in sequent calculus style, and implementing a
40 | type-checker for it.
41 | 
42 | With this goal in mind, we will:
43 | 
44 | - Develop a number of examples to test our theory.
45 | - Design and document fragments of a type-theory in sequent calculus style.
46 | - Implement a type-checker in the Haskell programming language.
47 | - Verify that the examples we have designed work with our implementation.
48 | - If time allows, assess if the theory is suitable as a core language, by
49 | extending it with experimental features.
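
A small illustration of the "naming every intermediate result" idea discussed
above (a sketch only, written in Haskell, the implementation language of this
project; the names `SharingSketch`, `f`, `g`, `nested` and `named` are invented
for the example and are not part of the code base):

```haskell
module SharingSketch where

f, g :: Int -> Int
f = (+ 1)
g = (* 2)

-- Tree-shaped term: the sub-expression (g y) is written, and in a
-- type-checker without sharing also processed, twice.
nested :: Int -> Int
nested y = f (g y) + f (g y)

-- Sequent-calculus / let-normal style: every intermediate result gets a
-- name, so (g y) occurs once and is reused by reference afterwards.
named :: Int -> Int
named y =
  let x1 = g y
      x2 = f x1
  in  x2 + x2
```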
50 | -------------------------------------------------------------------------------- /src/Fresh.hs: -------------------------------------------------------------------------------- 1 | module Fresh where 2 | 3 | import Control.Applicative hiding (empty) 4 | import Control.Monad.State 5 | import Control.Arrow ((&&&)) 6 | import Ident 7 | import Data.Bifoldable 8 | 9 | import Data.Bitraversable 10 | import Data.Bifunctor 11 | import Data.Maybe (fromMaybe) 12 | fromNames :: Bitraversable p => p Name Name -> p Id Id 13 | fromNames = fromNamesFlag True 14 | 15 | fromNamesLiberal :: Bitraversable p => p Name Name -> p Id Id 16 | fromNamesLiberal = fromNamesFlag False 17 | 18 | fromNamesFlag :: Bitraversable p => Bool -> p Name Name -> p Id Id 19 | fromNamesFlag safe x = runFreshM (evalStateT (bitraverse fresh_name look x) []) 20 | where 21 | fresh_name s = do 22 | x <- lift fresh 23 | let r = Id s x Nothing 24 | modify ((s,r):) 25 | return r 26 | look s = do 27 | m <- get 28 | case lookup s m of 29 | Nothing -> 30 | if safe 31 | then error $ "Unknown identifier : " ++ s 32 | else fresh_name s 33 | Just x -> return x 34 | 35 | names :: Bifoldable t => t name ref -> [name] 36 | names = biconcatMap (\ n -> [n]) (\ _r -> []) 37 | 38 | refreshBinders :: (Bifunctor t,Bifoldable t) => t Id Id -> FreshM (t Id Id) 39 | refreshBinders s = do 40 | freshened <- sequence [ (,) b <$> refreshId b | b <- names s ] 41 | let lk x = fromMaybe x (lookup x freshened) 42 | return $ bimap lk lk $ s 43 | 44 | rSubst :: (Bifunctor t, Eq r) => r -> r -> t n r -> t n r 45 | rSubst r0 r1 = bimap id (\r -> if r == r0 then r1 else r) 46 | 47 | subst :: (Bifoldable t, Bifunctor t) => Id -> Id -> t Id Id -> FreshM (t Id Id) 48 | subst x r t = rSubst x r <$> refreshBinders t 49 | 50 | -- | New fresh Id 51 | fresh :: FreshM Unique 52 | fresh = state (Unique &&& succ) 53 | 54 | 55 | freshId :: FreshM Id 56 | freshId = do u <- fresh 57 | return $ mkId "tmp" u 58 | 59 | freshFrom :: Name -> FreshM Id 60 | freshFrom n = do u <- fresh 61 | return $ mkId n u 62 | 63 | freshIds :: Int -> FreshM [Id] 64 | freshIds n = replicateM n freshId 65 | 66 | refreshId :: Id -> FreshM Id 67 | refreshId x = do 68 | u <- fresh 69 | return x { id_unique = u } 70 | 71 | infixl 4 <.> 72 | -- | Applies a pure value in an applicative computation 73 | (<.>) :: Applicative f => f (a -> b) -> a -> f b 74 | m <.> x = m <*> pure x 75 | 76 | type FreshM = State Int 77 | 78 | runFreshM :: FreshM a -> a 79 | runFreshM m = evalState m 0 80 | 81 | runFreshMFromUnique :: Unique -> FreshM a -> a 82 | runFreshMFromUnique (Unique n) m = evalState m n 83 | 84 | runFreshMFrom :: Bifoldable t => t Id Id -> FreshM a -> a 85 | runFreshMFrom = runFreshMFromUnique . nextUnique 86 | 87 | nextUnique :: Bifoldable t => t Id Id -> Unique 88 | nextUnique = succ . maximum . map Ident.id_unique . 
biList 89 | 90 | -------------------------------------------------------------------------------- /src/Nano/Resolve.hs: -------------------------------------------------------------------------------- 1 | {-#LANGUAGE NamedFieldPuns, RecordWildCards, 2 | GeneralizedNewtypeDeriving, GADTs, ScopedTypeVariables, RankNTypes, 3 | DeriveFunctor #-} 4 | 5 | module Nano.Resolve where 6 | 7 | import Terms 8 | import qualified Nano.Abs as A 9 | import Fresh 10 | import Ident 11 | import qualified Data.Map as M 12 | import Data.Map (Map) 13 | import Control.Monad.Reader 14 | import Control.Applicative 15 | import TCM 16 | import RM 17 | 18 | resolveVar :: (Lens Env (Map String Id)) -> A.Var -> R Id 19 | resolveVar l (A.Var (_,x)) = do 20 | env <- ask 21 | let v = M.lookup x . view l $ env 22 | case v of 23 | Just i -> return i 24 | Nothing -> error $ 25 | "env = " ++ show env ++ "\n" ++ 26 | "unknown identifier: " ++ x 27 | 28 | resolveConc :: (Lens Env (Map String Id)) -> A.Var -> R (Conc Id) 29 | resolveConc l x = Conc <$> resolveVar l x 30 | 31 | insert :: (Lens Env (Map String Id)) -> A.Var -> (Id -> R a) -> R a 32 | insert l (A.Var (_,x)) k = do 33 | v <- R $ lift $ freshFrom x 34 | local (upd l $ M.insert x v) (k v) 35 | 36 | 37 | resolve :: A.Module -> Either String (Term',Term') 38 | resolve t = Right $ runFreshM $ runReaderT (fromR $ resolveModule t) emptyEnv 39 | 40 | resolveModule :: A.Module -> R (Term',Term') 41 | resolveModule (A.Module t1 t2) = (,) <$> resolveTerm t1 <*> resolveTerm t2 42 | 43 | resolveTerm :: A.Term -> R (Term Id Id) 44 | resolveTerm (A.Constr x c t) = do 45 | c' <- resolveConstr c 46 | insert con x $ \x' -> Constr (Conc x') c' <$> resolveTerm t 47 | resolveTerm (A.Destr x d t) = do 48 | d' <- resolveDestr d 49 | insert hyp x $ \x' -> Destr x' d' <$> resolveTerm t 50 | resolveTerm (A.Case x bs) = Case <$> resolveVar hyp x <*> (forM bs $ \(A.Br tag t) -> Br <$> resolveTag tag <*> resolveTerm t) 51 | resolveTerm (A.Concl x) = Concl <$> resolveConc con x 52 | 53 | resolveDestr :: A.Destr -> R (Destr Id) 54 | resolveDestr (A.Appl f x) = App <$> resolveVar hyp f <*> resolveConc con x 55 | -- resolveDestr (A.Proj p f) = Proj <$> resolveVar hyp p <*> pure (resolveProj f) 56 | resolveDestr (A.Cut x t) = Cut <$> resolveConc con x <*> resolveConc con t 57 | 58 | resolveConstr :: A.Constr -> R (Constr Id Id) 59 | resolveConstr (A.Hyp x) = Hyp <$> resolveVar hyp x 60 | resolveConstr (A.Lam x t) = insert hyp x $ \x' -> 61 | Lam x' <$> resolveTerm t 62 | resolveConstr (A.Pi x c t) = insert hyp x $ \x' -> 63 | Pi Invar x' <$> resolveConc con c <*> resolveTerm t 64 | resolveConstr (A.Pair a b) = Pair <$> resolveConc con a <*> resolveConc con b 65 | resolveConstr (A.Sigma x c t) = insert hyp x $ \x' -> 66 | Sigma Invar x' <$> resolveConc con c <*> resolveTerm t 67 | resolveConstr (A.Tag t) = Tag <$> resolveTag t 68 | resolveConstr (A.Fin ts) = Fin <$> mapM resolveTag ts 69 | resolveConstr (A.Univ (A.Nat (_,n))) = Universe <$> pure (read n) 70 | 71 | 72 | resolveTag (A.T (A.Var (_,x))) = pure x 73 | -------------------------------------------------------------------------------- /examples/040-Integers.ma: -------------------------------------------------------------------------------- 1 | TERM 2 | 3 | Bool := { 'true , 'false } ; 4 | 5 | Eq = (\A -> \x -> \y -> (P : A -> *0) -> P x -> P y) 6 | : (A : *0) -> A -> A -> *1; 7 | 8 | refl = (\A -> \x -> \P -> \p -> p) 9 | : (A : *0) -> (x:A) -> Eq A x x; 10 | 11 | T = Eq Bool 'true 12 | : Bool -> *1 ; 13 | 14 | t = refl Bool 'true 15 | : T 
'true ; 16 | 17 | Bit := { 'zero , 'one } ; 18 | incr = 19 | \n -> case n of { 20 | 'zero -> ('one, 'zero). 21 | 'one -> ('zero, 'one) 22 | } 23 | : Bit -> (b : Bit) * Bit ; 24 | 25 | -- return (number, carry) 26 | add = (\n1 -> \n2 -> case n1 of { 27 | 'zero -> x := (n2 , 'zero ) ; x . 28 | 'one -> incr n2 29 | }) 30 | : Bit -> Bit -> (b : Bit) * Bit ; 31 | 32 | add_carry = 33 | (\n1 -> \n2 -> \n3 -> ( 34 | (n', carry1) = split (add n1 n2) ; 35 | (n'', carry2) = split (add n' n3) ; 36 | (carry, foo ) = split (add carry1 carry2) ; 37 | x := ( n'' , carry ) ; x)) 38 | : Bit -> Bit -> Bit -> (b:Bit) * Bit ; 39 | 40 | 41 | mult = (\n1 -> \n2 -> case n1 of { 42 | 'zero -> 'zero. 43 | 'one -> case n2 of { 44 | 'zero -> 'zero. 45 | 'one -> 'one 46 | }}) 47 | : Bit -> Bit -> Bit ; 48 | 49 | stuff = add 'zero 'one : (b:Bit) * Bit ; 50 | 51 | equal = 52 | (\n1 -> \n2 -> case n1 of { 53 | 'zero -> case n2 of { 54 | 'zero -> 'true. 55 | 'one -> 'false 56 | }. 57 | 'one -> case n2 of { 58 | 'zero -> 'false. 59 | 'one -> 'true 60 | } 61 | }) 62 | : (n1 : Bit) -> (n2 : Bit) -> Bool ; 63 | 64 | 65 | ( n , c ) = split (add 'one 'one) ; 66 | stuff = t : T (equal 'zero n ) ; 67 | 68 | -- low bits on the left. 69 | Bit5 := (b:Bit) * ((b:Bit) * ((b:Bit) * ((b:Bit) * Bit))) ; 70 | 71 | bit5 = (\n4 -> \n3 -> \n2 -> \n1 -> \n0 -> 72 | (x := (n0,(n1,(n2,(n3,n4)))) ; x) ) 73 | : Bit -> Bit -> Bit -> Bit -> Bit -> Bit5 ; 74 | 75 | -- add5 = 76 | -- (\n1 -> \n2 -> ( 77 | -- (b0, carry0) = split (add n1.1 n2.1 ); 78 | -- (b1, carry1) = split (add_carry n1.2.1 n2.2.1 carry0 ); 79 | -- (b2, carry2) = split (add_carry n1.2.2.1 n2.2.2.1 carry1 ); 80 | -- (b3, carry3) = split (add_carry n1.2.2.2.1 n2.2.2.2.1 carry2 ); 81 | -- (b4, carry4) = split (add_carry n1.2.2.2.2 n2.2.2.2.2 carry3 ); 82 | 83 | -- (b0, (b1, (b2, (b3, b4)))) 84 | 85 | -- )) : Bit5 -> Bit5 -> Bit5 ; 86 | 87 | 88 | -- {- 89 | -- 01101 90 | -- + 00111 91 | -- = 10100 92 | -- -} 93 | 94 | -- int1 = (bit5 'zero 'one 'one 'zero 'one) : Bit5 ; 95 | -- int2 = (bit5 'zero 'zero 'one 'one 'one) : Bit5 ; 96 | -- result = (bit5 'one 'zero 'one 'zero 'zero) : Bit5 ; 97 | 98 | -- stuff = 99 | -- refl Bit5 result : 100 | -- (Eq Bit5 (add5 int1 int2) result) ; 101 | 102 | *0 103 | TYPE 104 | *1 -------------------------------------------------------------------------------- /Outline-Draft.md: -------------------------------------------------------------------------------- 1 | % LLTT: A Sequent-Calculus Presentation of Type-Theory. 2 | 3 | A. The problem: 4 | 5 | - Type-Theory is Great! 6 | - Two problems: 7 | 8 | 1. It’s a bit slow. Type inference generates big terms (recall the 9 | canonical ML program). The intermediate representation does not 10 | support sharing. Side note: it’s not good for humans to read normal 11 | forms like this either! 12 | 13 | 2. It’s a bit annoying to use. The “case” function does not quite 14 | work. McBride invented “the view from the left”, but this departs 15 | significantly from the tradition of “terms” as text. In practice, 16 | it makes the system awkward to use, as “with” is translated to an 17 | extra abstraction. (This means that normal forms using with become 18 | big!) The types of functions using with can’t even be expressed by 19 | the user. More quirks of “with”: needs for extra constructs on top 20 | of it (inspect idiom); and some programs still do not typecheck. 21 | Here, we instead propose to properly implement the “case” construction. 
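
As a concrete reminder for point 1 above, here is one standard rendering of
the "canonical ML program" (a sketch in Haskell rather than ML; the names
d0..d3 are invented for the illustration). Each definition applies the
previous one twice, so the size of the inferred type squares at every step;
without sharing, printing or re-checking such types and normal forms quickly
becomes prohibitive:

```haskell
module DoublingSketch where

d0 :: a -> (a, a)
d0 x = (x, x)

d1 :: a -> ((a, a), (a, a))
d1 x = d0 (d0 x)

d2 x = d1 (d1 x)   -- result type contains 16 occurrences of the type variable
d3 x = d2 (d2 x)   -- 256 occurrences
```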
22 | 
23 | THE ROOT OF THE PROBLEM: naming intermediate results is essential both
24 | for humans and computers to manipulate typed terms.
25 | 
26 | Our solution: Make an implementation of a core type-theory based on
27 | sequent calculus rather than natural deduction.
28 | 
29 | Technical contributions. (List)
30 | 
31 | B. The case of Case.
32 | 
33 | - desired rule for case
34 | 
35 |       Γ[tt/x] ⊢ e1 : C[tt/x]
36 |       Γ[ff/x] ⊢ e2 : C[ff/x]
37 |     -----------------------------------------
38 |     Γ ⊢ case x of {tt -> e1; ff -> e2} : C
39 | 
40 | - remarks that it works only if the scrutinee is a variable
41 | - solution: name every intermediate result
42 | - hence technically: we want a sequent calculus.
43 | 
44 | C. Interlude: sequent calculi vs. natural deduction.
45 | - what is s.c.? what is n.d.?
46 | - It is conjectured that every natural deduction system can be
47 | presented in s.c. form. (paper)
48 | - corresponds to CPS (see e.g. the STG machine; Appel's book: Compiling
49 | with Continuations) or even SSA
50 | - Most existing functional compilers either use an s.c.-based
51 | intermediate language, or the equivalent CPS presentation. In sum,
52 | we apply "old" functional language compiler technology to dependent
53 | types.
54 | 
55 | D. Our calculus
56 | - normal forms (Case done. discuss now Sigma and Pi; show full set of rules)
57 | - example programs and their normal forms
58 | 
59 | E. Implementation
60 | 
61 | F. Analysis
62 | - Model back to natural deduction?
63 | 
64 | Sound: Γ;h ⊢ t:A ⇒ ⟦Γ⟧h ⊢ ⟦t⟧h : ⟦A⟧h
65 | 
66 | (the heap h gives rise to a substitution which is applied everywhere)
67 | Maybe it is enough to show that the model preserves substitution (in
68 | normal forms.)
69 | 
70 | Complete: Γ ⊢ t:A ⇒ Γ;empty ⊢ t:A
71 | 
72 | (using a trivial embedding which names the intermediate results.)
73 | 
74 | ⟦t[s/x]⟧ = ⟦t⟧[⟦s⟧/x]
75 | - Alt:
76 | - subject reduction (adding a binding to an env. does not destroy its
77 | structure)
78 | - normalisation (substitution takes a finite amount of time)
79 | 
80 | G. Related work
81 | 
82 | http://syntaxexclamation.wordpress.com/2013/06/17/new-draft-proofs-upside-down/
83 | 
84 | Another reference, related to Hugo Herbelin:
85 | 
86 | https://www.google.se/url?sa=t&rct=j&q=&esrc=s&source=web&cd=1&ved=0CC0QFjAA&url=http%3A%2F%2Fw3.math.uminho.pt%2F~jes%2FHerbelinProgramme(camera-ready).pdf&ei=xO06Uum5OKmg4gT2o4GgBg&usg=AFQjCNGbouAhWtjVNzmJ5I9F96bama8B8Q&sig2=q23cVW14I14t5En5ZdlVdA&bvm=bv.52288139,d.bGE
87 | 
88 | H. Discussion and Conclusion
89 | 
90 | 
91 | -------------------------------------------------------------------------------- /src/Terms.hs: --------------------------------------------------------------------------------
1 | {-# LANGUAGE GADTs, DeriveFunctor, TemplateHaskell, OverloadedStrings, RecordWildCards #-}
2 | 
3 | 
4 | module Terms where
5 | 
6 | import Data.Bitraversable
7 | import Data.Bifoldable
8 | import Data.Bifunctor
9 | import Data.Generics.Genifunctors
10 | import Display
11 | import Data.Monoid
12 | 
13 | type Tag = String
14 | 
15 | -- | Hypotheses variables.
16 | type Hyp a = a
17 | 
18 | -- | Conclusion variables.
19 | newtype Conc a = Conc { conc :: a } 20 | deriving (Functor, Eq, Ord) 21 | 22 | instance Show a => Show (Conc a) where 23 | show (Conc x) = "_" ++ show x 24 | 25 | data Branch n r = Br Tag (Term n r) 26 | deriving (Eq, Ord, Functor) 27 | 28 | instance Bifoldable Term where bifoldMap = bifoldMapDefault 29 | instance Bifunctor Term where bimap = bimapDefault 30 | instance Bitraversable Term where bitraverse = $(genTraverse ''Term) 31 | 32 | data Variance = 33 | Covar 34 | | Contravar 35 | | Invar 36 | deriving (Eq) 37 | 38 | instance Pretty Variance where 39 | pretty Covar = "₊" 40 | pretty Contravar = "₋" 41 | pretty Invar = "₌" 42 | 43 | instance Ord Variance where 44 | Covar <= Covar = True 45 | Covar <= Invar = True 46 | Invar <= Invar = True 47 | Contravar <= Invar = True 48 | Contravar <= Contravar = True 49 | _ <= _ = False 50 | 51 | data Term n r where 52 | Destr :: Hyp n -> Destr r -> Term n r -> Term n r 53 | Split :: Hyp n -> Hyp n -> Hyp r -> Term n r -> Term n r 54 | Case :: Hyp r -> [Branch n r] -> Term n r 55 | Constr :: Conc n -> Constr n r -> Term n r -> Term n r 56 | Concl :: Conc r -> Term n r -- ^ Conclude 57 | deriving (Eq, Ord, Functor) 58 | 59 | data Destr r where 60 | App :: Hyp r -> Conc r -> Destr r 61 | Cut :: Conc r -> Conc r {-^ the type-} -> Destr r 62 | deriving (Eq, Ord, Functor) 63 | 64 | data Constr n r where 65 | Hyp :: Hyp r -> Constr n r 66 | Rec :: Hyp n -> Term n r -> Constr n r 67 | Lam :: Hyp n -> Term n r -> Constr n r 68 | Pi :: Variance -> Hyp n -> Conc r -> Term n r -> Constr n r 69 | Sigma :: Variance -> Hyp n -> Conc r -> Term n r -> Constr n r 70 | Pair :: Conc r -> Conc r -> Constr n r 71 | Tag :: Tag -> Constr n r 72 | Fin :: [Tag] -> Constr n r 73 | Universe :: Int -> Constr n r 74 | deriving (Eq, Ord, Functor) 75 | 76 | instance (Pretty r) => Pretty (Conc r) where 77 | pretty (Conc x) = text "_" <> pretty x 78 | 79 | instance (Pretty r, Pretty n) => Pretty (Term n r) where 80 | pretty (Destr x v t) = pretty x <+> "=" $$+ pretty v <+> ";" $$ pretty t 81 | pretty (Split x y z t) = "(" <> pretty x <> "," <> pretty y <> ") = " <> pretty z <> ";" $$ pretty t 82 | pretty (Constr x v t) = pretty x <+> "=" $$+ pretty v <+> ";" $$ pretty t 83 | pretty (Case x bs) = 84 | "case " <> pretty x <> " of" $$+ 85 | (braces $ sep $ punctuate "." 
$ map pretty bs) 86 | pretty (Concl x) = pretty x 87 | 88 | instance (Pretty r, Pretty n) => Pretty (Branch n r) where 89 | pretty (Br tag t) = "'" <> text tag <> "->" $$+ pretty t 90 | 91 | instance Pretty r => Pretty (Destr r) where 92 | -- pretty (Tag' v) = "'" <> text v 93 | pretty (App f x) = pretty f <> " " <> pretty x 94 | pretty (Cut x t) = pretty x <+> ":" <+> pretty t 95 | 96 | instance (Pretty r, Pretty n) => Pretty (Constr n r) where 97 | pretty (Hyp h) = pretty h 98 | pretty (Rec x b) = ("rec " <> pretty x <> " ->") $$+ (pretty b) 99 | pretty (Lam x b) = ("\\" <> pretty x <> " ->") $$+ (pretty b) 100 | pretty (Pi v x t b) = 101 | (parens (pretty x <+> (":" <> pretty v) <+> pretty t) <> " ->") 102 | $$+ (pretty b) 103 | pretty (Sigma v x t b) = 104 | (parens (pretty x <+> (":" <> pretty v) <+> pretty t) <> " ×") 105 | $$+ (pretty b) 106 | pretty (Pair a b) = parens $ pretty a <> "," <> pretty b 107 | pretty (Tag t) = "'" <> text t 108 | pretty (Fin ts) = braces $ sep $ punctuate "," $ map text ts 109 | pretty (Universe x) = "*" <> subscriptPretty x 110 | -------------------------------------------------------------------------------- /src/Eq.hs: -------------------------------------------------------------------------------- 1 | {-#LANGUAGE NamedFieldPuns, RecordWildCards, GeneralizedNewtypeDeriving, GADTs, ScopedTypeVariables, OverloadedStrings, PatternGuards #-} 2 | module Eq (isSubTypeOf) where 3 | import Terms 4 | import qualified Data.Map as M 5 | import Data.Monoid 6 | import Control.Monad.RWS 7 | import Control.Applicative 8 | import Ident 9 | import Display 10 | import TCM 11 | import Heap 12 | import Eval 13 | import Fresh (freshId, refreshId) 14 | 15 | -- | Direction of the subtyping. Give the "bigger" part. 16 | data Dir = L | E | R 17 | 18 | instance Pretty Dir where 19 | pretty R = "≤" 20 | pretty E = "=" 21 | pretty L = "≥" 22 | 23 | inv L = R 24 | inv E = E 25 | inv R = L 26 | 27 | subCompare R = (<=) 28 | subCompare E = (==) 29 | subCompare L = (>=) 30 | 31 | 32 | 33 | x <&&> y = do 34 | rx <- x 35 | if rx then y else return False 36 | 37 | isSubTypeOf :: (r~Id) => (Variance, Conc r) -> (Variance, Conc r) -> TC Bool 38 | isSubTypeOf (v1,t1) (v2,t2) = 39 | return (subCompare R v1 v2) <&&> testConc R t1 t2 40 | 41 | 42 | testTerm :: (r~Id,n~Id) => Dir -> Term n r -> Term n r -> TC Bool 43 | testTerm d t1 t2 = onConcl t1 $ \c1 -> onConcl t2 $ \c2 -> testConc d c1 c2 44 | 45 | testConc :: (r~Id,n~Id) => Dir -> Conc r -> Conc r -> TC Bool 46 | testConc d x_1 x_2 47 | | x_1 == x_2 = return True -- optimisation, so equal structures are not deeply traversed. 
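-- Otherwise, both conclusions are first reduced to head normal form and the
-- resulting constructions are compared structurally by testConstr (which also
-- handles the eta rules for functions and pairs).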
48 | | otherwise = dbgTest "Conc" d x_1 x_2 $ hnf x_1 $ \c1 -> hnf x_2 $ \c2 -> testConstr' d c1 c2 49 | 50 | dbgTest msg d x y k = do 51 | report $ "Testing" <+> msg <+> ":" <+> pretty x <+> pretty d <+> pretty y 52 | r <- enter k 53 | report $ "Result =" <+> pretty r 54 | return r 55 | 56 | testConstr' d c1 c2 = dbgTest "Construction" d c1 c2 $ do 57 | testConstr d c1 c2 58 | 59 | 60 | testConstr :: (r~Id,n~Id) => Dir -> Constr n r -> Constr n r -> TC Bool 61 | testConstr d (Hyp a1) (Hyp a2) = testHyp d a1 a2 62 | testConstr d (Lam x1 t1) (Lam x2 t2) = local (addAlias' x1 x2) $ testTerm d t1 t2 63 | testConstr d (Pair a1 b1)(Pair a2 b2) = testConc d a1 a2 <&&> testConc d b1 b2 64 | testConstr d (Pi v1 x1 a1 t1) (Pi v2 x2 a2 t2) | subCompare d v1 v2 = 65 | testConc (inv d) a1 a2 <&&> (local (addAlias' x1 x2) $ testTerm d t1 t2) 66 | testConstr d (Sigma v1 x1 a1 t1) (Sigma v2 x2 a2 t2) | subCompare d v1 v2 = 67 | testConc E a1 a2 <&&> (local (addAlias' x1 x2) $ testTerm d t1 t2) 68 | -- The variance here deserve discussion. 69 | testConstr _ (Tag t1)(Tag t2) = return $ t1 == t2 70 | testConstr _ (Fin ts1)(Fin ts2) = return $ ts1 == ts2 71 | testConstr d (Universe x1)(Universe x2) = 72 | return $ subCompare d x1 x2 73 | testConstr d (Rec r1 t1)(Rec r2 t2) = local (addAlias' r1 r2) $ testTerm d t1 t2 -- note that we don't unfold here! 74 | 75 | -- handling eta expansion 76 | testConstr d (Lam x tl) (Hyp y) = etaLam d x tl y 77 | testConstr d (Hyp y) (Lam x tl) = etaLam d x tl y 78 | 79 | testConstr d (Pair x1 x2) (Hyp y) = etaPair d x1 x2 y 80 | testConstr d (Hyp y) (Pair x1 x2) = etaPair d x1 x2 y 81 | 82 | testConstr _ _ _ = return False 83 | 84 | etaLam d x tl y = do 85 | x' <- Conc <$> liftTC (refreshId x) 86 | z <- liftTC freshId 87 | z' <- Conc <$> liftTC (refreshId z) 88 | local (addConstr' x' (Hyp x)) $ 89 | local (addConstr' z' (Hyp z)) $ 90 | normalizeAndAddDestr z (App y x') $ 91 | testTerm d tl (Concl z') 92 | 93 | etaPair d x1 x2 y = do 94 | z1 <- liftTC freshId 95 | z1' <- Conc <$> liftTC (refreshId z1) 96 | z2 <- liftTC freshId 97 | z2' <- Conc <$> liftTC (refreshId z2) 98 | local (addConstr' z1' (Hyp z1)) $ 99 | local (addConstr' z2' (Hyp z2)) $ 100 | addSplit z1 z2 y $ 101 | ( testConc d x1 z1' <&&> testConc d x2 z2' ) 102 | 103 | 104 | testHyp :: Dir -> Hyp Id -> Hyp Id -> TC Bool 105 | testHyp d a1 a2 = dbgTest "Hyp" d a1 a2 $ do 106 | h1 <- aliasOf a1 107 | h2 <- aliasOf a2 108 | md1 <- lookDestr h1 109 | md2 <- lookDestr h2 110 | if h1 == h2 then return True 111 | else case (md1,md2) of 112 | (Just (Left d1), Just (Left d2)) -> dbgTest "App" d d1 d2 $ testDestr d d1 d2 113 | -- we don't have to care about the 'right' case here: if the 114 | -- hyp were evaluated, then the hnf reduction would have taken 115 | -- care of further evaluation before reaching this point. 
116 | _ -> return False 117 | 118 | lookDestr x = do 119 | hC <- heapDestr <$> ask 120 | return $ M.lookup x hC 121 | 122 | testDestr d (App f1 a1) (App f2 a2) = testHyp d f1 f2 <&&> testConc d a1 a2 123 | testDestr _ _ _ = return False 124 | -------------------------------------------------------------------------------- /src/Eval.hs: -------------------------------------------------------------------------------- 1 | {-#LANGUAGE NamedFieldPuns, RecordWildCards, GeneralizedNewtypeDeriving, GADTs, ScopedTypeVariables, OverloadedStrings, PatternGuards #-} 2 | module Eval(hnf, onConcl, addTag, addSplit, unfoldRec,hnfUnfoldRec, normalizeAndAddDestr) where 3 | 4 | import qualified Data.Map as M 5 | import Data.Monoid 6 | import Control.Monad.RWS 7 | import Control.Applicative 8 | 9 | import Terms 10 | import Ident 11 | import Display 12 | import TCM 13 | import Heap 14 | import Fresh (refreshId,freshFrom) 15 | 16 | unfoldRec :: Monoid b => Conc Id -> Hyp Id -> Term Id Id -> (Conc Id -> TC b) -> TC b 17 | unfoldRec c r t k = do 18 | body <- substByDestr r (Cut c (error "rec. typ.")) t 19 | onConcl body k 20 | 21 | hnf,hnfUnfoldRec :: (Monoid a,r~Id,n~Id) => Conc Id -> (Constr Id Id -> TC a) -> TC a 22 | hnfUnfoldRec = hnf' True 23 | hnf = hnf' False 24 | 25 | 26 | lookHeapUF :: Monoid a => Bool -> Conc Id -> (Constr Id Id -> TC a) -> TC a 27 | lookHeapUF unfold c k = do 28 | c' <- lookHeapC c 29 | case c' of 30 | Rec r t | unfold -> unfoldRec c r t (\c'' -> lookHeapUF unfold c'' k) 31 | _ -> k c' 32 | 33 | -- | Top-level head normalization function. 34 | -- 35 | -- Takes a conclusion and looks it up in the heap. 36 | -- If it is bound to a hypothesis, recursively evaluate that by calling @hnfHyp@. 37 | -- Otherwise, it must be a construction, and we are done. 38 | hnf' :: (Monoid a,r~Id,n~Id) => Bool -> Conc n -> (Constr n r -> TC a) -> TC a 39 | hnf' unfold c k = lookHeapUF unfold c $ \c' -> case c' of 40 | Hyp x -> hnfHyp unfold x k 41 | _ -> k c' 42 | 43 | -- | Look for a redex, and evaluate to head normal form. (no Rec allowed in the result) 44 | hnfHyp :: (Monoid a,r~Id,n~Id) => Bool -> Hyp r -> (Constr n r -> TC a) -> TC a 45 | -- check if there is some reduction to perform. if so replace the thunk by its value in the heap. then this must be a continuation. 46 | hnfHyp unfold x k = do 47 | report $ "Evaluating hyp: " <> pretty x 48 | h <- ask 49 | -- lookup definition of hypothesis 50 | let lk = M.lookup (getAlias (heapAlias h) x) $ heapDestr h 51 | case lk of 52 | -- case: abstract (free) hypothesis without a definition 53 | Nothing -> k (Hyp x) 54 | -- case: already in head normal form 55 | Just (Right c) -> do 56 | report $ "Is evaluated to concl: " <> pretty c 57 | lookHeapUF unfold c k 58 | -- case: its a destruction of something, so possibly a redex, need to normalize 59 | Just (Left d) -> do 60 | report $ "Evaluating destr: " <> pretty d 61 | enter $ hnfDestr unfold x d $ \c -> addDef x c (k c) 62 | 63 | -- | Normalize a destruction. 
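-- Application is the only destruction that is reduced lazily here: the head is
-- brought to head normal form first; if it is a lambda we have a beta-redex,
-- whose body is substituted and normalized further, and if it stays neutral the
-- hypothesis is re-examined through its alias (it may have gained a definition
-- via "smart case") before being returned as-is.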
64 | hnfDestr :: (Monoid a,r~Id,n~Id) => Bool -> Hyp r -> Destr r -> (Constr n r -> TC a) -> TC a 65 | hnfDestr unfold h d k = case d of 66 | -- only lazy destruction is application 67 | (App f a_) -> hnfHyp True f $ \c -> case c of 68 | -- case: beta-redex 69 | (Lam xx bb) -> do 70 | bb' <- substByDestr xx (Cut a_ (error "body of lambda should not be checked again.")) bb 71 | onConcl bb' $ \c1 -> do 72 | report $ "Application " <> pretty f <> " " <> pretty a_ <> " reduces to " <> pretty c1 73 | hnf' unfold c1 k 74 | -- case: neutral 75 | Hyp _ -> do 76 | -- Check whether the application is has an abbreviation (=alias) h'. 77 | -- This alias could be bound to a definition (see "smart case"). 78 | -- 79 | -- This is necessary because after evaluation of 'f', the 80 | -- hyp. may have been aliased to another hyp which now has a 81 | -- definition. 82 | h' <- aliasOf h 83 | if h' /= h -- condition to avoid looping 84 | then hnfHyp unfold h' k -- to establish unique normal forms wrt. sharing 85 | else k (Hyp h) 86 | _ -> terr $ "type-error in app-evaluation" 87 | _ -> terr $ "cannot be found as target in cut maps:" <+> pretty d 88 | 89 | normalizeAndAddDestr :: Hyp Id -> Destr Id -> TC a -> TC a 90 | normalizeAndAddDestr = addDestr 91 | 92 | onConcl :: Monoid a => Term' -> (Conc Id -> TC a) -> TC a 93 | onConcl (Concl c) k = k c 94 | onConcl (Destr x d t1) k = normalizeAndAddDestr x d (onConcl t1 k) 95 | onConcl (Constr x c t1) k = addConstr x c (onConcl t1 k) 96 | onConcl (Split x y z t1) k = addSplit x y z $ onConcl t1 k 97 | onConcl (Case x bs) k = mconcat <$> do 98 | forM bs $ \(Br tag t1) -> 99 | addTag x tag $ onConcl t1 k 100 | 101 | addTag :: forall a. Monoid a => Hyp Id -> String -> TC a -> TC a 102 | addTag x t k = do 103 | report $ "Adding tag " <> pretty x <> " = '" <> text t 104 | hnfHyp True x $ \c -> case c of 105 | (Tag t') -> if t == t' then k else return mempty -- conflicting tags, abort. 
106 | (Hyp x') -> addDef x' (Tag t) k 107 | 108 | addSplit :: (Monoid a,r~Id,n~Id) => Hyp r -> Hyp r -> Hyp n -> TC a -> TC a 109 | addSplit x y z k = do 110 | hnfHyp True z $ \c -> case c of 111 | Pair x' y' -> addCut x x' $ 112 | addCut y y' $ 113 | k 114 | Hyp z' -> do 115 | xName <- Conc <$> liftTC (refreshId x) 116 | yName <- Conc <$> liftTC (refreshId y) 117 | zName <- Conc <$> liftTC (refreshId z) 118 | addConstr xName (Hyp x) $ 119 | addConstr yName (Hyp y) $ 120 | addConstr zName (Pair xName yName) $ 121 | addCut z' zName k 122 | -------------------------------------------------------------------------------- /report-radanne/poster/beamerthemeI6pd2.sty: -------------------------------------------------------------------------------- 1 | \ProvidesPackage{beamerthemeI6pd2} % this style was created by Thomas Deselaers an Philippe Dreuw 2 | 3 | 4 | \RequirePackage{tangocolors} 5 | \selectcolormodel{cmyk} 6 | \mode 7 | 8 | \setbeamercolor{headline}{fg=tabutter,bg=white!85!ta3orange} 9 | \setbeamercolor{footline}{fg=tabutter, bg=ta3gray} 10 | \setbeamerfont{footline}{size=\large,series=\tt} 11 | \setbeamercolor{separation line}{bg=ta2orange} 12 | \setbeamercolor{title in headline}{fg=ta2orange} 13 | \setbeamercolor{author in headline}{fg=ta2orange} 14 | \setbeamercolor{institute in headline}{fg=ta3orange} 15 | 16 | \setbeamercolor{framesubtitle}{fg=ta3orange, bg=ta2gray} 17 | \setbeamercolor{author in head/foot}{fg=ta2orange, bg=black} 18 | \setbeamercolor{title in head/foot}{fg=ta2orange, bg=black} 19 | 20 | \setbeamercolor*{normal text}{fg=tachameleon, bg=white!85!ta3orange} 21 | \setbeamercolor*{block body}{bg=white!90!ta3orange,fg=black} 22 | \setbeamercolor*{block title}{fg=taorange,bg=ta2gray} 23 | \setbeamerfont{block title}{size=\large,series=\bf} 24 | \setbeamercolor{upper separation line head}{fg=ta2orange} 25 | 26 | \setbeamercolor*{example body}{fg=ta3aluminium,bg=black} 27 | \setbeamercolor*{example text}{fg=ta3aluminium,bg=black} 28 | \setbeamercolor*{example title}{bg=taorange,fg=ta2gray} 29 | 30 | %\setbeamercolor{alerted text}{fg=ta3gray} 31 | 32 | %\setbeamercolor{example text}{fg=taorange} 33 | \setbeamercolor{structure}{fg=ta3skyblue} 34 | 35 | \setbeamertemplate{itemize items}[triangle] 36 | \setbeamertemplate{navigation symbols}{} % no navigation on a poster 37 | 38 | \setbeamercolor{separation line}{use=structure,bg=black} 39 | 40 | % \setbeamertemplate{block begin}{ 41 | % \vskip.75ex 42 | % \begin{beamercolorbox}[ht=3.5ex,dp=0.5ex,center,leftskip=-1em,colsep*=.75ex]{block title}% 43 | % \usebeamerfont*{block title}% 44 | % {\phantom{Gg}\insertblocktitle}% phantom because of baseline problem 45 | % \end{beamercolorbox}% 46 | % {\ifbeamercolorempty[bg]{block body}{}{\nointerlineskip\vskip-0.5pt}}% 47 | % \usebeamerfont{block body}% 48 | % \begin{beamercolorbox}[leftskip=1em,colsep*=.75ex,sep=0.5ex,vmode]{block body}% 49 | % \ifbeamercolorempty[bg]{block body}{\vskip-.25ex}{\vskip-.75ex}\vbox{}% 50 | % } 51 | 52 | % \setbeamertemplate{block end}{ 53 | % \end{beamercolorbox} 54 | % } 55 | 56 | %%%%%%%%%%%%%%% 57 | \setbeamertemplate{headline}{ 58 | \leavevmode 59 | 60 | \begin{beamercolorbox}[wd=\paperwidth]{headline} 61 | \begin{columns}[T] 62 | \begin{column}{.02\paperwidth} 63 | \end{column} 64 | \begin{column}{.7\paperwidth} 65 | \vskip4ex 66 | \raggedleft 67 | \usebeamercolor{title in headline}{\color{fg}\textbf{\LARGE{\inserttitle}}\\[1ex]} 68 | \usebeamercolor{author in headline}{\color{fg}\large{\insertauthor}\\[1ex]} 69 | \usebeamercolor{institute in 
headline}{\color{fg}\large{\insertinstitute}\\[1ex]} 70 | \end{column} 71 | \begin{column}{.02\paperwidth} 72 | \end{column} 73 | \begin{column}{.24\paperwidth} 74 | \vskip8ex 75 | \begin{center} 76 | \includegraphics[width=\linewidth]{ChalmersU_black.eps} 77 | \end{center} 78 | \vskip2ex 79 | \end{column} 80 | \begin{column}{.02\paperwidth} 81 | \end{column} 82 | \end{columns} 83 | \vskip2ex 84 | \end{beamercolorbox} 85 | 86 | \begin{beamercolorbox}[wd=\paperwidth]{lower separation line head} 87 | \rule{0pt}{3pt} 88 | \end{beamercolorbox} 89 | } 90 | 91 | % \setbeamertemplate{headline}{ 92 | % \leavevmode 93 | % \begin{beamercolorbox}[sep=0.5cm,wd=.8\paperwidth]{headline} 94 | % \usebeamercolor{title in headline}{\raggedleft\color{fg}\textbf{\LARGE{\inserttitle}}\\[1ex]} 95 | % \usebeamercolor{author in headline}{\raggedleft\color{fg}\large{\insertauthor}\\[1ex]} 96 | % \usebeamercolor{institute in headline}{\raggedleft\color{fg}\large{\insertinstitute}\\[1ex]} 97 | % \end{beamercolorbox}% 98 | % \begin{beamercolorbox}[wd=.2\paperwidth]{logo in headline} 99 | % \centering 100 | % \LARGE{LOGO} 101 | % \end{beamercolorbox} 102 | 103 | 104 | %} 105 | 106 | %%%%%%%%%%%%%%% 107 | \setbeamertemplate{footline}{ 108 | \begin{beamercolorbox}[wd=\paperwidth]{upper separation line foot} 109 | \rule{0pt}{3pt} 110 | \end{beamercolorbox} 111 | 112 | \leavevmode% 113 | \begin{beamercolorbox}[ht=4ex,leftskip=1em,rightskip=1em]{author in head/foot}% 114 | \hfill 115 | \texttt{gabriel.radanne@zoho.com} 116 | \vskip1ex 117 | \end{beamercolorbox} 118 | \vskip0pt% 119 | \begin{beamercolorbox}[wd=\paperwidth]{lower separation line foot} 120 | \rule{0pt}{3pt} 121 | \end{beamercolorbox} 122 | } 123 | 124 | %%%%%%%%%%%%%%% 125 | % Display a grid to help align images ... and it looks nice with this color scheme 126 | %\beamertemplategridbackground[1cm] 127 | 128 | %%%%%%%%%%%%%%% 129 | \mode 130 | 131 | % adapt height of imtemize rectangles 132 | \setbeamertemplate{itemize items}[triangle] 133 | \setbeamertemplate{itemize item}{\raisebox{0.12ex}{$\blacktriangleright$}\hskip0.1em} 134 | \setbeamertemplate{itemize subitem}{\raisebox{0.12ex}{$\triangleright$}\hskip0.1em} 135 | % or define your own template using \defbeamertemplate{itemize item}, see beameruserguide.pdf 136 | 137 | % equal font sizes for all levels 138 | \setbeamerfont{itemize/enumerate body}{size=\normalsize} 139 | \setbeamerfont{itemize/enumerate subbody}{size=\normalsize} 140 | \setbeamerfont{itemize/enumerate subsubbody}{size=\normalsize} 141 | -------------------------------------------------------------------------------- /src/Micro/Resolve.hs: -------------------------------------------------------------------------------- 1 | {-#LANGUAGE NamedFieldPuns, RecordWildCards, 2 | GeneralizedNewtypeDeriving, GADTs, ScopedTypeVariables, RankNTypes, 3 | DeriveFunctor, TupleSections #-} 4 | 5 | module Micro.Resolve where 6 | 7 | import Terms 8 | import qualified Micro.Abs as A 9 | import Fresh 10 | import Ident 11 | import qualified Data.Map as M 12 | import Data.Map (Map) 13 | import Control.Monad.Reader 14 | import Control.Applicative 15 | import TCM 16 | import RM 17 | 18 | -- | Naming utilies 19 | 20 | nameVar (A.Var (_,x)) = x 21 | 22 | nameLeft x = x ++ "ˡ" 23 | nameRight x = x ++ "ʳ" 24 | 25 | nameCut x _ = x 26 | 27 | resolveVar :: (Lens Env (Map String Id)) -> A.Var -> R (Maybe Id) 28 | resolveVar l (A.Var (_,x)) = M.lookup x . 
view l <$> ask 29 | 30 | liftR x = R $ lift $ x 31 | freshIdR = liftR freshId 32 | freshFromR x = liftR $ freshFrom x 33 | freshFromV :: A.Var -> R Id 34 | freshFromV x = freshFromR $ nameVar x 35 | 36 | insert :: (Lens Env (Map String Id)) -> A.Var -> (Id -> R a) -> R a 37 | insert l y k = do 38 | v <- freshFromV y 39 | insert' l y v (k v) 40 | 41 | insert' l (A.Var (_,x)) v = local (upd l $ M.insert x v) 42 | 43 | type Slice = Term' -> Term' 44 | 45 | resolve :: A.Module -> Either String (Term',Term') 46 | resolve t = Right $ runFreshM $ runReaderT (fromR $ resolveModule t) emptyEnv 47 | 48 | resolveModule :: A.Module -> R (Term',Term') 49 | resolveModule (A.Module t1 t2) = (,) <$> resolveTerm t1 <*> resolveTerm t2 50 | 51 | resolveTerm :: A.Term -> R (Term Id Id) 52 | resolveTerm x = resolveTerm' "top" x 53 | 54 | resolveTerm' :: String -> A.Term -> R (Term Id Id) 55 | resolveTerm' name (A.Destr x c t) = do 56 | (c'id,c') <- resolveDestr (nameVar x) c 57 | insert' hyp x c'id $ c' <$> resolveTerm' name t 58 | resolveTerm' name (A.Concl c) = do 59 | (c'id,c') <- resolveConstr name c 60 | return $ c' $ Concl (Conc c'id) 61 | resolveTerm' name (A.Constr x c t) = do 62 | (c'id,c') <- resolveConstr (nameVar x) c 63 | insert' con x c'id $ c' <$> resolveTerm' name t 64 | resolveTerm' name (A.Split x y d t) = do 65 | (d'id,d') <- resolveDestr (nameVar x ++ nameVar y) d 66 | insert hyp x $ \x' -> 67 | insert hyp y $ \y' -> 68 | d' . Split x' y' d'id <$> resolveTerm' name t 69 | resolveTerm' name (A.Case x bs) = do 70 | (x'id,x') <- resolveDestr name x 71 | bs' <- forM bs $ \(A.Br tag@(A.T tag') t) -> do 72 | (resolveTag tag,) <$> resolveTerm' (name ++ "|" ++ nameVar tag') t 73 | return (x' $ Case x'id [Br tag t' | (tag,t') <- bs']) 74 | 75 | resolveDestr :: String -> A.DC -> R (Id,Slice) 76 | resolveDestr _ (A.V x) = do 77 | x' <- resolveVar hyp x 78 | case x' of 79 | Just x'' -> return (x'',id) 80 | Nothing -> error $ "Unknown variable: " ++ show x 81 | resolveDestr name (A.Appl f x) = do 82 | (f'id,f') <- resolveDestr (name ++ "ᶠ") f 83 | (x'id,x') <- resolveConstr (name ++ "ᵃ") x 84 | r <- freshFromR name 85 | return (r,f' . x' . Destr r (App f'id (Conc x'id))) 86 | resolveDestr name (A.Cut x t) = do 87 | (x'id,x') <- resolveConstr (nameLeft name) x 88 | (t'id,t') <- resolveConstr (nameRight name) t 89 | r <- freshFromR name 90 | return (r, x'.t'.Destr r (Cut (Conc x'id) (Conc t'id))) 91 | resolveDestr _ x = do 92 | error $ "Tryed to make an inline cut. (Cuts must be explicit via use of =)\n" ++ show x 93 | 94 | resolveConstr :: String -> A.DC -> R (Id,Slice) 95 | resolveConstr _name (A.V x) = do 96 | x' <- resolveVar con x 97 | case x' of 98 | Nothing -> embedHyp (nameVar x) (A.V x) 99 | Just x'' -> return (x'',id) 100 | resolveConstr name (A.Rec x t) = 101 | insert hyp x $ \x' -> do 102 | r <- freshIdR 103 | t' <- resolveTerm' (name ++ "ʳᵉᶜ") t 104 | return (r,Constr (Conc r) (Rec x' t')) 105 | resolveConstr name (A.Lam x t) = 106 | insert hyp x $ \x' -> do 107 | r <- freshFromR name 108 | t' <- resolveTerm' name t 109 | return (r,Constr (Conc r) (Lam x' t')) 110 | resolveConstr name (A.Pi x c t) = do 111 | (c'id,c') <- resolveConstr (nameLeft name) c 112 | r <- freshFromR name 113 | insert hyp x $ \x' -> do 114 | t' <- resolveTerm' (nameRight name) t 115 | return (r,c' . 
Constr (Conc r) (Pi Invar x' (Conc c'id) t')) 116 | resolveConstr name (A.Fun c t) = do 117 | (c'id,c') <- resolveConstr (nameLeft name) c 118 | r <- freshFromR name 119 | t' <- resolveTerm' (nameRight name) t 120 | x' <- freshIdR 121 | return (r,c' . Constr (Conc r) (Pi Invar x' (Conc c'id) t')) 122 | resolveConstr name (A.Sigma x c t) = do 123 | (c'id,c') <- resolveConstr (nameLeft name) c 124 | r <- freshIdR 125 | insert hyp x $ \x' -> do 126 | t' <- resolveTerm' (nameRight name) t 127 | return (r,c' . Constr (Conc r) (Sigma Invar x' (Conc c'id) t')) 128 | resolveConstr name (A.Pair a b) = do 129 | (a'id,a') <- resolveConstr (name ++ ".1") a 130 | (b'id,b') <- resolveConstr (name ++ ".2") b 131 | r <- freshIdR 132 | return (r,a'.b'.Constr (Conc r) (Pair (Conc a'id) (Conc b'id))) 133 | resolveConstr name (A.Tag t) = do 134 | r <- freshFromR name 135 | return (r,Constr (Conc r) (Tag $ resolveTag t)) 136 | resolveConstr name (A.Fin ts) = do 137 | r <- freshFromR name 138 | return (r,Constr (Conc r) (Fin $ map resolveTag ts)) 139 | resolveConstr name (A.Univ (A.Nat (_,n))) = do 140 | r <- freshFromR name 141 | return (r,Constr (Conc r) (Universe $ read n)) 142 | resolveConstr name h = embedHyp name h 143 | 144 | embedHyp :: String -> A.DC -> R (Id, Slice) 145 | embedHyp name h = do 146 | r <- freshFromR name 147 | (h'id,h') <- resolveDestr name h 148 | return (r,h' . Constr (Conc r) (Hyp h'id)) 149 | 150 | 151 | resolveTag (A.T (A.Var (_,x))) = x 152 | -------------------------------------------------------------------------------- /core.org: -------------------------------------------------------------------------------- 1 | <2013-12-12 Do> 2 | 3 | * Syntax 4 | 5 | x Hypothesis variable 6 | _x Conclusion variable 7 | 8 | 'l Label 9 | 10 | p Projection 11 | ::= 1 | 2 12 | 13 | br Branch 14 | ::= 'l → n 15 | 16 | n Normal form 17 | ::= 18 | | _x | conclude | 19 | | let x = d in n | destruction | 20 | | case x of b* | case analysis | 21 | | let _x = c in n | construction | 22 | 23 | d ::= Destruction 24 | | y _z | application | 25 | | z.p | projection | 26 | | _x : _y | cut (not in normal forms!) | 27 | 28 | c Constructions 29 | ::= 30 | | x | | use of hyp. | 31 | | λ x. n | Π (x : _y). n | functions | 32 | | (_x, _y) | Σ (x : _y). n | pairs | 33 | | 'l | {'ls} | labels | 34 | | ★ | | universe | 35 | 36 | * Environments 37 | 38 | e ::= Environment entry 39 | | x : _y | new hypothesis | context | 40 | | _x = c | defined conclusion | heapConstr | 41 | | x = y | alias | heapAlias | 42 | | x = d | defined hypothesis | heapCuts, heapDestr | 43 | | x = _y | cut | heapCuts | 44 | 45 | γ,h ::= e* Environment (or heap) 46 | 47 | ** Environment extension 48 | 49 | γ + e environment extension 50 | 51 | γ + x = γ, x new hypothesis 52 | 53 | γ + (x = d) = γ, x = y if y = d ∈ γ (modulo equality of variables) 54 | γ, x = d otherwise 55 | 56 | γ + (_x = c) = γ, _x = _y if _y = c ∈ γ (modulo equality of variables) 57 | γ, _x = c otherwise 58 | 59 | γ + ('l = x) = γ if 'l = x ∈ γ (modulo equality of variables) 60 | = ⊥ if 'u = x ∈ γ for l ≠ u ( -"- ) 61 | = γ, 'l = x otherwise 62 | 63 | 64 | For the eager evaluation of cuts one can proceed as follows: 65 | 66 | check for construction of a redex. If one is created, reduce it. One obtains a binding of the form x = n 67 | Push the lets into the environment. When finding a case, SPLIT the result! The checking must then proceed for 68 | each split. 
69 | * Equality check 70 | 71 | ** Judgements 72 | 73 | γ ⊢ n = n' compare normal forms 74 | γ ⊢ c = c' compare constructions 75 | 76 | ** Rules 77 | 78 | ⊥ ⊢ rhs --> true 79 | 80 | γ ⊢ let b in n = n' --> γ + b ⊢ n = n' 81 | γ ⊢ n = let b in n' --> γ + b ⊢ n = n' 82 | 83 | γ ⊢ case x of {'li → ni} = n --> ∀i. γ + 'li = x ⊢ ni = n 84 | γ ⊢ n = case x of {'li → ni} --> ∀i. γ + 'li = x ⊢ n = ni 85 | 86 | γ ⊢ _x = _y --> check equality of variables 87 | 88 | γ ⊢ _x = c --> γ ⊢ γ(_x) = c 89 | γ ⊢ c = _y --> γ ⊢ c = γ(_y) 90 | 91 | γ ⊢ x = y --> check equality of variables 92 | (may include eq. via label) 93 | 94 | γ ⊢ λ x. n = λ x'. n' --> γ, x, x'=x ⊢ n = n' 95 | γ ⊢ λ x. n = y --> γ, x, _x = x, z = y _x ⊢ n = z 96 | 97 | γ ⊢ (_x1, _x2) = (_y1, _y2) --> γ ⊢ _x1 = _y1 && γ ⊢ _x2 = _y2 98 | γ ⊢ (_x1, _x2) = y --> γ + z=y.1 ⊢ _x1 = z && γ + z=y.2 ⊢ _x2 = z 99 | 100 | γ ⊢ 'l = 'l --> true 101 | 102 | 103 | 104 | * Typing rules 105 | ** Judgements 106 | 107 | | Γ;h ⊢ b | check a destruction | 108 | | Γ;h ⊢ x ⇉ A | infer an hyp. var | 109 | | Γ;h ⊢ n ⇇ C | check a term/normal form | 110 | | Γ;h ⊢ c ⇇ C | check a constructor | 111 | 112 | ** Rules 113 | 114 | *** Γ;h ⊢ b 115 | 116 | 117 | Γ;h ⊢ y ⇉ (z:A) → B Γ;h ⊢ _z ⇇ A 118 | -------------------------------------------- 119 | Γ;h ⊢ x = y _z 120 | 121 | Γ;h ⊢ y ⇉ (z:A) × B 122 | -------------------------- 123 | Γ;h ⊢ x = y.1 124 | 125 | Γ;h ⊢ y ⇉ (z:A) × B 126 | -------------------------- 127 | Γ;h ⊢ x = y.2 128 | 129 | 130 | Γ;h ⊢ _x ⇇ _A 131 | ------------------------ 132 | Γ;h ⊢ x = _x : _A 133 | 134 | *** Γ;h ⊢ x ⇉ A 135 | 136 | Γ(x) = A 137 | -------------------- 138 | Γ;h ⊢ x ⇉ A 139 | 140 | 141 | h(x) = y _z Γ;h ⊢ y ⇉ (z:A) → B 142 | ---------------------------------------- 143 | Γ;h ⊢ x ⇉ let z = _z in B 144 | 145 | 146 | h(x) = y.1 Γ;h ⊢ y ⇉ (z:_A) × B 147 | ---------------------------------------- 148 | Γ;h ⊢ x ⇉ _A 149 | 150 | 151 | h(x) = y.2 Γ;h ⊢ y ⇉ (z:_A) × B 152 | ---------------------------------------- 153 | Γ;h ⊢ x ⇉ let z = y.1 in B 154 | 155 | 156 | h(x) = (_x:_A) 157 | ------------------ 158 | Γ;h ⊢ x ⇉ _A 159 | 160 | *** Γ;h ⊢ n ⇇ C 161 | 162 | for each i: Γ; h + ti = x ⊢ ai ⇇ C 163 | Γ(x) = {`ti} 164 | ------------------------------------ 165 | Γ;h ⊢ case x of {`ti ↦ ai } ⇇ C 166 | 167 | 168 | Γ;h+b ⊢ a ⇇ C Γ;h ⊢ b 169 | ---------------------------------- 170 | Γ;h ⊢ let b in a ⇇ C 171 | 172 | 173 | Γ;h+c ⊢ a ⇇ C 174 | ---------------------------------- 175 | Γ;h ⊢ let c in a ⇇ C 176 | 177 | 178 | h(z) = x Γ;h ⊢ x ⇉ A Γ;h ⊢ A = C 179 | ------------------------------------------- 180 | Γ;h ⊢ z ⇇ C 181 | 182 | 183 | h(_z) = c Γ;h ⊢ c ⇇ C 184 | ------------------------- 185 | Γ;h ⊢ _z ⇇ C 186 | 187 | *** Γ;h ⊢ c ⇇ C 188 | 189 | Γ;h+b ⊢ c ⇇ C (no need to re-check the binding as the type is checked already) 190 | ------------------------- 191 | Γ;h ⊢ c ⇇ let b in c 192 | 193 | 194 | for each i: Γ;h + ti = x ⊢ c ⇇ Ci 195 | ----------------------------------------- 196 | Γ;h ⊢ c ⇇ case x of {`ti -> Ci} 197 | 198 | 199 | Γ;h ⊢ c ⇇ h(_x) 200 | ------------------ 201 | Γ;h ⊢ c ⇇ _x 202 | 203 | 204 | Γ;h ⊢ _a ⇇ _A Γ;h+x=_a:_A ⊢ _b ⇇ B 205 | ---------------------------------------------- 206 | Γ;h ⊢ (_a,_b) ⇇ (x:_A) × B 207 | 208 | 209 | Γ,w:_A;h ⊢ t ⇇ let x = w in B 210 | ------------------------------------ 211 | Γ;h ⊢ λw. 
t ⇇ (x:_A) -> B 212 | 213 | 214 | For the lazy evaluation of cuts, one adds the following rules: 215 | 216 | 217 | h(x) = y z h(y)=λw.n Γ;h ⊢ c ⇇ n[z/w] 218 | -------------------------------------------------- 219 | Γ;h ⊢ c ⇇ x 220 | 221 | 222 | 223 | h(x) = y.1 h(y)=(_w,_z) Γ;h ⊢ c ⇇ _w 224 | ------------------------------------------ 225 | Γ;h ⊢ c ⇇ x 226 | 227 | * Evaluation 228 | * reduction of n1[n2/x] (never needed!) 229 | 230 | 0. Check for occurence of x in n1; quick exit to n1 if no occurence. (Optional) 231 | 1. Push the bindings/cases of n2 out, and construct a heap of bindings on the fly. 232 | 2. we then have the form TREE(n1[_x/x]), return TREE(let x = _x in n1) 233 | -------------------------------------------------------------------------------- /report-radanne/Common.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -i../src/ -XTypeSynonymInstances -XOverloadedStrings -XRecursiveDo -pgmF marxup3 -F #-} 2 | module Common where 3 | 4 | import MarXup 5 | import MarXup.Math 6 | import MarXup.Latex 7 | import MarXup.Verbatim 8 | import MarXup.Latex.Math 9 | import MarXup.Latex.Bib 10 | import MarXup.Tex 11 | 12 | import Control.Monad 13 | import Control.Monad.Fix (MonadFix,mfix) 14 | import Control.Applicative 15 | import Data.Monoid 16 | import Data.List (sort,intersperse) 17 | import Data.String 18 | 19 | 20 | 21 | bibliographyAll :: TeX 22 | bibliographyAll = do 23 | bibliographystyle "abbrvnat" 24 | bibliography "../PaperTools/bibtex/jp" 25 | 26 | authors :: [AuthorInfo] 27 | authors = [AuthorInfo "Gabriel Radanne" "gabriel.radanne@zoho.com" "Under the supervision of Jean-Philippe Bernardy"] 28 | 29 | 30 | texttt = text . (cmd "texttt") 31 | 32 | na :: TeX 33 | na = «nano-Agda» 34 | 35 | item' x = cmd "item" (cmd "textbf" x) 36 | description = env "description" 37 | 38 | -- | Math commands : 39 | γ = Con $ cmd "gamma" nil 40 | γ' = Con $ cmd "gamma'" nil 41 | γty = Con $ cmd "Gamma" nil 42 | γty' = Con $ cmd "Delta" nil 43 | γc = Con $ cmd "gamma_c" nil 44 | γa = Con $ cmd "gamma_a" nil 45 | γd = Con $ cmd "gamma_d" nil 46 | γd' = Con $ cmd "gamma_d'" nil 47 | h = text "h" 48 | h' = prim h 49 | h'' = prim $ h' 50 | 51 | bot = Con $ cmd "bot" nil 52 | λ = Con $ cmd "lambda" nil 53 | π = Con $ cmd "Pi" nil 54 | σ = Con $ cmd "Sigma" nil 55 | nat = Con "ℕ" 56 | 57 | fa = Con $ cmd "forall" nil 58 | 59 | mparen = outop "(" ")" 60 | mbracket = outop "{" "}" 61 | mbrac = outop "[" "]" 62 | 63 | quad = cmd0 "quad" 64 | qquad = cmd0 "qquad" 65 | space = tex "\\," 66 | app f x = f <-> mparen x 67 | (\=) = binop 1 "=" 68 | (=/=) = binop 1 "≠" 69 | (≡) = binop 1 (cmd0 "equiv") 70 | (≅) = binop 1 (cmd0 "cong") 71 | () = binop 1 space 72 | (<->) = binop 1 nil 73 | (<^>) = binop 1 "," 74 | (<.>) = binop 1 "." 75 | (<:>) = binop 1 ":" 76 | (//) = binop 1 "/" 77 | (∨) = binop 1 "∨" 78 | (∧) = binop 1 "∧" 79 | (≠) = binop 1 (cmd0 "neq") 80 | a \== b = mparen $ a \= b 81 | (∈) = binop 1 (cmd0 "in") 82 | (→) = binop 1 (cmd0 "to") 83 | (←) = binop 1 (cmd0 "gets") 84 | (×)= binop 1 (cmd0 "times") 85 | (|->) = binop 1 (cmd0 "mapsto") 86 | concl = UnOp 1 (cmd "overline") 1 87 | iff = UnOp 1 (\s -> mathsf "if " <> space <> s ) 1 88 | proj p = UnOp 1 (\s -> s <> mathsf ("." 
<> p) ) 1 89 | proj1 = proj "1" 90 | proj2 = proj "2" 91 | prim = UnOp 1 (\s -> s <> "'") 1 92 | 93 | squig = Con $ cmd0 "rightsquigarrow" 94 | (~>) = binop 1 $ cmd0 "rightsquigarrow" 95 | (~/>) = binop 1 $ cmd0 "not \\rightsquigarrow" 96 | (~~>) = binop 1 $ (backslash <> tex "rightsquigarrow" <> (element $ indice $ text "c")) 97 | (~>*) = binop 1 $ ((backslash <> tex "rightsquigarrow") <> superscript (tex "*")) 98 | (#) = binop 1 ", " 99 | subst t x y = t <-> mbrac ( x // y ) 100 | (==>) = binop 1 $ cmd0 "Rightarrow" 101 | 102 | indice = UnOp 1 (\ x -> tex "_" <> braces x) 1 103 | 104 | (@-) = BinOp 1 (\ x y -> x <> tex "_" <> braces y) 1 1 105 | 106 | (⊢) = binop 1 (cmd0 "vdash") 107 | (\::=) = binop 1 "::=" 108 | 109 | (<@) = binop 1 (cmd0 "leftleftarrows") 110 | (@>) = binop 1 (cmd0 "rightrightarrows") 111 | 112 | 113 | x,y,z,c,d,l,l2,t :: Math 114 | x = text "x" 115 | y = text "y" 116 | x' = prim x 117 | y' = prim y 118 | z = text "z" 119 | z' = prim z 120 | w = text "w" 121 | 122 | c = text "c" 123 | cty = text "C" 124 | d = text "d" 125 | d' = prim $ text "d'" 126 | 127 | xty = text "X" 128 | xty' = prim xty 129 | yty = text "Y" 130 | zty = text "Z" 131 | 132 | i = text "i" 133 | j = text "j" 134 | 135 | l = text "`l" 136 | l2 = text "`m" 137 | n = text "n" 138 | t = text "t" 139 | u = text "u" 140 | t' = prim $ t 141 | t'' = prim $ t' 142 | tty = text "T" 143 | tty' = prim $ text "T" 144 | 145 | lra = cmd0 "longrightarrow" 146 | star = Con $ cmd0 "star" 147 | kind = ( star @- ) 148 | 149 | pair_ x y = mparen $ binop 1 "," x y 150 | lambda_ x t = λ <-> x <.> t 151 | let_ x a t = texttt«let» x \= a texttt«in» t 152 | case_ x l = 153 | let l' = text $ mconcat $ intersperse ", " l in 154 | texttt«case» x texttt«of» mbracket l' 155 | pi_ x y t = mparen ( x <:> y ) → t 156 | sigma_ x y t = mparen ( x <:> y ) × t 157 | fin_ l = mbracket l 158 | cut_ x y = mparen (x <:> y) 159 | 160 | figure'' :: TeX -> Tex a -> Tex (SortedLabel, a) 161 | figure'' caption body = env' "figure" ["!h"] $ do 162 | l<-body 163 | cmd "caption" caption 164 | l'<-label "Fig." 165 | vspace"-0.3cm" 166 | return (l',l) 167 | 168 | figure' :: TeX -> TeX -> Tex SortedLabel 169 | figure' c b = do { (l,_) <- figure'' c b ; return l } 170 | 171 | align' :: [[TeX]] -> Tex SortedLabel 172 | align' body = env "align" $ do 173 | mkrows $ map mkcols body 174 | label "Eq." 175 | 176 | centering = cmd0 "centering" 177 | footnote = cmd "footnote" 178 | 179 | minipage :: String -> TeX -> Tex a -> Tex a 180 | minipage align length = 181 | env'' "minipage" [align] [length <> cmd0 "linewidth"] 182 | 183 | subfigure :: String -> TeX -> TeX -> TeX -> Tex SortedLabel 184 | subfigure align length caption body = 185 | env'' "subfigure" [align] [length <> cmd0 "linewidth"] $ do 186 | body 187 | cmd "caption" caption 188 | label "Fig." 
189 | 190 | 191 | 192 | -- | Mathpartir 193 | 194 | rule name pre conc = 195 | cmdm "inferrule" [name] [mkrows pre, conc] 196 | 197 | ruleref = cmd "textsc" 198 | 199 | -- | Lstlistings 200 | 201 | -- data Listing a = Listing {fromListing::String, value::a} 202 | type Listing a = Verbatim a 203 | 204 | agdai :: Listing () -> TeX 205 | agdai = lstinline ["language=Agda"] 206 | 207 | nai :: Listing () -> TeX 208 | nai = lstinline ["language=nanoAgda"] 209 | 210 | -- listing :: [String] -> Listing () -> TeX 211 | -- listing opt (Listing s _) = 212 | -- env' "lstlisting" opt (tex s) 213 | 214 | -- lstinline :: [String] -> Listing () -> TeX 215 | -- lstinline opt (Listing s _) = 216 | -- let sep = tex "$" 217 | -- opt' = tex $ mconcat . intersperse ", " 218 | -- $ "basicstyle=\\ttfamily" : opt in 219 | -- backslash <> "lstinline" <> brackets opt' <> sep <> tex s <> sep 220 | 221 | 222 | agdacode :: Listing () -> TeX 223 | agdacode code = 224 | listing ["language=Agda"] code 225 | 226 | nacode :: String -> TeX 227 | nacode file = 228 | cmd' "lstinputlisting" ["language=nanoAgda"] $ tex file 229 | 230 | -- | Presentation stuff 231 | 232 | blockB :: TeX -> Tex a -> Tex a 233 | blockB a content = env'' "block" [] [a] content 234 | 235 | columns :: String -> Tex a -> Tex a 236 | columns a content = 237 | env' "columns" [a] content 238 | 239 | columnE :: String -> TeX -> Tex a -> Tex a 240 | columnE a size content = 241 | env'' "column" [a] [size <> cmd0 "linewidth"] content 242 | 243 | column :: TeX -> TeX 244 | column size = 245 | cmd "column" $ size <> cmd0 "linewidth" 246 | -------------------------------------------------------------------------------- /src/Heap.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE RecordWildCards, GADTs, OverloadedStrings, TypeSynonymInstances, FlexibleInstances, RecordWildCards #-} 2 | 3 | module Heap (emptyHeap, addDef,addCut,lookHeapC,getAlias,addConstr, addConstr', addDestr', enter,addDestr,addAlias',aliasOf,pConc,pHyp,addAlias) where 4 | 5 | import Control.Monad.RWS 6 | import Control.Applicative 7 | 8 | import Data.Bifunctor 9 | import qualified Data.Map as M 10 | 11 | import Terms 12 | import Ident 13 | import Display 14 | import TCM 15 | import Fresh 16 | 17 | emptyHeap :: Heap n r 18 | emptyHeap = Heap 0 M.empty M.empty M.empty M.empty M.empty M.empty 19 | 20 | enter :: TC a -> TC a 21 | enter = local (\h@Heap{..} -> h {dbgDepth = dbgDepth + 1}) 22 | 23 | addDef :: (Monoid a,Id~n,Id~r,Ord n) => Hyp n -> Constr n r -> TC a -> TC a 24 | addDef h c k = do 25 | case c of 26 | Hyp h' -> addAlias h h' k 27 | _ -> do 28 | c' <- Conc <$> liftTC (refreshId h) 29 | addConstr c' c $ local (addCut' h (Right c')) k 30 | 31 | addElimDef :: (Monoid a,Id~n,Id~r,Ord n) => Hyp n -> Destr n -> TC a -> TC a 32 | addElimDef h d = local (addCut' h $ Left d) 33 | 34 | addCut :: (Id~n,Id~r,Ord n) => Hyp n -> Conc r -> TC a -> TC a 35 | addCut src c k = do 36 | report $ "adding cut: " <> pretty src <> " => " <> pretty c 37 | c' <- lookHeapC c 38 | case c' of 39 | Hyp h' -> addAlias src h' k 40 | _ -> local (addCut' src $ Right c) k 41 | 42 | addCut' :: Ord n => n -> DeCo r -> Heap n r -> Heap n r 43 | addCut' src trg h@Heap{..} = h{heapDestr = M.insert src trg heapDestr } 44 | 45 | addAlias' :: Ord r => r -> r -> Heap n r -> Heap n r 46 | addAlias' src trg h@Heap{..} = h{heapAlias = f <$> M.insert src trg heapAlias } 47 | where f x = if x == src then trg else x 48 | 49 | addAliases' :: Ord r => [(r,r)] -> Heap n r -> Heap n r 50 
| addAliases' = foldr (.) id . map (uncurry addAlias') 51 | 52 | addConstr' :: Ord n => Conc n -> Constr n r -> Heap n r -> Heap n r 53 | addConstr' src trg h@Heap{..} = h{heapConstr = M.insert src trg heapConstr } 54 | 55 | addRevConstr' :: (Ord n, Ord r) => Constr n r -> Conc n -> Heap n r -> Heap n r 56 | addRevConstr' src trg h@Heap{..} = h{heapRevConstr = M.insert src trg heapRevConstr } 57 | 58 | addDestr' :: Ord r => Destr r -> n -> Heap n r -> Heap n r 59 | addDestr' src trg h@Heap{..} = h{heapRevDestr = M.insert src trg heapRevDestr } 60 | 61 | getAlias h x = M.findWithDefault x x h 62 | 63 | addAliases :: [(Id,Id)] -> TC a -> TC a 64 | addAliases [] k = k 65 | addAliases as k = do 66 | report $ ("Adding aliases:" $$+ pretty as) 67 | h <- addAliases' as <$> ask 68 | let hD' :: M.Map (Destr Id) [Hyp Id] 69 | applyAlias = getAlias $ heapAlias h 70 | hD' = M.mapKeysWith (++) (fmap applyAlias) $ fmap (:[]) $ heapRevDestr h 71 | myhead (x:_) = x 72 | hD'' = fmap myhead hD' 73 | classes = M.elems hD' 74 | aliases = [(x,y) | (x:xs) <- classes, y <- xs] 75 | -- apply aliases to redexes 76 | -- todo: remove orphan redexes? 77 | hC' :: M.Map (Hyp Id) (DeCo Id) 78 | hC' = bimap (applyAlias <$>) id <$> heapDestr h 79 | local (\h2 -> h2 {heapRevDestr = hD'', heapAlias = heapAlias h, heapDestr = hC'}) $ 80 | addAliases aliases k 81 | 82 | addAlias :: Id -> Id -> TC a -> TC a 83 | addAlias src trg = addAliases [(src,trg)] 84 | 85 | aliasOf x = flip getAlias x . heapAlias <$> ask 86 | 87 | -- | Look for some constructed value in the heap. 88 | lookHeapC :: (r~Id,n~Id) => Conc n -> TC (Constr n r) 89 | lookHeapC x = do 90 | h <- ask 91 | x' <- Conc <$> aliasOf (conc x) 92 | let lk = M.lookup x' (heapConstr h) 93 | case lk of 94 | Nothing -> terr $ "Construction not found: " <> pretty x 95 | Just c -> return c 96 | 97 | 98 | 99 | addDestr :: Hyp Id -> Destr Id -> TC a -> TC a 100 | addDestr x (Cut c _ct) k = addCut x c k 101 | addDestr x d k = do 102 | h <- ask 103 | let d' = getAlias (heapAlias h) <$> d 104 | report ("Adding destr." 105 | $$+ pretty x <+> "=" 106 | $$+ pretty d <+> "; aliased to" <+> pretty d') 107 | local (addCut' x (Left d')) $ case M.lookup d' (heapRevDestr h) of 108 | Just y -> addAlias y x k 109 | Nothing -> local (addDestr' d' x) k 110 | 111 | -- | return true if fizzled, otherwise call the continuation. 
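-- Concretely: if the conclusion is a tag and the variable is already bound to a
-- *different* tag, the environment is inconsistent, so the continuation is
-- skipped and mempty is returned ('True' for the Bool instance below); otherwise
-- the construction is recorded, and when an identical construction already
-- exists in the heap the new conclusion is additionally aliased to it so that
-- the two are shared.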
112 | addConstr :: Monoid a => Conc Id -> Constr' -> TC a -> TC a 113 | addConstr x c k = do 114 | h <- ask 115 | let c' = getAlias (heapAlias h) <$> c 116 | report ("Adding construction" 117 | $$+ pretty x <+> "=" 118 | $$+ pretty c <+> "; aliased to" <+> pretty c') 119 | case c of 120 | Tag t | Just (Tag t') <- M.lookup x $ heapConstr h -> 121 | if t /= t' then return mempty else k 122 | _ -> local (addConstr' x c') $ case M.lookup c' (heapRevConstr h) of 123 | Just (Conc y) -> addAlias y (conc x) k 124 | Nothing -> local (addRevConstr' c' x) k 125 | 126 | instance Monoid Bool where 127 | mempty = True 128 | mappend = (&&) 129 | 130 | -- | Pretty printing 131 | 132 | class Prettier a where 133 | prettier :: a -> TC Doc 134 | 135 | pConc :: Conc Id -> TC Doc 136 | pConc x = prettier =<< lookHeapC x 137 | 138 | pHyp :: Hyp Id -> TC Doc 139 | pHyp x = do 140 | h <- ask 141 | let lk = M.lookup (getAlias (heapAlias h) x) $ heapDestr h 142 | case lk of 143 | Nothing -> return $ pretty x 144 | Just (Right c) -> pConc c 145 | Just (Left d) -> prettier d 146 | 147 | instance Prettier Term' where 148 | prettier (Concl c) = pConc c 149 | prettier (Destr h d t) = addDestr h d $ prettier t 150 | prettier (Constr x c t) = addConstr x c $ prettier t 151 | prettier (Split x y z t) = do 152 | z' <- pHyp z 153 | t' <- prettier t 154 | return $ ("split " <> z' <> "into " <> pretty x <> "," <> pretty y $$ t') 155 | prettier (Case x bs) = do 156 | bs' <- mapM prettier bs 157 | h <- pHyp x 158 | return $ ("case" <+> h <+> "of {") $$+ (sep $ punctuate "." $ bs') $$ "}" 159 | 160 | instance Prettier Constr' where 161 | prettier (Hyp h) = pHyp h 162 | prettier (Lam x b) = do 163 | b' <- prettier b 164 | return $ ("\\" <> pretty x <> " ->") $$+ b' 165 | prettier (Pi v x t b) = do 166 | t' <- pConc t 167 | b' <- prettier b 168 | return $ (parens (pretty x <+> (":"<> pretty v) <+> t') <+> "->") $$+ b' 169 | prettier (Sigma v x t b) = do 170 | t' <- pConc t 171 | b' <- prettier b 172 | return $ (parens (pretty x <+> (":"<> pretty v) <+> t') <+> " × ") $$+ b' 173 | prettier (Pair a b) = do 174 | a' <- pConc a 175 | b' <- pConc b 176 | return $ parens $ a' $$+ "," $$+ b' 177 | prettier x = return $ pretty x 178 | 179 | instance Prettier Destr' where 180 | prettier (App f x) = do 181 | f' <- pHyp f 182 | x' <- pConc x 183 | return $ f' <+> x' 184 | prettier (Cut x t) = do 185 | x' <- pConc x 186 | t' <- pConc t 187 | return $ x' <+> ":" <+> t' 188 | 189 | instance Prettier Branch' where 190 | prettier (Br tag t) = (\x -> ("'" <> text tag <> " ->") $$+ x) <$> prettier t 191 | -------------------------------------------------------------------------------- /src/TypeCheck.hs: -------------------------------------------------------------------------------- 1 | {-#LANGUAGE NamedFieldPuns, RecordWildCards, GeneralizedNewtypeDeriving, GADTs, ScopedTypeVariables, OverloadedStrings #-} 2 | module TypeCheck (typeCheck) where 3 | import Terms 4 | import qualified Data.Map as M 5 | 6 | import Control.Monad.Reader 7 | import Control.Monad.Writer 8 | import Control.Applicative 9 | import Eval 10 | import Eq 11 | import Fresh 12 | import Ident 13 | import Display 14 | import TCM 15 | import Heap 16 | 17 | getVar x = do 18 | ctx <- context <$> ask 19 | case M.lookup x ctx of 20 | Nothing -> terr $ "Panic: " <> pretty x <> " hyp. not found in context." 21 | Just c -> return $ fst c 22 | 23 | 24 | -- TODO: don't return a boolean. 
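-- Top-level entry point: @typeCheck term typ@ runs the checker in an empty
-- heap, first checking that @typ@ is itself a well-formed type (against an
-- arbitrarily large universe level), and then checking @term@ against it.
-- The [Doc] component of the result is the log of 'report' messages
-- accumulated during checking.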
25 | 26 | typeCheck :: Term' -> Term' -> (Either Doc (),[Doc]) 27 | typeCheck a t = runTC (max (nextUnique t) (nextUnique a)) emptyHeap chk 28 | where chk = do report $ "Start" 29 | checkSort t 100000 30 | checkTermAgainstTerm a (Invar, t) 31 | 32 | addCtx' :: Ord n => n -> (Variance, Conc r) -> Heap n r -> Heap n r 33 | addCtx' x t h@Heap{..} = h{context = M.insert x t context } 34 | 35 | addCtx :: Id -> (Variance, Conc Id) -> (TC ()) -> TC () 36 | addCtx x t k = do 37 | report $ "Adding hyp " <> pretty x <> ":" <> pretty t 38 | local (addCtx' x t) k 39 | 40 | -- Infer the type of a destruction and return it as a normal form. 41 | inferDestr :: (n~Id,r~Id) => Destr r -> (Conc r -> TC ()) -> TC () 42 | inferDestr (Cut v vt) k = do 43 | checkConclSort vt 10000 44 | checkConcl v (Invar, vt) -- TODO | Not sure if it's the good solution. 45 | k vt 46 | inferDestr (App f a_) k = 47 | inferHyp f $ \ft -> 48 | case ft of 49 | (Pi v x t_ u) -> do 50 | checkConcl a_ (v, t_) 51 | retTyp <- substByDestr x (Cut a_ t_) u 52 | onConcl retTyp k 53 | _ -> terr $ pretty f <> " has not a function type" 54 | 55 | inferHyp :: (n~Id,r~Id) => Hyp r -> (Constr n r -> TC ()) -> TC () 56 | inferHyp h k = (\(_,c) -> hnfUnfoldRec c k) =<< inferHyp' h 57 | 58 | -- | Mere lookup of type in the context 59 | inferHyp' :: (n~Id,r~Id) => Hyp r -> TC (Variance, Conc r) 60 | inferHyp' h = do 61 | ctx <- context <$> ask 62 | case M.lookup h ctx of 63 | Nothing -> terr $ "Panic: " <> pretty h <> " hyp. not found in context." 64 | Just c -> return c 65 | 66 | -- maintains the invariant that every hyp. has an entry in the context. 67 | checkBindings :: (n~Id,r~Id) => Term n r -> (Conc r -> TC ()) -> TC () 68 | checkBindings (Concl c) k = k c 69 | checkBindings (Constr x c t1) k = do 70 | -- report $ "constructing" <> pretty x 71 | addConstr x c $ do 72 | -- report $ "constructed" <> pretty x 73 | checkBindings t1 k 74 | checkBindings (Destr x d t1) k = inferDestr d $ \dt -> do 75 | report $ "inferred " <> pretty d <> " to be of type " <> pretty dt 76 | addCtx x (Invar, dt) $ addDestr x d $ checkBindings t1 k 77 | -- TODO | Compute the correct variance. 78 | 79 | checkBindings (Split x y z t1) k = inferHyp z $ \zt -> case zt of 80 | Sigma v xx t_ u -> do 81 | u' <- substTC xx x u 82 | addCtx x (v,t_) $ onConcl u' $ \u'' -> 83 | addCtx y (Invar, u'') $ addSplit x y z $ checkBindings t1 k 84 | -- TODO | Compute the correct variance. 85 | _ -> do 86 | doc_z <- pHyp z -- fixme: print the type 87 | terr $ (pretty z <+> "has not a pair type.") $$+ (pretty zt <+> "=" $$+ doc_z) 88 | checkBindings (Case x bs) k = 89 | inferHyp x $ \xt -> 90 | case xt of 91 | Fin ts -> do 92 | let ts' = [t | Br t _ <- bs] 93 | when (ts /= ts') $ terr $ "mismatching tags in case on " <> pretty x 94 | forM_ bs $ \(Br tag t1) -> addTag x tag $ checkBindings t1 k 95 | _ -> terr $ pretty x <> " has not a fin. 
type, but " <> pretty xt 96 | 97 | checkTermAgainstTerm :: (n~Id,r~Id) => Term n r -> (Variance, Term n r) -> TC () 98 | checkTermAgainstTerm e t = checkBindings e $ \c -> checkConAgainstTerm c t 99 | 100 | checkConAgainstTerm :: (n~Id,r~Id) => Conc r -> (Variance, Term n r) -> TC () 101 | checkConAgainstTerm c (v,t) = onConcl t $ \t' -> checkConcl c (v, t') 102 | 103 | checkConcl :: (n~Id,r~Id) => Conc r -> (Variance, Conc r) -> TC () 104 | checkConcl v t = do 105 | report $ "checking conclusion " <> pretty v <> ":" <> pretty t 106 | v' <- lookHeapC v 107 | checkConstrAgainstConcl v' t 108 | 109 | checkConstrAgainstConcl :: (n~Id,r~Id) => Constr n r -> (Variance, Conc r) -> TC () 110 | checkConstrAgainstConcl (Hyp h) u = checkHyp h u 111 | checkConstrAgainstConcl (Rec n b) t = do 112 | addCtx n t $ checkTermAgainstTerm b (fst t, Concl $ snd t) 113 | checkConstrAgainstConcl val (v, typ) = do 114 | report $ "checking construction" 115 | $$+ (sep ["val" <+> pretty val, "typ" <+> pretty typ]) 116 | hnf typ $ \typ' -> checkConstr val (v, typ') 117 | where 118 | checkConstr :: (n~Id,r~Id) => Constr n r -> (Variance, Constr n r) -> TC () 119 | checkConstr (Hyp _) t = error "dealt with above" 120 | checkConstr (Pair a_ b_) (var, Sigma v xx ta_ tb_) = do 121 | checkConcl a_ (v, ta_) 122 | tb' <- substByDestr xx (Cut a_ ta_) tb_ 123 | checkConAgainstTerm b_ (var, tb') 124 | checkConstr (Lam x b_) (var, Pi v xx ta_ tb_) = do 125 | addCtx x (v,ta_) $ addAlias xx x $ checkTermAgainstTerm b_ (var, tb_) 126 | checkConstr tag@(Tag t) ty@(_, Fin ts) = unless (t `elem` ts) $ terr $ 127 | pretty tag <> " is not found in " <> pretty ty 128 | checkConstr (Sigma v xx ta_ tb_) (_, Universe s) = do 129 | checkConclSort ta_ s 130 | addCtx xx (v, ta_) $ checkSort tb_ s 131 | checkConstr (Pi v xx ta_ tb_) (_, Universe s) = do 132 | checkConclSort ta_ s 133 | addCtx xx (v, ta_) $ checkSort tb_ s 134 | checkConstr (Fin _) (_, Universe _s) = return () 135 | checkConstr (Universe s') (_, Universe s) = 136 | unless (s' < s) $ terr $ int s' <> " is not a subsort of" <> int s 137 | checkConstr x (var, Rec r t) = do 138 | unfoldRec typ r t $ \t' -> checkConstrAgainstConcl x (var, t') 139 | 140 | checkConstr v t = terr $ hang "Type mismatch: " 2 $ sep ["value: " <> pretty v, "type: " <> pretty t] 141 | 142 | 143 | checkHyp h u = do 144 | t <- inferHyp' h 145 | eq <- isSubTypeOf t u 146 | doc_t <- pConc $ snd t 147 | doc_u <- pConc $ snd u 148 | doc_h <- pHyp h 149 | unless eq $ terr $ 150 | pretty t <+> "is not a subtype of" <+> pretty u <+> " in the following context, hence the type of" <+> pretty h <+> "is wrong." 151 | $+$ (pretty t <+> "=") $$+ doc_t 152 | $+$ (pretty u <+> "=") $$+ doc_u 153 | $+$ (pretty h <+> "=") $$+ doc_h 154 | 155 | 156 | checkSort :: (n~Id,r~Id) => Term n r -> Int -> TC () 157 | checkSort t s = checkBindings t $ \c -> checkConclSort c s 158 | 159 | checkConclSort :: (n~Id,r~Id) => Conc r -> Int -> TC () 160 | checkConclSort c s = do 161 | report $ "checking " <> pretty c <> " has sort " <> pretty s 162 | s' <- Conc <$> do liftTC $ freshFrom $ ("*" ++ subscriptShow s ++ " ") 163 | addConstr s' (Universe s) $ 164 | checkConcl c (Invar, s') -- TODO: don't allocate duplicate sort names. 
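-- A minimal sketch (not part of this module, only illustrative) of how checking
-- could be driven from a resolved Micro module, assuming 'resolve' from
-- Micro.Resolve and 'text' from Display, and assuming the first component of
-- the resolved pair is the TERM section and the second its TYPE section:
--
--   checkModule :: A.Module -> (Either Doc (), [Doc])
--   checkModule m = case resolve m of
--     Left err         -> (Left (text err), [])
--     Right (trm, typ) -> typeCheck trm typ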
165 | -------------------------------------------------------------------------------- /report-radanne/poster/poster.tex: -------------------------------------------------------------------------------- 1 | 2 | %%% Local Variables: 3 | %%% mode: latex 4 | %%% TeX-master: t 5 | %%% End: 6 | \documentclass[final, xcolor=svgnames]{beamer} 7 | \usepackage[orientation=portrait,size=a0extended,scale=1.6]{beamerposter} 8 | \usepackage{graphicx} 9 | \usepackage[utf8]{inputenc} 10 | \usepackage[english]{babel} 11 | \usepackage{graphicx} 12 | \usepackage{amsmath} 13 | \usepackage{amssymb} 14 | % \input{../../PaperTools/latex/unicodedefs} 15 | 16 | \usepackage{multicol} 17 | \usepackage{mathpartir} 18 | \usepackage{listings} 19 | \usepackage[]{xspace} 20 | \usetheme{I6pd2} 21 | 22 | 23 | \colorlet{titlecolor}{tabutter} 24 | \colorlet{blockcolor}{ta3aluminium} 25 | \colorlet{fillcolor}{tagray} 26 | 27 | % \usepackage[right=1cm,left=1cm,top=1cm,bottom=1cm]{geometry} 28 | % \setlength{\parindent}{0.0cm} 29 | % \geometry{ 30 | % hmargin=2.5cm, % little modification of margins 31 | % } 32 | 33 | 34 | \newcommand{\coq}{\textsc{Coq}\xspace} 35 | \newcommand{\agda}{\textsc{Agda}\xspace} 36 | \newcommand{\ma}{\textsc{microAgda}\xspace} 37 | \newcommand{\na}{\textsc{nanoAgda}\xspace} 38 | 39 | \input{../lst} 40 | 41 | \lstset{ 42 | aboveskip=0pt, 43 | belowcaptionskip=0pt, 44 | basicstyle=\small\ttfamily, 45 | keywordstyle=\bfseries\color{ta3chameleon}, 46 | keywordstyle=[2]\bfseries\color{tascarletred}, 47 | commentstyle=\itshape\color{taplum}, 48 | identifierstyle=\color{ta3skyblue}, 49 | stringstyle=\color{taorange}, 50 | escapeinside={!}{!}, 51 | } 52 | 53 | \usepackage{tikz} 54 | \input{../tikz} 55 | \usetikzlibrary{shapes,shapes.arrows,arrows,calc,decorations.markings} 56 | 57 | \title{A sequent-calculus presentation of type-theory} 58 | \author{Gabriel Radanne --- Under the supervision of Jean-Philippe Bernardy} 59 | \institute{ENS Rennes --- Chalmers University of Technology} 60 | 61 | \begin{document} 62 | \raggedright{} 63 | \begin{frame}[shrink] 64 | \begin{block}{Dependent Types} 65 | \begin{columns}[t] 66 | \column{.55\textwidth} 67 | In most programming languages, terms and types live in two different worlds: one cannot refer to terms in types and types cannot be manipulated like terms. 68 | On the other hand, in a dependently typed programming language, types can depend on terms. 69 | This addition may sound modest at first, but it makes the language more powerful... and harder to typecheck. 70 | 71 | Here is an example of a program in \agda. \agda is a statically typed functional programming language that uses dependent types to express more properties with types and to increase safety. 72 | The code on the right defines the \lstinline[language=nanoAgda]{Nat} datatype, encoding natural numbers, and addition on natural numbers. 73 | We then use it to create the \lstinline[language=nanoAgda]{Vec} datatype. Vectors are similar to lists except that the length of the list is encoded in the type. We then use the addition on natural numbers to calculate the length of the concatenation of two vectors. 74 | With the \lstinline[language=nanoAgda]{Vec} datatype, we can use types to verify that array accesses are never out of bounds. 75 | 76 | However, the \agda typechecker exhibits some well-known issues.
77 | \column{.44\textwidth} 78 | \lstinputlisting[language=Agda]{code.agda} 79 | \end{columns} 80 | \end{block} 81 | \vspace{2cm} 82 | \begin{columns}[T] 83 | \column{.009\linewidth} 84 | \column{.495\linewidth} 85 | \begin{block}{Natural Deduction vs. Sequent calculus} 86 | \agda's type checker uses a natural deduction style: 87 | \begin{itemize} 88 | \item Inference duplicates parts of terms. 89 | \item These parts are not shared in the \agda core representation anymore. 90 | \item Typechecking must be done multiple times, causing performance penalties. 91 | \end{itemize} 92 | In sequent calculus style, every subterm is bound to a variable. 93 | \begin{figure} 94 | \vspace{30pt} 95 | \begin{tikzpicture}[yscale=3] 96 | \node[anchor=center] (lambda) at (0,0) { 97 | \texttt{$\lambda{}x$.($f$ $x$ $x$) {\color{Gray}(.\tikzcoord{bt}..)}} 98 | }; 99 | \node[anchor=center, text centered, text width=14cm] (lambda2) at (-9.5,-2) { 100 | \texttt{$f$\ {\color{Gray}(.\tikzcoord{bt2}..)\ (.\tikzcoord{bt3}..)}}\\ 101 | in natural deduction style 102 | }; 103 | \node[anchor=center, text centered, text width=15cm] (lambda3) at (9.5,-2) { 104 | \texttt{let $x'$ = {\color{Gray}(.\tikzcoord{bt4}..)}\ in $f$ $x'$ $x'$}\\ 105 | in sequent calculus style 106 | }; 107 | \end{tikzpicture} 108 | \centering 109 | \end{figure} 110 | \begin{tikzpicture}[remember picture, overlay] 111 | \node[xshift=0.31cm,yshift=-0.2cm, coordinate] (bt') at (bt) {} ; 112 | \draw[remember picture, big arrow] (bt') to[out=-120,in=60] ($(bt2)+(0.3,0.8)$) ; 113 | \draw[remember picture, big arrow] (bt') to[out=-120,in=60] ($(bt3)+(0.3,0.8)$) ; 114 | \draw[remember picture, big arrow] (bt') to[out=-60,in=120] ($(bt4)+(0.3,0.8)$) ; 115 | \end{tikzpicture} 116 | \end{block} 117 | \column{.495\linewidth} 118 | \begin{block}{Minimality} 119 | \agda currently does not have a core language that can be reasoned about and formally verified. \coq, on the other hand, is built as successive extensions of a core language (CIC). 120 | 121 | We aim to create a language that can serve as core for \agda or other dependently typed languages and that is small enough to be formally verified. 122 | \begin{figure}[htbp] 123 | \centering 124 | \begin{tikzpicture}[yscale=1] 125 | \node[draw, circle, scale=1.6] (Coq) at (8,3) {\coq} ; 126 | \node[draw, circle, scale=0.9] (CCC) at (8,-3.8) {CIC} ; 127 | \node[draw, circle, scale=1.5] (agda) at (-20,3) {\agda} ; 128 | \node[draw, ellipse, scale=0.9] (ma) at (-7,3) {\ma} ; 129 | \node[draw, ellipse, scale=0.7] (na) at (-7,-3.8) {\na} ; 130 | \draw[big arrow,thick] (Coq) -- (CCC) ; 131 | \draw[big arrow,thick] (ma) -- (na) ; 132 | \draw[big arrow,thick, loosely dashed] (agda) to[bend right] (na) ; 133 | \draw[big arrow,thick, loosely dashed] (CCC) to (na) ; 134 | \end{tikzpicture} 135 | \end{figure} 136 | \end{block} 137 | \end{columns} 138 | \begin{columns}[t] 139 | \column{.2\linewidth} 140 | \column{.60\linewidth} 141 | \begin{block}{Propagation of typing information} 142 | Natural deduction style makes propagating typing constraints to subterms difficult. 143 | 144 | For example, \agda's typechecker has no knowledge of which branch was taken while it typechecks the body of a case. 
145 | \begin{center} 146 | \begin{minipage}{0.85\textwidth} 147 | \lstinputlisting[basicstyle=\ttfamily,language=Agda]{case.agda} 148 | \end{minipage} 149 | \end{center} 150 | \end{block} 151 | \column{.2\linewidth} 152 | \end{columns} 153 | % \begin{figure}[htbp] 154 | % \centering 155 | % \begin{tikzpicture} 156 | % \node [thick, shape border uses incircle, draw=black, single arrow, minimum width=3cm, minimum height=8cm, single arrow head extend=1cm, shape border rotate=-90]{}; 157 | % \end{tikzpicture} 158 | % \vspace{-2cm} 159 | % \end{figure} 160 | \vspace{6cm} 161 | \begin{columns}[t] 162 | \column{.005\linewidth} 163 | \column{.495\linewidth} 164 | \begin{block}{\na} 165 | We propose a type-theory which can be used as a back-end for dependently-typed languages such as \agda or \coq. We call this language \na. Concretely, our goals are to have a language that is: 166 | \begin{itemize} 167 | \item A type-theory: Correctness should be expressible via types. 168 | \item Low-level: One should be able to translate high-level languages into this language while retaining properties such as run-time behaviour, complexity, etc. 169 | \item Minimal: The language should be well defined and it should be possible to formally verify the type-checking algorithm. 170 | \end{itemize} 171 | \end{block} 172 | \begin{block}{\ma} 173 | \ma is another new language with a simpler syntax than \na. This new syntax can be translated to \na without typechecking. The translation binds every intermediate term to a fresh variable and replaces the subterm by this variable. 174 | \vspace{25pt} 175 | \end{block} 176 | \column{.495\linewidth} 177 | \begin{block}{Translation from \agda to \na} 178 | Here is an example of the polymorphic identity in \agda, \na and \ma. 179 | \begin{figure} 180 | \begin{tikzpicture} 181 | \tikzstyle{foo}=[draw=Gray, very thick, rectangle, rounded corners=10pt] 182 | 183 | \node[foo, text width=16cm] (agda) at (0,0) 184 | {\lstinputlisting[language=nanoAgda]{Lam.agda}} ; 185 | \node[foo, text width=17.4cm] (ma) at (20,0) 186 | {\lstinputlisting[language=nanoAgda]{../../examples/010-Lam.ma}} ; 187 | \node[foo, scale=0.8,text width=30.5cm] (na) at (10,-12.5) 188 | {\lstinputlisting[language=nanoAgda]{../../examples/010-Lam.na}} ; 189 | 190 | \node[anchor=north] at (agda.south) {\agda} ; 191 | \node[anchor=north] at (ma.south) {\ma} ; 192 | \node[anchor=north] at (na.south) {\na} ; 193 | \end{tikzpicture} 194 | \end{figure} 195 | \end{block} 196 | \end{columns} 197 | \centering 198 | \end{frame} 199 | \end{document} 200 | -------------------------------------------------------------------------------- /report-radanne/slides/slides.tex: -------------------------------------------------------------------------------- 1 | 2 | %%% Local Variables: 3 | %%% mode: latex 4 | %%% TeX-master: t 5 | %%% End: 6 | 7 | \documentclass[xcolor=svgnames,11pt]{beamer} 8 | \usepackage[utf8]{inputenc} 9 | \usepackage[english]{babel} 10 | \usepackage{hyperref} 11 | \usepackage{mathrsfs} 12 | \usepackage{geometry} 13 | \usepackage{listings} 14 | \usepackage{graphicx} 15 | \usepackage{xspace} 16 | \usepackage{verbatim} 17 | \usepackage{textcomp} 18 | \usepackage{amsmath} 19 | \usepackage{amsfonts} 20 | \usepackage{syntax} 21 | \usepackage{amssymb} 22 | \usepackage{mathtools} 23 | \usepackage{subcaption} 24 | \usepackage{textgreek} 25 | \usetheme{Frankfurt} 26 | \usecolortheme{crane} 27 | 28 | \usepackage{../mathpartir} 29 | 30 | \beamertemplatenavigationsymbolsempty 31 | \usepackage{tikz} 32 | 
\usetikzlibrary{backgrounds,positioning,shapes, 33 | shadings,shadows,arrows,decorations.markings,calc,fit,fadings} 34 | \input{../tikz} 35 | 36 | \newcommand{\mysc}[1]{\textsc{#1}\xspace} 37 | \newcommand{\coq}{\textsc{Coq}\xspace} 38 | \newcommand{\agda}{\textsc{Agda}\xspace} 39 | \newcommand{\ma}{\textsc{microAgda}\xspace} 40 | \newcommand{\na}{\textsc{nanoAgda}\xspace} 41 | 42 | \input{../lst} 43 | \lstset{ 44 | aboveskip=0pt, 45 | belowcaptionskip=0pt, 46 | basicstyle=\small\ttfamily, 47 | escapeinside={!}{!}, 48 | } 49 | 50 | 51 | \addtobeamertemplate{footline}{ 52 | \hfill 53 | \insertframenumber/20 \hspace{1mm} 54 | \vspace{2 mm} 55 | } 56 | 57 | \title{A sequent-calculus presentation of type-theory} 58 | \author[Gabriel Radanne]{Gabriel Radanne\\ Under the supervision of Jean-Philippe Bernardy} 59 | \institute[ENS Rennes]{ENS Rennes --- Chalmers University of Technology} 60 | 61 | \begin{document} 62 | 63 | \begin{frame}[plain] 64 | \titlepage 65 | \end{frame} 66 | 67 | \begin{frame}{Plan} 68 | \tableofcontents%[section] 69 | \end{frame} 70 | 71 | \section{An Introduction to dependent types} 72 | 73 | \begin{frame}[fragile] 74 | Imagine we want to define lists, but with guarantees on the length of the list. 75 | 76 | We have the length operation: 77 | 78 | \alt<1>{$|$ \lstinline[language=caml,basicstyle=\ttfamily]{['a' ; 'b' ; 'c']} $| = 3$.}{$|$ \lstinline[language=caml,basicstyle=\ttfamily]{ 'a' :: 'b' :: 'c' :: []} $| = 3$.} 79 | 80 | \pause\pause 81 | \ \\We can define the head function like this in \mysc{OCaml}: 82 | \begin{lstlisting}[language=caml] 83 | let head x = match x with 84 | | [] -> failwith "PANIC" !\only<4->{\\\color{Gray}We want the type-system to ensure this doesn't happen.}! 85 | | (h::t) -> h 86 | \end{lstlisting}\pause 87 | 88 | \lstinline[language=caml,basicstyle=\ttfamily]{head l} 89 | should only be valid if $|$\lstinline[language=caml,basicstyle=\ttfamily]{l}$|>0$. 90 | \end{frame} 91 | 92 | \begin{frame}[fragile] 93 | Let's start by natural numbers: 94 | \begin{lstlisting}[language=Agda] 95 | data Nat : Set where 96 | Zero : Nat 97 | Succ : Nat -> Nat 98 | \end{lstlisting}\pause 99 | 100 | \begin{lstlisting}[language=Agda] 101 | three : Nat 102 | three = Succ (Succ (Succ Zero)) 103 | \end{lstlisting}\pause 104 | 105 | We can now define a special kind of list: 106 | \begin{lstlisting}[language=Agda] 107 | data Vec (A : Set) : Nat -> Set where 108 | Nil : Vec A Zero 109 | Cons : {n : Nat} -> A -> Vec A n -> Vec A (Succ n) 110 | \end{lstlisting}\pause 111 | 112 | \begin{lstlisting}[language=Agda] 113 | myVec : Vec Char three 114 | myVec = Cons 'a' (Cons 'b' (Cons 'c' Nil)) 115 | \end{lstlisting} 116 | 117 | \end{frame} 118 | 119 | \begin{frame}[fragile] 120 | \begin{lstlisting}[basicstyle=\footnotesize\ttfamily,language=Agda] 121 | data Nat : Set where 122 | Zero : Nat 123 | Succ : Nat -> Nat 124 | \end{lstlisting} 125 | \begin{lstlisting}[basicstyle=\footnotesize\ttfamily,language=Agda] 126 | data Vec (A : Set) : Nat -> Set where 127 | Nil : Vec A Zero 128 | Cons : {n : Nat} -> A -> Vec A n -> Vec A (Succ n) 129 | \end{lstlisting} 130 | 131 | \ \\The head function: 132 | \begin{lstlisting}[language=Agda] 133 | head : forall { A n } -> Vec A (Succ n) -> A 134 | head (Cons x xs) = x 135 | \end{lstlisting}\pause 136 | 137 | \ \\ 138 | \begin{lstlisting}[language=Agda] 139 | head Nil !\color{Gray}$\gets$ This is a type error.! 
140 | \end{lstlisting} 141 | \end{frame} 142 | 143 | \begin{frame}[fragile] 144 | \begin{lstlisting}[basicstyle=\footnotesize\ttfamily,language=Agda] 145 | data Nat : Set where 146 | Zero : Nat 147 | Succ : Nat -> Nat 148 | \end{lstlisting} 149 | \begin{lstlisting}[basicstyle=\footnotesize\ttfamily,language=Agda] 150 | data Vec (A : Set) : Nat -> Set where 151 | Nil : Vec A Zero 152 | Cons : {n : Nat} -> A -> Vec A n -> Vec A (Succ n) 153 | \end{lstlisting} 154 | 155 | \ \\When we concatenate two vectors, $|$\lstinline[language=caml,basicstyle=\ttfamily]{append l l'}$| = |$ 156 | \lstinline[language=caml,basicstyle=\ttfamily]{l}$| + |$ 157 | \lstinline[language=caml,basicstyle=\ttfamily]{l'}$|$.\pause 158 | \begin{lstlisting}[language=Agda] 159 | append : forall { n m A } -> 160 | Vec A n -> Vec A m -> Vec A (n + m) 161 | append Nil ys = ys 162 | append (Cons x xs) ys = Cons x (append xs ys) 163 | \end{lstlisting} 164 | \end{frame} 165 | 166 | \begin{frame}{Dependent types} 167 | What have we done? 168 | \begin{itemize} 169 | \item We defined a type with a {\bf term} as parameter: 170 | \lstinline[language=Agda,basicstyle=\ttfamily]{Vec A n}.\pause 171 | \item We used these values to enforce properties.\pause.. by type-checking.\pause 172 | \item We manipulated these values inside the type: \lstinline[language=Agda,basicstyle=\ttfamily]{Vec A (n+m)}. 173 | \end{itemize}\pause 174 | 175 | Types depend on terms. 176 | \end{frame} 177 | 178 | \begin{frame}{Dependent types} 179 | \begin{columns} 180 | \column{0.6\textwidth} 181 | Dependent types: 182 | \begin{itemize} 183 | \item<2-> Strongly related to the Curry-Howard isomorphism. 184 | \item<3-> Introduced as a type-theory by Martin-Löf in 1971. Proposed as a foundation of mathematics. 185 | \item<4-> Has gained popularity recently for theorem-proving with \coq, 186 | \item<5-> but also in programming: {\only<6>{\bf}\agda}, \mysc{Idris}, \mysc{ATS},\dots 187 | \end{itemize} 188 | \column{0.4\textwidth} 189 | \uncover<3->{ 190 | \begin{figure}[htbp] 191 | \centering 192 | \includegraphics[width=\linewidth]{../martin-lof} 193 | 194 | Martin-Löf 195 | \end{figure} 196 | } 197 | \end{columns} 198 | \end{frame} 199 | 200 | \section{Limitations of current typecheckers} 201 | \begin{frame} 202 | \frametitle{Limitations of current typecheckers} 203 | \tableofcontents[currentsection] 204 | \end{frame} 205 | 206 | \subsection{Efficiency issues} 207 | \begin{frame}{Efficiency issues} 208 | \agda's type checker uses a natural deduction style: 209 | \begin{itemize} 210 | \item Inference duplicates parts of terms. 211 | \item These parts are not shared in the \agda core representation anymore. 212 | \item Typechecking must be done multiple times, causing performance penalties.
213 | \end{itemize} 214 | \begin{figure} 215 | \begin{tikzpicture} 216 | \node[anchor=center] (lambda) at (0,0) { 217 | \texttt{$\lambda{}x$.($f$ $x$ $x$) {\color{Gray}(.\tikzcoord{bt}..)}} 218 | }; 219 | \node[anchor=center, text centered] (lambda2) at (0,-2) { 220 | \texttt{$f$\ {\color{Gray}(.\tikzcoord{bt2}..)\ (.\tikzcoord{bt3}..)}} 221 | }; 222 | \end{tikzpicture} 223 | \centering 224 | \end{figure} 225 | \begin{tikzpicture}[remember picture, overlay] 226 | \node[xshift=0.1cm,yshift=-0.2cm, coordinate] (bt') at (bt) {} ; 227 | \draw[remember picture, -latex] (bt') to[out=-90,in=90] ($(bt2)+(0.1,0.3)$) ; 228 | \draw[remember picture, -latex] (bt') to[out=-90,in=90] ($(bt3)+(0.1,0.3)$) ; 229 | \end{tikzpicture} 230 | \end{frame} 231 | 232 | \subsection{The ``case decomposition'' issue} 233 | \begin{frame}{The ``case decomposition'' issue} 234 | Natural deduction style makes propagating typing constraints to subterms difficult. 235 | 236 | For example, \agda's typechecker has no knowledge of which branch was taken while it typechecks the body of a case. 237 | \begin{center} 238 | \begin{minipage}{0.9\textwidth} 239 | \lstinputlisting[basicstyle=\ttfamily,language=Agda]{../poster/case.agda} 240 | \end{minipage} 241 | \end{center} 242 | \end{frame} 243 | 244 | \subsection{The monolithic approach} 245 | \begin{frame}{The monolithic approach} 246 | \agda currently does not have a core language that can be reasoned about and formally verified. 247 | 248 | \coq, on the other hand, is built as successive extensions of a core language (CIC). 249 | \begin{figure}[htbp] 250 | \begin{tikzpicture}[yscale=0.4, xscale=0.4] 251 | \node[draw, circle, scale=1.6] (Coq) at (8,3) {\coq} ; 252 | \node[draw, circle, scale=0.9] (CCC) at (8,-3.8) {CIC} ; 253 | \node[draw, circle, scale=1.5] (agda) at (-7,3) {\agda} ; 254 | % \node[draw, ellipse, scale=0.9] (ma) at (-7,3) {\ma} ; 255 | \node<2>[draw, ellipse, scale=0.7] (na) at (-7,-3.8) {\na} ; 256 | \draw[-latex,thick] (Coq) -- (CCC) ; 257 | % \draw[-latex,thick] (ma) -- (na) ; 258 | \draw<2>[-latex,thick, loosely dashed] (agda) to (na) ; 259 | \draw<2>[-latex,thick, loosely dashed] (CCC) to (na) ; 260 | \end{tikzpicture} 261 | \centering 262 | \end{figure} 263 | \end{frame} 264 | 265 | \section{\na and \ma} 266 | 267 | \subsection{Goals} 268 | \begin{frame}{Goals} 269 | Our goals are to have a language that is: 270 | \begin{itemize} 271 | \item<1-> A type-theory: Correctness should be expressible via types. 272 | \item<2-> Low-level: One should be able to translate high-level languages into this language while retaining properties such as run-time behaviour, complexity, etc. 273 | \item<3-> Minimal: The language should be well defined and it should be possible to formally verify the type-checking algorithm. 274 | \end{itemize} 275 | \end{frame} 276 | 277 | \subsection{\na} 278 | \begin{frame}[fragile]{\na} 279 | \begin{columns} 280 | \column{0.5\textwidth} 281 | \begin{lstlisting}[language=Agda] 282 | id : !\tikzcoord{a1}!(a : Set) -> !\tikzcoord{b1}!a -> !\tikzcoord{c1}!a!\tikzcoord{a2}! 283 | id _ x = x 284 | \end{lstlisting} 285 | \centering in \agda 286 | \column{0.6\textwidth} 287 | \pause 288 | \begin{lstlisting}[basicstyle=\scriptsize\ttfamily,language=nanoAgda] 289 | TERM 290 | f = \a -> ( 291 | f' = \x -> (r=x; r); 292 | f' ) ; 293 | f 294 | TYPE 295 | set = *0 ; 296 | !\tikzcoord{z1}!f_ty = (a : set) -> ( 297 | !\tikzcoord{x1}!a' = a ; 298 | a2a = (x : a') -> !\tikzcoord{y1}!a';!\tikzcoord{x2}! 299 | a2a!\tikzcoord{x3}! 
300 | ) ; 301 | f_ty!\tikzcoord{z2}! 302 | \end{lstlisting} 303 | \centering in \na 304 | \end{columns} 305 | \begin{tikzpicture}[remember picture, overlay] 306 | \coordinate (a2') at ($(a2)+(0,1.3mm)$) {} ; 307 | \coordinate (x1') at ($(x1)+(0,1.3mm)$) {} ; 308 | \coordinate (z1') at ($(z1)+(0,1.3mm)$) {} ; 309 | \coordinate (x2') at ($(x2)+(0,1.3mm)$) {} ; 310 | \node<3>[opacity=0.4, fill=yellow, fit=(a1) (a2')] {}; 311 | \node<3>[opacity=0.4, fill=yellow, fit=(z1') (z2) (x2)] {}; 312 | \node<4>[opacity=0.4, fill=yellow, fit=(b1) (a2')] {}; 313 | \node<4>[opacity=0.4, fill=yellow, fit=(x1') (x2) (x3)] {}; 314 | \node<5>[opacity=0.4, fill=yellow, fit=(c1) (a2')] {}; 315 | \node<5>[opacity=0.4, fill=yellow, fit=(y1) (x2')] {}; 316 | \end{tikzpicture} 317 | \end{frame} 318 | 319 | \begin{frame}{Sequent calculus} 320 | There are various definitions of sequent calculus. Here, we mean that every intermediate result or sub-term are bound to a variable. 321 | % \alt<2->{ 322 | \begin{figure} 323 | \begin{tikzpicture}[yscale=1.7] 324 | \node (lambda) at (0,0) { 325 | \texttt{$\lambda{}x$.($f$ $x$ $x$) {\color{Gray}(.\tikzcoord{bt}..)}} 326 | }; 327 | \node[text centered, text width=5cm] (lambda2) at (-3,-1.5) { 328 | \texttt{$f$\ {\color{Gray}(.\tikzcoord{bt2}..)\ (.\tikzcoord{bt3}..)}}\\ 329 | in natural deduction style 330 | }; 331 | \uncover<2->{\node[text centered, text width=5cm] (lambda3) at (3,-1.5) { 332 | \texttt{let $x'$ = {\color{Gray}(.\tikzcoord{bt4}..)}\ in $f$ $x'$ $x'$}\\ 333 | in sequent calculus style 334 | };} 335 | \end{tikzpicture} 336 | \centering 337 | \end{figure} 338 | \begin{tikzpicture}[remember picture, overlay] 339 | \node[xshift=0.1cm,yshift=-0.2cm, coordinate] (bt') at (bt) {} ; 340 | \draw[remember picture, -latex] (bt') to[out=-90,in=80] ($(bt2)+(0.1,0.3)$) ; 341 | \draw[remember picture, -latex] (bt') to[out=-90,in=60] ($(bt3)+(0.1,0.3)$) ; 342 | \draw<2->[remember picture, -latex] (bt') to[out=-90,in=90] ($(bt4)+(0.1,0.3)$) ; 343 | \end{tikzpicture} 344 | % }{ 345 | % \begin{figure}[htbp] 346 | % \lstinputlisting[language=Caml]{seq.ml} 347 | % \centering 348 | % \end{figure} 349 | % } 350 | \end{frame} 351 | 352 | \begin{frame}[shrink,fragile]{Presentation of the language} 353 | \begin{itemize} 354 | \item \textbf{Variables}: 355 | \textbf{Hypotheses} $x$ and \textbf{Conclusions} $\overline{x}$ 356 | \end{itemize} 357 | \begin{center} 358 | \setlength{\tabcolsep}{10pt} 359 | \vspace{-10pt} 360 | \begin{tabular}{r l l l} 361 | \uncover<2->{{\bf Functions} & $\lambda x. 
t$ & $(f\ \overline{x})$ & $(x:\overline{Y})\to T$ \\} 362 | \uncover<3->{{\bf Pairs} & 363 | $(\overline{x},\overline{y})$ & $x.1$ &\ $(x:\overline{Y})\times T$\\} 364 | \uncover<4->{{\bf Enumerations} & $`l$ & \texttt{case} & $\{`l_1,`l_2,\dots\}$\\} 365 | \end{tabular} 366 | \vspace{-30pt} 367 | \end{center} 368 | \begin{itemize} 369 | \item<5-> {\bf Constructions and Destructions}:\\ 370 | {\tt let} $\overline{x} = c$ \ and\ {\tt let} $x = d$ 371 | \item<6-> {\bf Universes}:\\ 372 | $\star_i$ with $i\in\mathbb{N}$ \qquad $\star_0$ is equivalent to \texttt{Set} 373 | \item<7-> {\bf Relation between Conclusions and Hypotheses}:\\ 374 | \texttt{let} $\overline{x}$ = $y$ {\color{Gray}A conclusion can be defined as an hypothesis.} 375 | 376 | \uncover<8->{\texttt{let} $x$ = $(\overline{y}:\overline{Z})$ {\color{Gray}The cut construction.}} 377 | \end{itemize} 378 | \end{frame} 379 | 380 | \subsection{\ma} 381 | \begin{frame}[fragile]{\ma} 382 | A new syntax, easier to manipulate, and that can be translated easily into \na.\pause 383 | 384 | \ \\ 385 | \begin{columns} 386 | \column{0.5\textwidth} 387 | \begin{lstlisting}[language=Agda] 388 | id : !\tikzcoord{a1}!(a : Set) -> !\tikzcoord{b1}!a -> !\tikzcoord{c1}!a!\tikzcoord{a2}! 389 | id _ x = x 390 | \end{lstlisting} 391 | \centering in \agda 392 | 393 | \ \\ 394 | \begin{lstlisting}[language=nanoAgda] 395 | TERM 396 | \a -> \x -> x 397 | 398 | TYPE 399 | !\tikzcoord{e1}!(a : *1) -> !\tikzcoord{f1}!(x : a) -> !\tikzcoord{g1}!a!\tikzcoord{e2}! 400 | \end{lstlisting} 401 | \centering in \ma 402 | \column{0.6\textwidth} 403 | \pause 404 | \begin{lstlisting}[basicstyle=\scriptsize\ttfamily,language=nanoAgda] 405 | TERM 406 | f = \a -> ( 407 | f' = \x -> (r=x; r); 408 | f' ) ; 409 | f 410 | TYPE 411 | set = *0 ; 412 | !\tikzcoord{z1}!f_ty = (a : set) -> ( 413 | !\tikzcoord{x1}!a' = a ; 414 | a2a = (x : a') -> !\tikzcoord{y1}!a';!\tikzcoord{x2}! 415 | a2a!\tikzcoord{x3}! 416 | ) ; 417 | f_ty!\tikzcoord{z2}! 418 | \end{lstlisting} 419 | \centering in \na 420 | \end{columns} 421 | \begin{tikzpicture}[remember picture, overlay] 422 | \coordinate (a2') at ($(a2)+(0,1.3mm)$) {} ; 423 | \coordinate (e2') at ($(e2)+(0,1.3mm)$) {} ; 424 | \coordinate (x1') at ($(x1)+(0,1.3mm)$) {} ; 425 | \coordinate (z1') at ($(z1)+(0,1.3mm)$) {} ; 426 | \coordinate (x2') at ($(x2)+(0,1.3mm)$) {} ; 427 | \node<4>[opacity=0.4, fill=yellow, fit=(e1) (e2')] {}; 428 | \node<4>[opacity=0.4, fill=yellow, fit=(a1) (a2')] {}; 429 | \node<4>[opacity=0.4, fill=yellow, fit=(z1') (z2) (x2)] {}; 430 | \node<5>[opacity=0.4, fill=yellow, fit=(f1) (e2')] {}; 431 | \node<5>[opacity=0.4, fill=yellow, fit=(b1) (a2')] {}; 432 | \node<5>[opacity=0.4, fill=yellow, fit=(x1') (x2) (x3)] {}; 433 | \node<6>[opacity=0.4, fill=yellow, fit=(g1) (e2')] {}; 434 | \node<6>[opacity=0.4, fill=yellow, fit=(c1) (a2')] {}; 435 | \node<6>[opacity=0.4, fill=yellow, fit=(y1) (x2')] {}; 436 | \end{tikzpicture} 437 | \end{frame} 438 | 439 | \subsection{Results} 440 | \begin{frame}{Results} 441 | \begin{itemize} 442 | \item We implemented a typechecker and evaluator for \na. 443 | \item We introduced a new intermediate language: \ma. 444 | \item We exhibited some examples that don't typecheck in \agda but typecheck in \na. 445 | \end{itemize} 446 | \end{frame} 447 | 448 | \section{Conclusion} 449 | \begin{frame}{Future work} 450 | \begin{itemize} 451 | \item Precisely evaluate the efficiency of this new approach. 452 | \item Prove subject-reduction of \na (in \coq). 453 | \item Introduce recursion. 
454 | \item Experiment with extensions of the type system (linear, colors,\dots). 455 | \end{itemize} 456 | \end{frame} 457 | 458 | \begin{frame}[plain] 459 | \begin{center} 460 | \Huge Questions ? 461 | \end{center} 462 | \end{frame} 463 | 464 | 465 | \begin{frame}[plain] 466 | \begin{center} 467 | \Huge Questions ? 468 | \end{center} 469 | \end{frame} 470 | 471 | 472 | 473 | 474 | 475 | 476 | \begin{frame}[fragile]{How to encode sum types} 477 | \begin{columns} 478 | \column{0.5\textwidth} 479 | \begin{lstlisting}[language=Agda] 480 | data MySumtype (s : Set) : Set where 481 | Foo : s -> MySumtype s 482 | Bar : MySumtype s 483 | \end{lstlisting} 484 | \centering in \agda 485 | \column{0.6\textwidth}\pause 486 | \begin{lstlisting}[basicstyle=\scriptsize\ttfamily,language=nanoAgda] 487 | TERM 488 | Unit_t = { 'unit } ; 489 | Unit_ty = *0 ; 490 | Unit = Unit_t : Unit_ty ; 491 | f = \s -> ( 492 | tag = { 'Foo , 'Bar } ; 493 | f' = (c : tag) * 494 | (case c of { 495 | 'Foo -> s' = s ; s' . 496 | 'Bar -> Unit' = Unit ; 497 | Unit' 498 | }) ; 499 | f') ; 500 | f 501 | TYPE 502 | star0 = *0 ; 503 | f_ty = ( s : star0) -> star0 ; 504 | f_ty 505 | \end{lstlisting} 506 | \end{columns} 507 | \end{frame} 508 | 509 | \begin{frame}[fragile]{How to encode stupidly simple sum types} 510 | \begin{columns} 511 | \column{0.56\textwidth} 512 | \begin{lstlisting}[language=Agda] 513 | data SimpleSum : Set where 514 | !\tikzcoord{x1}!Foo : !\tikzcoord{y1}!Nat -> SimpleSum 515 | Bar!\tikzcoord{x2}! : Nat!\tikzcoord{y2}! -> SimpleSum 516 | \end{lstlisting} 517 | \centering in \agda 518 | \column{0.56\textwidth}\pause 519 | \begin{lstlisting}[language=nanoAgda] 520 | TERM 521 | !\tikzcoord{a1}!{ 'Foo ; 'Bar }!\tikzcoord{a2}! * !\tikzcoord{b1}!Nat!\tikzcoord{b2}! 522 | Type 523 | *0 524 | \end{lstlisting} 525 | \centering in \ma 526 | \end{columns} 527 | \begin{tikzpicture}[remember picture, overlay] 528 | \coordinate (a1') at ($(a1)+(0,1.3mm)$) {} ; 529 | \coordinate (b1') at ($(b1)+(0,1.3mm)$) {} ; 530 | \coordinate (x1') at ($(x1)+(0,1.3mm)$) {} ; 531 | \coordinate (y1') at ($(y1)+(0,1.3mm)$) {} ; 532 | \node<3>[opacity=0.4, fill=yellow, fit=(a1') (a2)] {}; 533 | \node<3>[opacity=0.4, fill=green, fit=(b1') (b2)] {}; 534 | \node<3>[opacity=0.4, fill=yellow, fit=(x1') (x2)] {}; 535 | \node<3>[opacity=0.4, fill=green, fit=(y1') (y2)] {}; 536 | \end{tikzpicture} 537 | \end{frame} 538 | 539 | \begin{frame}[fragile]{How to encode sum types -- $2^{nd}$ edition} 540 | \begin{columns} 541 | \column{0.5\textwidth} 542 | \begin{lstlisting}[language=Agda] 543 | data MySumtype (s : Set) : Set where 544 | !\tikzcoord{a1}!Foo : !\tikzcoord{b1}!s -> MySumtype s!\tikzcoord{b3}! 545 | Bar!\tikzcoord{a2}! : !\tikzcoord{b2}!MySumtype s 546 | \end{lstlisting} 547 | \centering in \agda 548 | \column{0.56\textwidth}\pause 549 | \begin{lstlisting}[basicstyle=\scriptsize\ttfamily,language=nanoAgda] 550 | TERM 551 | Unit = { 'unit } : *0 ; 552 | \s -> !\tikzcoord{x1}!( c : { 'Foo , 'Bar } )!\tikzcoord{x2}! * 553 | ( case c of { 554 | !\tikzcoord{y1}!'Foo -> s. 555 | 'Bar -> Unit.!\tikzcoord{y2}! 
556 | } ) 557 | TYPE 558 | *0 -> *0 559 | \end{lstlisting} 560 | \centering in \ma 561 | \end{columns} 562 | \begin{tikzpicture}[remember picture, overlay] 563 | \coordinate (a1') at ($(a1)+(0,1.3mm)$) {} ; 564 | \coordinate (b1') at ($(b1)+(0,1.3mm)$) {} ; 565 | \coordinate (x1') at ($(x1)+(0,1.3mm)$) {} ; 566 | \coordinate (y1') at ($(y1)+(0,1.3mm)$) {} ; 567 | \node<3>[opacity=0.4, fill=yellow, fit=(a1') (a2)] {}; 568 | \node<3>[opacity=0.4, fill=green, fit=(b1') (b2) (b3)] {}; 569 | \node<3>[opacity=0.4, fill=yellow, fit=(x1') (x2)] {}; 570 | \node<3>[opacity=0.4, fill=green, fit=(y1') (y2)] {}; 571 | \end{tikzpicture} 572 | \end{frame} 573 | 574 | 575 | 576 | \begin{frame}[plain] 577 | \begin{center} 578 | \Huge Questions ? 579 | \end{center} 580 | \end{frame} 581 | 582 | 583 | \begin{frame}[plain]{Environment extension} 584 | \small 585 | \begin{align*}\ensuremath{\Gamma{{}}}&: \ensuremath{\text{x}\mapsto{}\overline{\text{y}}}&\ensuremath{\text{The context heap, containing the type of hypotheses.}}\\\ensuremath{\gamma_c{{}}}&: \ensuremath{\overline{\text{x}}\mapsto{}\text{c}}&\ensuremath{\text{The heap from conclusion to constructions.}}\\\ensuremath{\gamma_a{{}}}&: \ensuremath{\text{x}\mapsto{}\text{y}}&\ensuremath{\text{The heap for aliases on hypotheses.}}\\\ensuremath{\gamma_d{{}}}&: \ensuremath{\text{x}\mapsto{}\text{d}}&\ensuremath{\text{The heap from hypotheses to cuts and destructions.}}\end{align*} 586 | \begin{align*}\ensuremath{\gamma{{}}+(\text{x}:\overline{\text{Y}})}&= \ensuremath{\gamma{{}}} \ensuremath{\text{ with }} \ensuremath{\Gamma{{}}\gets{}(\text{x}:\overline{\text{Y}})}\end{align*} 587 | \begin{align*}\ensuremath{\gamma{{}}+(\text{x}=\text{d})}&= \ensuremath{\gamma{{}}} \ensuremath{\text{ with }} \ensuremath{\gamma_a{{}}\gets{}(\text{x}=\text{y})}&\ensuremath{\mathsf{if }\:(\text{y}=\text{d})\in{}\gamma_d{{}}}\\&= \ensuremath{\gamma{{}}} \ensuremath{\text{ with }} \ensuremath{\gamma_d{{}}\gets{}(\text{x}=\text{d})}&\ensuremath{\text{otherwise}}\end{align*} 588 | \begin{align*}\ensuremath{\gamma{{}}} + \ensuremath{(\overline{\text{x}}=\text{c})}&= \ensuremath{\gamma{{}}} \ensuremath{\text{ with }} \ensuremath{\gamma_c{{}}\gets{}(\overline{\text{x}}=\text{c})}\end{align*} 589 | \begin{align*}\ensuremath{\gamma{{}}} + \ensuremath{(\text{`l}=\text{x})}&= \ensuremath{\gamma{{}}}&\ensuremath{\mathsf{if }\:(\text{`l}=\text{x})\in{}\gamma_c{{}}}\\&= \ensuremath{\bot{{}}}&\ensuremath{\mathsf{if }\:(\text{`m}=\text{x})\in{}\gamma_c{{}}} \ensuremath{\text{ for }} \ensuremath{\text{`l}\neq{}\text{`m}}\\&= \ensuremath{\gamma{{}}} \ensuremath{\text{ with }} \ensuremath{\gamma_c{{}}\gets{}(\text{`l}=\text{x})}&\ensuremath{\text{otherwise}}\end{align*} 590 | 591 | \end{frame} 592 | 593 | \begin{frame}[plain]{Reduction rules} 594 | \tiny 595 | \begin{mathpar}\inferrule[\tiny{}EvalCase]{\ensuremath{\text{h}{}(\text{x})=(\text{`l}_{\text{i}}:\text{\_})}\\\ensuremath{\text{h}+(\text{`l}_{\text{i}}=\text{x})\vdash{}\text{t}_{\text{i}}\rightsquigarrow{}\text{h}'\vdash{}\overline{\text{x}}}}{\ensuremath{\text{h}\vdash{}\text{\texttt{case}}\:\text{x}\:\text{\texttt{of}}\:\{\text{\ensuremath{\text{`l}_{\text{i}}\mapsto{}\text{t}_{\text{i}}}}\}\rightsquigarrow{}\text{h}'\vdash{}\overline{\text{x}}}} \and{}\inferrule[\tiny{}AbstractCase]{\ensuremath{\text{h}{}(\text{x})\neq(\text{`l}_{\text{i}}:\text{\_})}\\\forall \ensuremath{\text{i}} \quad{} 
\ensuremath{\text{h}+(\text{`l}_{\text{i}}=\text{x})\vdash{}\text{t}_{\text{i}}\rightsquigarrow{}\text{h}'_{\text{i}}\vdash{}\overline{\text{x}}_{\text{i}}}}{\ensuremath{\text{h}\vdash{}\text{\texttt{case}}\:\text{x}\:\text{\texttt{of}}\:\{\text{\ensuremath{\text{`l}_{\text{i}}\mapsto{}\text{t}_{\text{i}}}}\}\rightsquigarrow{}\{\text{h}_{\text{i}}\vdash{}\overline{\text{x}}_{\text{i}}\}}} \and{}\inferrule[\tiny{}EvalDestr]{\ensuremath{\text{h}\vdash{}\text{d}\rightsquigarrow{}\text{h}'\vdash{}\text{t}'}\\\ensuremath{\text{h}'+(\text{x}=\text{t}')\vdash{}\text{t}\rightsquigarrow{}\text{h}''\vdash{}\overline{\text{x}}}}{\ensuremath{\text{h}\vdash{}\text{\texttt{let}}\:\text{x}=\text{d}\:\text{\texttt{in}}\:\text{t}\rightsquigarrow{}\text{h}''\vdash{}\overline{\text{x}}}} \and{}\inferrule[\tiny{}AddDestr]{\ensuremath{\text{h}\vdash{}\text{d}\not \rightsquigarrow{}\text{h}'\vdash{}\text{t}'}\\\ensuremath{\text{h}+(\text{x}=\text{d})\vdash{}\text{t}\rightsquigarrow{}\text{h}'\vdash{}\overline{\text{x}}}}{\ensuremath{\text{h}\vdash{}\text{\texttt{let}}\:\text{x}=\text{d}\:\text{\texttt{in}}\:\text{t}\rightsquigarrow{}\text{h}'\vdash{}\overline{\text{x}}}} \and{}\inferrule[\tiny{}AddConstr]{\ensuremath{\text{h}+(\overline{\text{x}}=\text{c})\vdash{}\text{t}\rightsquigarrow{}\text{h}'\vdash{}\overline{\text{x}}}}{\ensuremath{\text{h}\vdash{}\text{\texttt{let}}\:\overline{\text{x}}=\text{c}\:\text{\texttt{in}}\:\text{t}\rightsquigarrow{}\text{h}'\vdash{}\overline{\text{x}}}} \and{}\inferrule[\tiny{}Concl]{ }{\ensuremath{\text{h}\vdash{}\overline{\text{x}}\rightsquigarrow{}\text{h}\vdash{}\overline{\text{x}}}} \end{mathpar} 596 | \begin{mathpar}\inferrule[\tiny{}EvalProj\ensuremath{_{1}}]{\ensuremath{\text{h}{}(\text{y})=((\overline{\text{z}},\overline{\text{w}}):\text{\_})}}{\ensuremath{\text{h}\vdash{}\text{y}\mathsf{.1}\rightsquigarrow{}\text{h}\vdash{}\overline{\text{z}}}} \and{}\inferrule[\tiny{}EvalProj\ensuremath{_{2}}]{\ensuremath{\text{h}{}(\text{y})=((\overline{\text{z}},\overline{\text{w}}):\text{\_})}}{\ensuremath{\text{h}\vdash{}\text{y}\mathsf{.2}\rightsquigarrow{}\text{h}\vdash{}\overline{\text{w}}}} \and{}\inferrule[\tiny{}EvalApp]{\ensuremath{\text{h}{}(\text{y})=(\lambda{{}}{}\text{w}.\text{t}:\text{\_})}\\\ensuremath{\text{h}\vdash{}\text{t}{}[\overline{\text{z}}/\text{w}]\rightsquigarrow{}\text{h}'\vdash{}\overline{\text{x}}}}{\ensuremath{\text{h}\vdash{}(\text{y}\:\overline{\text{z}})\rightsquigarrow{}\text{h}'\vdash{}\overline{\text{x}}}} \end{mathpar} 597 | \end{frame} 598 | 599 | \begin{frame}[plain]{Equality rules} 600 | \tiny 601 | 602 | \begin{align*}\ensuremath{\gamma{{}}\vdash{}\text{\texttt{let}}\:\text{x}=\text{d}\:\text{\texttt{in}}\:\text{t}=\text{t}'}&\longrightarrow{} \ensuremath{\gamma'{{}}+(\text{x}=\text{t}'')\vdash{}\text{t}=\text{t}'}\\\ensuremath{\gamma{{}}\vdash{}\text{\texttt{let}}\:\overline{\text{x}}=\text{c}\:\text{\texttt{in}}\:\text{t}=\text{t}'}&\longrightarrow{} \ensuremath{\gamma{{}}+(\overline{\text{x}}=\text{c})\vdash{}\text{t}=\text{t}'}\\\ensuremath{\gamma{{}}\vdash{}\text{\texttt{case}}\:\text{x}\:\text{\texttt{of}}\:\{\text{\ensuremath{\text{`l}_{\text{i}}\mapsto{}\text{t}_{\text{i}}}}\}=\text{t}}&\longrightarrow{} \ensuremath{\forall{{}}} \ensuremath{\text{i}} \quad{} \ensuremath{\gamma{{}}+(\text{x}=\text{`l}_{\text{i}})\vdash{}\text{t}_{\text{i}}=\text{t}}\\\ensuremath{\gamma{{}}\vdash{}\overline{\text{x}}=\overline{\text{y}}}&\longrightarrow{} 
\ensuremath{\overline{\text{x}}\equiv{}\overline{\text{y}}\land\gamma{{}}\vdash{}\gamma_c{{}}{}(\overline{\text{x}})=\gamma_c{{}}{}(\overline{\text{y}})}\end{align*} 603 | \begin{align*}\ensuremath{\gamma{{}}\vdash{}\text{`l}=\text{`l}}&\longrightarrow{} true\\\ensuremath{\gamma{{}}\vdash{}\star{}_{\text{i}}=\star{}_{\text{j}}}&\longrightarrow{} \ensuremath{\text{i}=\text{j}}\\\ensuremath{\gamma{{}}\vdash{}\text{x}=\text{y}}&\longrightarrow{} \ensuremath{\text{x}\cong{}\text{y}}\\\ensuremath{\gamma{{}}\vdash{}\lambda{{}}{}\text{x}.\text{t}=\lambda{{}}{}\text{y}.\text{t}'}&\longrightarrow{} \ensuremath{\gamma{{}}+(\text{x}=\text{y})\vdash{}\text{t}=\text{t}'}\\\ensuremath{\gamma{{}}\vdash{}(\overline{\text{x}},\overline{\text{y}})=(\overline{\text{x}'},\overline{\text{y}'})}&\longrightarrow{} \ensuremath{\gamma{{}}\vdash{}\overline{\text{x}}=\overline{\text{x}'}\land\gamma{{}}\vdash{}\overline{\text{y}}=\overline{\text{y}'}} \\\ensuremath{\gamma{{}}\vdash{}(\text{x}:\overline{\text{y}})\to{}\text{t}=(\text{x}':\overline{\text{y}'})\to{}\text{t}'}&\longrightarrow{} \ensuremath{\gamma{{}}\vdash{}\overline{\text{y}}=\overline{\text{y}'}\land\gamma{{}}+(\text{x}=\text{x}')\vdash{}\text{t}=\text{t}'} \\\ensuremath{\gamma{{}}\vdash{}(\text{x}:\overline{\text{y}})\times{}\text{t}=(\text{x}':\overline{\text{y}'})\times{}\text{t}'}&\longrightarrow{} \ensuremath{\gamma{{}}\vdash{}\overline{\text{y}}=\overline{\text{y}'}\land\gamma{{}}+(\text{x}=\text{x}')\vdash{}\text{t}=\text{t}'} \\\ensuremath{\gamma{{}}\vdash{}\{\text{`l}_{\text{i}}\}=\{\text{`m}_{\text{i}}\}}&\longrightarrow{} \ensuremath{\forall{{}}} \ensuremath{\text{i}} \quad{} \ensuremath{\text{`l}_{\text{i}}=\text{`m}_{\text{i}}}\end{align*} 604 | \begin{align*}\ensuremath{\gamma{{}}\vdash{}\lambda{{}}{}\text{x}.\text{t}=\text{y}}&\longrightarrow{} \ensuremath{\gamma{{}}+(\overline{\text{x}}=\text{x})+(\text{z}=\text{y}\:\overline{\text{x}})\vdash{}\text{t}=\text{z}}\\\ensuremath{\gamma{{}}\vdash{}(\overline{\text{x}},\overline{\text{x}'})=\text{y}}&\longrightarrow{} \ensuremath{\gamma{{}}+(\text{z}=\text{y}\mathsf{.1})\vdash{}\overline{\text{x}}=\text{z}\land\gamma{{}}+(\text{z}=\text{y}\mathsf{.2})\vdash{}\overline{\text{x}'}=\text{z}} \end{align*} 605 | 606 | \end{frame} 607 | 608 | 609 | \begin{frame}[plain]{Typing rules} 610 | \tiny 611 | \begin{figure}[!h] 612 | \begin{mathpar}\inferrule[\tiny{}Case]{\ensuremath{\forall{{}}\:\text{i}} \quad{} \ensuremath{\gamma{{}}+(\text{`l}_{\text{i}}=\text{x})\vdash{}\text{t}_{\text{i}}\leftleftarrows{}\text{T}}\\\ensuremath{\Gamma{{}}} (x) = \ensuremath{\{\text{`l}_{\text{i}}\}}}{\ensuremath{\gamma{{}}\vdash{}\text{\texttt{case}}\:\text{x}\:\text{\texttt{of}}\:\{\text{\ensuremath{\text{`l}_{\text{i}}\mapsto{}\text{t}_{\text{i}}}}\}\leftleftarrows{}\text{T}}} \and{}\inferrule[\tiny{}Constr]{\ensuremath{\gamma{{}}+(\overline{\text{x}}=\text{c})\vdash{}\text{t}\leftleftarrows{}\text{T}}}{\ensuremath{\gamma{{}}\vdash{}\text{\texttt{let}}\:\overline{\text{x}}=\text{c}\:\text{\texttt{in}}\:\text{t}\leftleftarrows{}\text{T}}} \and{}\inferrule[\tiny{}Concl]{\ensuremath{\gamma_c{{}}} (\ensuremath{\overline{\text{x}}}) = \ensuremath{\text{c}}\\\ensuremath{\gamma{{}}\vdash{}\text{c}\leftleftarrows{}\text{T}}}{\ensuremath{\gamma{{}}\vdash{}\overline{\text{x}}\leftleftarrows{}\text{T}}} 
\and{}\inferrule[\tiny{}Destr]{\ensuremath{\gamma{{}}\vdash{}\text{d}\rightrightarrows{}\text{T}'}\\\ensuremath{\gamma{{}}\vdash{}\text{d}\rightsquigarrow{}\gamma'{{}}\vdash{}\text{t}'}\\\ensuremath{\gamma'{{}}+(\text{x}=\text{t}')+(\text{x}:\text{T}')\vdash{}\text{t}\leftleftarrows{}\text{T}}}{\ensuremath{\gamma{{}}\vdash{}\text{\texttt{let}}\:\text{x}=\text{d}\:\text{\texttt{in}}\:\text{t}\leftleftarrows{}\text{T}}} \and{}\inferrule[\tiny{}Eval]{\ensuremath{\gamma{{}}\vdash{}\text{T}\rightsquigarrow{}\{\gamma'{{}}_{\text{i}}\vdash{}\overline{\text{X}}_{\text{i}}\}}\\\forall \ensuremath{\text{i}} \quad{} \ensuremath{\gamma'{{}}_{\text{i}}\vdash{}\overline{\text{X}}_{\text{i}}\leftleftarrows{}\overline{\text{X}}}}{\ensuremath{\gamma{{}}\vdash{}\text{t}\leftleftarrows{}\text{T}}} \end{mathpar}\caption{Typechecking a term: \ensuremath{\gamma{{}}\vdash{}\text{t}\leftleftarrows{}\text{T}}}\label{24}\vspace{-0.3cm}\end{figure} 613 | \end{frame} 614 | 615 | \begin{frame}[plain]{Typing rules} 616 | \tiny 617 | \begin{figure}[!h] 618 | \begin{mathpar}\inferrule[\tiny{}App]{\ensuremath{\Gamma{{}}{}(\text{y})=(\text{z}:\overline{\text{X}})\to{}\text{T}}\\\ensuremath{\gamma{{}}\vdash{}\overline{\text{z}}\leftleftarrows{}\overline{\text{X}}}}{\ensuremath{\gamma{{}}\vdash{}\text{y}\:\overline{\text{z}}\rightrightarrows{}\text{T}}} \and{}\inferrule[\tiny{}Proj\ensuremath{_{1}}]{\ensuremath{\Gamma{{}}{}(\text{y})=(\text{z}:\overline{\text{X}})\times{}\text{T}}}{\ensuremath{\gamma{{}}\vdash{}\text{y}\mathsf{.1}\rightrightarrows{}\overline{\text{X}}}} \and{}\inferrule[\tiny{}Proj\ensuremath{_{2}}]{\ensuremath{\Gamma{{}}{}(\text{y})=(\text{z}:\overline{\text{X}})\times{}\text{T}}}{\ensuremath{\gamma{{}}\vdash{}\text{y}\mathsf{.2}\rightrightarrows{}\text{T}}} \and{}\inferrule[\tiny{}Cut]{\ensuremath{\gamma{{}}\vdash{}\overline{\text{x}}\leftleftarrows{}\overline{\text{X}}}}{\ensuremath{\gamma{{}}\vdash{}(\overline{\text{x}}:\overline{\text{X}})\rightrightarrows{}\overline{\text{X}}}} \end{mathpar}\caption{Inferring the type of a destruction: \ensuremath{\gamma{{}}\vdash{}\text{d}\rightrightarrows{}\text{T}}.}\label{25}\vspace{-0.3cm}\end{figure} 619 | 620 | \begin{figure}[!h] 621 | \begin{mathpar}\inferrule[\tiny{}TyDestr]{\ensuremath{\gamma{{}}+(\text{x}=\text{d})\vdash{}\text{c}\leftleftarrows{}\text{T}}}{\ensuremath{\gamma{{}}\vdash{}\text{c}\leftleftarrows{}\text{\texttt{let}}\:\text{x}=\text{d}\:\text{\texttt{in}}\:\text{T}}} \and{}\inferrule[\tiny{}TyConstr]{\ensuremath{\gamma{{}}+(\overline{\text{x}}=\text{c})\vdash{}\text{c}\leftleftarrows{}\text{T}}}{\ensuremath{\gamma{{}}\vdash{}\text{c}\leftleftarrows{}\text{\texttt{let}}\:\overline{\text{x}}=\text{c}\:\text{\texttt{in}}\:\text{T}}} \and{}\inferrule[\tiny{}TyCase]{\ensuremath{\forall{{}}\:\text{i}} \quad{} \ensuremath{\gamma{{}}+(\text{`l}_{\text{i}}=\text{x})\vdash{}\text{c}\leftleftarrows{}\text{T}_{\text{i}}}\\\ensuremath{\gamma{{}}\vdash{}\text{x}\rightrightarrows{}\{\text{`l}_{\text{i}}\}}}{\ensuremath{\gamma{{}}\vdash{}\text{c}\leftleftarrows{}\text{\texttt{case}}\:\text{x}\:\text{\texttt{of}}\:\{\text{\ensuremath{\text{`l}_{\text{i}}\mapsto{}\text{T}_{\text{i}}}}\}}} \and{}\inferrule[\tiny{}TyConcl]{\ensuremath{\gamma_c{{}}} (\ensuremath{\overline{\text{x}}}) = \ensuremath{\text{C}}\\\ensuremath{\gamma{{}}\vdash{}\text{c}\leftleftarrows{}\text{C}}}{\ensuremath{\gamma{{}}\vdash{}\text{c}\leftleftarrows{}\overline{\text{x}}}} 
\and{}\inferrule[\tiny{}Infer]{\ensuremath{\Gamma{{}}{}(\text{x})=\overline{\text{X}}}\\\ensuremath{\gamma{{}}\vdash{}\overline{\text{X}}=\text{T}}}{\ensuremath{\gamma{{}}\vdash{}\text{x}\leftleftarrows{}\text{T}}} \end{mathpar}\caption{Typechecking a construction against a term: \ensuremath{\gamma{{}}\vdash{}\text{c}\leftleftarrows{}\text{T}}.}\label{26}\vspace{-0.3cm}\end{figure} 622 | \end{frame} 623 | 624 | \begin{frame}[plain]{Typing rules} 625 | \tiny 626 | \begin{figure}[!h] 627 | \begin{mathpar}\inferrule[\tiny{}Pair]{\ensuremath{\gamma{{}}\vdash{}\overline{\text{y}}\leftleftarrows{}\overline{\text{X}}}\\\ensuremath{\gamma{{}}+(\text{x}=(\overline{\text{y}}:\overline{\text{X}}))\vdash{}\overline{\text{z}}\leftleftarrows{}\text{T}}}{\ensuremath{\gamma{{}}\vdash{}(\overline{\text{y}},\overline{\text{z}})\leftleftarrows{}(\text{x}:\overline{\text{X}})\times{}\text{T}}} \and{}\inferrule[\tiny{}Lambda]{\ensuremath{\gamma{{}}+(\text{y}:\overline{\text{X}})+(\text{x}=\text{y})\vdash{}\text{t}\leftleftarrows{}\text{T}}}{\ensuremath{\gamma{{}}\vdash{}\lambda{{}}{}\text{y}.\text{t}\leftleftarrows{}(\text{x}:\overline{\text{X}})\to{}\text{T}}} \and{}\inferrule[\tiny{}Label]{\ensuremath{\text{`l}} \in \ensuremath{\{\text{`l}_{\text{i}}\}}}{\ensuremath{\gamma{{}}\vdash{}\text{`l}\leftleftarrows{}\{\text{`l}_{\text{i}}\}}} \and{}\inferrule[\tiny{}Sigma]{\ensuremath{\gamma{{}}\vdash{}\overline{\text{y}}\leftleftarrows{}\star{}_{\text{i}}}\\\ensuremath{\gamma{{}}+(\text{x}:\overline{\text{y}})\vdash{}\text{t}\leftleftarrows{}\star{}_{\text{i}}}}{\ensuremath{\gamma{{}}\vdash{}(\text{x}:\overline{\text{y}})\times{}\text{t}\leftleftarrows{}\star{}_{\text{i}}}} \and{}\inferrule[\tiny{}Pi]{\ensuremath{\gamma{{}}\vdash{}\overline{\text{y}}\leftleftarrows{}\star{}_{\text{i}}}\\\ensuremath{\gamma{{}}+(\text{x}:\overline{\text{y}})\vdash{}\text{t}\leftleftarrows{}\star{}_{\text{i}}}}{\ensuremath{\gamma{{}}\vdash{}(\text{x}:\overline{\text{y}})\to{}\text{t}\leftleftarrows{}\star{}_{\text{i}}}} \and{}\inferrule[\tiny{}Fin]{ }{\ensuremath{\gamma{{}}\vdash{}\{\text{`l}_{\text{i}}\}\leftleftarrows{}\star{}_{\text{i}}}} \and{}\inferrule[\tiny{}Universe]{\ensuremath{\text{i}\ensuremath{<}\text{j}}}{\ensuremath{\gamma{{}}\vdash{}\star{}_{\text{i}}\leftleftarrows{}\star{}_{\text{j}}}} \end{mathpar}\caption{Typechecking a construction against a construction: \ensuremath{\gamma{{}}\vdash{}\text{c}\leftleftarrows{}\text{C}}.}\label{27}\vspace{-0.3cm}\end{figure} 628 | \end{frame} 629 | 630 | \end{document} 631 | -------------------------------------------------------------------------------- /report-radanne/Report.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -i../src/ -XTypeSynonymInstances -XOverloadedStrings -XRecursiveDo -pgmF marxup3 -F #-} 2 | 3 | import MarXup 4 | import MarXup.Math 5 | import MarXup.Latex 6 | import MarXup.Latex.Math 7 | import MarXup.Latex.Bib 8 | import MarXup.Tex 9 | 10 | -- import qualified Terms as T 11 | -- import qualified TexnAgda as TA 12 | -- import qualified Ident 13 | 14 | import Common 15 | 16 | import Control.Monad 17 | import Data.Monoid 18 | import Data.List (sort,intersperse) 19 | 20 | classUsed :: ClassFile 21 | classUsed = LNCS 22 | 23 | classFile LNCS = "llncs" 24 | 25 | math = ensureMath 26 | 27 | preamble :: Bool -> Tex () 28 | preamble inMetaPost = do 29 | if inMetaPost 30 | then documentClass "article" ["13pt"] 31 | else documentClass (classFile classUsed) [] -- ["authoryear","preprint"] 32 | stdPreamble 33 | 
mathpreamble classUsed 34 | cmd "input" (tex "../../PaperTools/latex/unicodedefs") 35 | unless inMetaPost $ do 36 | usepackage "natbib" ["sectionbib"] 37 | usepackage "tikz" [] 38 | usepackage "fullpage" [] 39 | usepackage "mathpartir" [] 40 | usepackage "subcaption" [] 41 | usepackage "listings" [] 42 | cmd "input" (tex "lst") 43 | usepackage "hyperref" ["colorlinks","citecolor=blue"] 44 | cmd "usetikzlibrary" $ tex "shapes,arrows" 45 | usepackage "tabularx" [] 46 | 47 | title "A sequent-calculus presentation of type-theory" 48 | authorinfo classUsed authors 49 | 50 | 51 | -- | Document 52 | 53 | abstract :: TeX 54 | abstract = env "abstract" «Dependent types are an active area of research as foundations of mathematics but also as a basis for programming languages where many invariants can be internalized as types. Both these objectives argue for a minimal and efficient dependently typed core language. 55 | Most implementations so far use a core language based on ``natural deduction'', which we argue is ill-suited for a type-checking backend. 56 | We think that the ``natural deduction'' style is the source of efficiency issues in the presence of inference: it makes terms grow so large that typechecking becomes slow and the output of the typechecker becomes too large to be verified. 57 | Following some ideas from PiSigma, we propose to use a core language in sequent calculus style to solve those issues. 58 | We believe that this alternative solves these efficiency issues.» 59 | 60 | header = do 61 | maketitle 62 | abstract 63 | keywords classUsed $ sort 64 | [ "Dependent types", "Type theory", "Sequent calculus"] 65 | 66 | 67 | main = writeFile "Report.tex" =<< renderTex preamble « 68 | 69 | @header 70 | 71 | @(cmd"section*"«Forewords») 72 | 73 | This report aims to present the work accomplished during my 6-month internship at Chalmers, under the supervision of Jean-Philippe Bernardy and with the collaboration of Guilhem Moulin and Andreas Abel. 74 | 75 | This report presupposes some familiarity with a statically typed functional language like OCaml or Haskell, but no intimate knowledge of type theory or dependent types is required. 76 | 77 | @section«A crash-course in dependent types» 78 | 79 | In most programming languages, terms and types live in two different worlds: one cannot refer to terms in types and types cannot be manipulated like terms. 80 | On the other hand, in a dependently typed programming language, types can depend on terms. 81 | This addition may sound modest at first, but it makes the language more powerful... and harder to typecheck. 82 | Dependent types were previously mostly used for theorem proving (in Coq, for example). However they have since gained some popularity as a base for programming languages with Agda @citep"norell_practical_2007", Idris @citep"brady_idris_2013" or ATS @citep"chen_ats_2005". It is also related to the recent addition of some features in more mainstream programming languages, such as Generalized Algebraic Datatypes (GADTs) in OCaml or Haskell. 83 | 84 | GADTs @citep"xi_guarded_2003" allow encoding some properties in a non-dependent type system that would otherwise need dependent types. For example it is possible to use GADTs to encode the vectors shown in @example. However in the presence of GADTs, terms and types still live in two different worlds: GADTs are not as powerful as dependent types, as we show in @example.
85 | 86 | @example<-subsection«An example in Agda» 87 | 88 | Numerous examples have been presented to motivate the use of dependent types in mainstream programming @citep"oury_power_2008, brady_invariants_2008". We give here a short and simple example to outline the specificities of dependently typed languages from the user's point of view but also from the typechecking point of view. 89 | 90 | For this example, we use the Agda programming language. The syntax should be familiar enough given some knowledge of a statically-typed functional language. 91 | 92 | Let us first define the @agdai«Nat» datatype. A natural number is either zero or a successor of a natural number: 93 | @agdacode« 94 | data Nat : Set where 95 | Zero : Nat 96 | Succ : Nat -> Nat 97 | » 98 | This definition is similar to a definition using GADTs in OCaml or Haskell. @agdai«Set» shows that @agdai«Nat» is a base type, as @agdai«Int» or @agdai«Char» would be in OCaml or Haskell. 99 | 100 | Let us now move on to a more interesting datatype: vectors with fixed length. 101 | @agdacode« 102 | data Vec (A : Set) : Nat -> Set where 103 | Nil : Vec A Zero 104 | Cons : {n : Nat} -> A -> Vec A n -> Vec A (Succ n) 105 | » 106 | One can see in the signature of the above datatype that @agdai«Vec» is parametrized by type @agdai«A», the type of the elements, and indexed by a natural number which is the length of the vector. The declaration of @agdai«Cons» exhibits a useful feature of Agda: the argument @agdai«{n : Nat}» is implicit. The compiler infers this argument whenever possible, so the length of the vector we are consing onto need not be given. Providing the length on every call of @agdai«Cons» would be cumbersome. 107 | 108 | We can use this type information to implement a type-safe @agdai«head» function: 109 | @agdacode« 110 | head : {A : Set} {n : Nat} -> Vec A (Succ n) -> A 111 | head (Cons x xs) = x 112 | » 113 | Again, arguments @agdai«A» and @agdai«n» are declared implicit and it is left to the typechecker to infer them. The compiler knows that the @agdai«Nil» case cannot happen because the length of the provided vector is at least one. Any call to @agdai«head» with an empty vector argument does not typecheck. 114 | 115 | Assuming an infix addition function @agdai«(+): Nat -> Nat -> Nat», we can also implement the append function, which requires us to manipulate the natural numbers embedded in the type: 116 | @agdacode« 117 | append : forall { n m A } -> Vec A n -> Vec A m -> Vec A (n + m) 118 | append Nil ys = ys 119 | append (Cons x xs) ys = Cons x (append xs ys) 120 | » 121 | In the type, we assert that the length of the result is the sum of the lengths of the operands. We use the @agdai«forall» quantifier to declare the implicit arguments without specifying their types, as Agda is able to infer them. 122 | 123 | For now, we have seen that dependent types can be useful to assert properties on some datatype. These simple examples could be encoded with GADTs, although it would require extra boilerplate and be far less easy to manipulate. We could go on and define some other functions on vectors; however, to further motivate the use of dependent types, we instead present something difficult or impossible to do using GADTs. 124 | 125 | We present an embedding of relational algebra that was first discussed by @citet"oury_power_2008".
A typed embedded domain-specific language for relational databases presents interesting difficulties: relational algebra operators are hard to type, especially the join and cartesian product, and type safety usually relies on the static declaration of a schema. We use dependent types to overcome these two issues. 126 | 127 | Let us first consider the definition of a table schema: 128 | @agdacode« 129 | data U : Set where 130 | BOOL : U 131 | CHAR : U 132 | NAT : U 133 | VEC : U -> Nat -> U 134 | 135 | Schema : Set 136 | Schema = List (String xx U) 137 | » 138 | Here, @agdai«xx» is simply the type of pairs. The @agdai«U» type is the universe type for the values inside our database. Databases are restricted in what types of values they can handle, so this restriction is perfectly valid. A schema here is simply a list of columns with a name and a type. 139 | 140 | We need to decode the constructors of @agdai«U» to their representation as Agda types: 141 | @agdacode« 142 | el : U -> Set 143 | el BOOL = Bool 144 | el CHAR = Char 145 | el NAT = Nat 146 | el (VEC x n) = Vec (el x) n 147 | » 148 | 149 | A table is composed of rows which follow some schema. 150 | @agdacode« 151 | data Row : Schema -> Set where 152 | EmptyRow : Row [] 153 | ConsRow : forall {name u s} -> el u -> Row s -> Row ((name , u) :: s) 154 | 155 | Table : Schema -> Set 156 | Table s = List (Row s) 157 | » 158 | A @agdai«Row» is a list with added type information about the schema. Notice how the table is parametrized by the schema it instantiates. 159 | 160 | We can now define a datatype for relational algebra operators: 161 | @agdacode« 162 | data RA : Schema -> Set where 163 | Read : forall { s } -> Table s -> RA s 164 | Union : forall { s } -> RA s -> RA s -> RA s 165 | Product : forall { s s' } -> {So (disjoint s s')} -> RA s -> RA s' -> RA (s ++ s') 166 | Select : forall { s } -> Expr s BOOL -> RA s -> RA s 167 | ... 168 | » 169 | The first two constructors are straightforward: @agdai«Read» reads a given table and @agdai«Union» merges two tables following the same schema. The @agdai«Product» constructor, however, is much more interesting. To be able to compute the cartesian product of two tables, their columns must be disjoint. We can easily provide a function checking that two schemas are disjoint, of type: 170 | @agdacode« 171 | disjoint : Schema -> Schema -> Bool 172 | » 173 | We now want to ensure that this property is checked at the type level. In order to do so, we define a bridge from @agdai«Bool» to types: 174 | @agdacode« 175 | So : Bool -> Set 176 | So false = Empty 177 | So true = Unit 178 | » 179 | @agdai«So» takes a @agdai«Bool» and returns a type. The @agdai«Empty» type is, as its name indicates, a type with no elements; @agdai«Unit» is the type with only one element. The type @agdai«So x» has an element if and only if it is @agdai«Unit», in other words if @agdai«x» is @agdai«true». If the type has no element, it is impossible to find an expression that has this type and hence the program cannot typecheck. 180 | 181 | The @agdai«Product» constructor takes @agdai«So (disjoint s s')» as an argument: this is a proof that @agdai«s» and @agdai«s'» are indeed disjoint.
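For concreteness, the @agdai«disjoint» test itself admits a simple definition. The following is only a sketch (not necessarily the definition used by @citet"oury_power_2008"): it assumes the @agdai«occurs» membership test introduced below and the usual boolean operations @agdai«not» and @agdai«∧».
@agdacode«
-- Sketch: a schema is disjoint from s' when none of its
-- column names occurs in s'.
disjoint : Schema -> Schema -> Bool
disjoint [] s' = true
disjoint ((name , u) :: s) s' = not (occurs name s') ∧ disjoint s s'
»
The check is a plain boolean computation; @agdai«So» is what lifts its result to the type level.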
182 | 183 | Finally we define expressions used by @agdai«Select»: 184 | @agdacode« 185 | data Expr : Schema -> U -> Set where 186 | _!_ : (s : Schema) -> (column : String) -> {p : So (occurs column s)} -> Expr s (lookup column s p) 187 | equal : forall { u s } -> Expr s u -> Expr s u -> Expr s BOOL 188 | literal : forall { u s } -> el u -> Expr s u 189 | ... 190 | » 191 | Constructors @agdai«equal» and @agdai«literal» are simple and could be encoded easily with GADTs. However, the @agdai«_!_» constructor, which allows getting the value of a column, takes @agdai«So (occurs column s)» as an argument, for some schema @agdai«s». This is a proof that the column appears in the schema. The @agdai«occurs» function would have the type: 192 | @agdacode« 193 | occurs : String -> Schema -> Bool 194 | » 195 | We want the @agdai«_!_» constructor to return an expression of the type of the looked-up column, hence we could be tempted to use a mere lookup operator: 196 | @agdacode« 197 | lookup : (col : String) -> (s : Schema) -> U 198 | » 199 | However, Agda only accepts terminating functions. The @agdai«lookup» function, as defined here, is not guaranteed to terminate. Fortunately, we know that, in the context of selects, this lookup always terminates thanks to the proof object @agdai«{p : So (occurs column s)}». Hence we define the lookup function with this type instead: 200 | @agdacode« 201 | lookup : (col : String) -> (s : Schema) -> So (occurs col s) -> U 202 | » 203 | 204 | We can see in this example multiple characteristics of dependently typed programming languages. 205 | First, types and terms evolve in the same world and there is little to no distinction between them. 206 | Second, terms inhabiting a type are proofs of the proposition expressed by this type. This is a literal translation of the Curry-Howard isomorphism. In other theorem provers, like Coq, users can make use of a separate language to construct proofs, which are λ-terms. 207 | 208 | Finally, the typechecker must evaluate terms in order to typecheck. 209 | This makes typechecking more complicated and is the source of some limitations in current typecheckers. It is also part of the focus of this work. 210 | 211 | @section«Limitations of current implementations» 212 | 213 | The Agda typechecker suffers from some well-known issues that the dependent type theory community has been trying to solve: 214 | @itemize« 215 | @item The ``case decomposition'' issue, which is presented later on, @example_ulf. This issue comes from the fact that natural deduction style makes propagating typing constraints to subterms difficult. 216 | @item Agda's type checker uses a natural deduction style, and we believe this is why it suffers from efficiency issues. Inference duplicates parts of terms which are not shared in the Agda core representation of terms. Therefore typechecking must be done multiple times, causing performance penalties. 217 | @item Agda currently does not have a core language that can be reasoned about and formally verified. 218 | » 219 | 220 | Various new languages have been proposed to solve these issues, including Mini-TT @citep"Coquand_minitt_2009" and PiSigma @citep"AltenkirchDLO10". 221 | 222 | PiSigma is especially interesting because it tackles those problems by putting some constructions of the language in sequent calculus style, as explained in the next section. Unfortunately, although this solves some issues, others are introduced. In particular, the language lacks subject reduction (that is, evaluation does not preserve typing).
We believe that these issues are present mostly because part of the language is still in natural deduction style. 223 | 224 | @sec_seqstyle<-subsection«Sequent calculus presentation» 225 | 226 | There are various definitions of sequent calculus. In this report, we mean that every intermediate result or sub-term is bound to a variable. 227 | Sequent calculus is a well-known presentation for classical logic but so far has not been evaluated as a presentation of a type theory. 228 | The translation from natural deduction to sequent calculus can be mechanized for non-dependent type systems @citep"Puech_proof-upside_2013". It is interesting to investigate a sequent calculus presentation of dependent types, because it has appealing properties: 229 | @itemize« 230 | @item It is low-level, which makes it suitable as a back-end for dependently-typed languages. 231 | @item Sharing is expressible @citep"Launchbury_sharing_1993". In natural deduction style, we cannot express the fact that two subterms are identical. This is however desired, as it would help solve some efficiency issues encountered in Agda, for example. 232 | » 233 | 234 | @sec_goals<-subsection«Goals» 235 | 236 | We aim to construct a type-theory which can be used as a back-end for dependently-typed languages such as Agda or Coq. We call this language @na. Concretely, our goals are to have a language that is: 237 | @itemize« 238 | @item A type-theory: correctness should be expressible via types. 239 | @item Low-level: one should be able to translate high-level languages into this language while retaining properties such as run-time behaviour, complexity, etc. 240 | @item Minimal: it should be well defined and it should be possible to formally verify the type-checking algorithm. 241 | » 242 | 243 | @sec_lang<-section«Description of the language» 244 | 245 | Before describing the language itself, we define some common notions in type theory that we manipulate in the rest of this report. 246 | 247 | @subsection«Preliminary vocabulary» 248 | 249 | @paragraph«Constructors and destructors@newline» 250 | A language is often separated into destructors (also called eliminations) and constructors (also called introductions). For example in the lambda calculus, lambda abstractions are constructors and applications are destructors. A destruction of a construction (called a redex) can be reduced through β-reduction. In a more complicated language (including @na) there are also pairs (constructors) and projections (destructors). The projection of a pair can be similarly reduced. In sequent calculus, redexes are implemented as cuts. 251 | 252 | @paragraph«Universes@newline» 253 | In regular programming languages, one has types on the one hand and the set of types on the other hand. The latter cannot be manipulated directly but because terms cannot be used as types (or vice-versa), this is not an issue. However, in a dependently typed programming language, terms and types live together, therefore one can theoretically manipulate the set of types. One may wonder: is this set of types a type itself? For technical reasons @citep"benke_universes_2003" and in order to preserve the consistency of the type system, the answer has to be negative. The problem is essentially the same as Russell's paradox in mathematics: the set of sets cannot be a set. 254 | 255 | Therefore, types are stratified in universes (also called ``sorts'' or ``kinds'') indexed by natural numbers. 256 | We note these universes @(kind i) with @i ∈ @nat.
257 | Base types, like @nai«Int», are in @(kind 0) while @(kind i) is in @(kind (i + 1)). 258 | Types composed of other types live in the highest universe of their components@footnote«Only true in predicative theories, which are the ones we focus on in this report.». For example @nai«(Char, Int)» live in @(kind 0) while @nai«(Int, *0)» is in @(kind 1). Finally, for ease of manipulation, any element in @(kind i) is in @(kind j) whenever @i ≤ @j. In the case of @na, typing rules for universes are given @tr_constr_concl. 259 | 260 | @subsection«@na» 261 | 262 | As explained in @sec_seqstyle, every variable is bound. We can separate elements of the languages, presented @grammar_na, into the following categories: 263 | 264 | @description« 265 | 266 | @item'«Variables» are separated in two categories: conclusions and hypotheses. 267 | @description« 268 | @item'«Hypotheses» are available in the beginning of the program or are the result of an abstraction. It is not possible to construct an hypothesis. 269 | 270 | @item'«Conclusions» are either an hypothesis or the result of a construction of conclusions. Variables with a bar on the top @(concl x) are meta-syntactic variables for Conclusions. 271 | » 272 | @item'«Destructions», ranged over by the letter @d in @grammar_na, can be either a @texttt«case» (a pattern match) or of the form @d as shown in @grammar_na: an application, a projection or a cut. We do not bind the result of a @texttt«case», as opposed to other destructions. 273 | 274 | @item'«Dependent functions and products» follow the same pattern: @(pi_ x (concl y) t) and @(sigma_ x (concl y) t). The type on the left hand side can be a conclusion, because it does not depend on the element @x, hence it is possible to bind it before. However, the right hand side must be a term, because it depends on @x. @x is an hypothesis because it is abstract here. 275 | 276 | @item'«Enumerations» are a set of scopeless and non-unique labels. Labels are plain strings starting with an apostrophe. We use the meta-syntactic variables @l and @l2. 277 | 278 | @item'«Universes» are arranged in a tower, starting at 0, as explained above. We additionally use the shorthand @star for @(star @- 0). 279 | 280 | @item'«Constructions», ranged over by the letter @c and detailed @grammar_na, are either a conclusion, a universe, a type or a construction of pair, enumeration or function. The result must be bound to a conclusion. 281 | » 282 | @grammar_na<-figure'«Abstract syntax for @na»« 283 | @vspace"-35pt" 284 | @subfigure"b"«0.3»«Terms»«@align( 285 | (\(x:xs) -> [ «@t ::=@space», x ] : map (\y -> [ « |@space» , y ]) xs) 286 | [ «@(concl x)», 287 | «@(let_ x d t)» , 288 | «@(case_ x [ «@(mparen $ l → t) @text«*»» ])» , 289 | «@(let_ (concl x) c t)» 290 | ])» 291 | @subfigure"b"«0.3»«Destructions»«@align( 292 | (\(x:xs) -> [ «@d ::=@space», x ] : map (\y -> [ « |@space» , y ]) xs) 293 | [ «@x @space @(concl y)» , 294 | «@(proj1 x) @space | @space @(proj2 x) » , 295 | «@(color "red" «@(concl x <:> concl y)»)» 296 | ])» 297 | @subfigure"b"«0.3»«Constructions»«@align( 298 | (\(x:xs) -> [ «@c ::=@space», x ] : map (\y -> [ «|@space» , y ]) xs) $ 299 | map (mconcat . intersperse «@space|@space») 300 | [ [ «@x» ], 301 | [ «@λ @x . 
@t», «@(pi_ x (concl y) t)» ], 302 | [ «(@(concl x),@(concl y))»,«@(sigma_ x (concl y) t)» ], 303 | [ «@l», «@(fin_ l)» ], 304 | [ «@star @(indice i)» ] 305 | ])» 306 | @vspace"-0.2cm" 307 | » 308 | Conclusions are the results of constructions applied to conclusions, with hypotheses as the base case; an hypothesis, in turn, is the result of destructions applied to hypotheses. This means that a destruction is never applied directly to a construction, hence no reduction is possible and the program is in normal form. 309 | 310 | Obviously, we do not want to write only programs that are already in normal form, so we need a way to construct hypotheses from conclusions. That is the purpose of the cut construction, shown in red in @grammar_na. It allows us to declare a new hypothesis, given a conclusion and its type. The type is needed for type checking purposes. 311 | 312 | @subsection«A bit of sugar» 313 | 314 | Of course, it is tedious to write reasonable programs in this syntax, as it is far too verbose for humans. We introduce another, simpler syntax, which can be seen below and which can be translated to the one defined in the previous section. The translation can be done even on ill-typed terms and hence does not need preliminary typechecking. It is similar to the continuation-passing-style transformation defined by @citet"plotkin_call-by-name_1975". The translation binds every intermediate term to a fresh variable and replaces the subterm by this variable. A toy sketch of such a naming pass is given at the end of this section. 315 | 316 | Every program is composed of two parts: a term and its type. The typechecker checks that the type lives in a universe and then checks the term against its type. 317 | @fig_syntaxes is an example of a program in high-level syntax and its translation to the low-level syntax. The low-level version is verbose, which argues for the need for a high-level one. 318 | @fig_syntaxes<-figure'«The polymorphic identity, in both high-level and low-level syntax.»« 319 | @vspace"-0.6cm" 320 | @minipage"c"«0.4»«@nacode"../examples/010-Lam.ma"» 321 | @minipage"c"«0.55»«@nacode"../examples/010-Lam.na"» 322 | @vspace"-0.2cm" 323 | @centering 324 | » 325 | 326 | @subsection«An encoding for sum types» 327 | 328 | Before giving the details of our type system and evaluation strategy, let us consider a small example: we want to create, in @na, a non-dependent sum type such as those used in Agda, Haskell or OCaml. We only have enumerations, dependent products and dependent functions, but this is enough to encode sum types. @fig_iex shows a simple Agda sum type and the equivalent code in @na. 329 | 330 | The trick in this encoding is to separate the tag part (@nai«Foo» and @nai«Bar») from the type part. The tag part can easily be encoded as an enumeration. As for the type part, we take advantage of the dependent product to pattern-match on the tag and return the appropriate type. In this case, we have a sum type with a parameter, which is translated into a function. 331 | 332 | @fig_iex<-figure'«A sum type in Agda and @na.»« 333 | @vspace"-0.4cm" 334 | @minipage"c"«0.4»« 335 | @agdacode« 336 | data MySumtype (s : Set) : Set where 337 | Foo : s -> MySumtype s 338 | Bar : MySumtype s 339 | »» 340 | @minipage"c"«0.4»«@nacode"../examples/datatype.ma"» 341 | @vspace"-0.2cm" 342 | @centering» 343 | 344 | This example shows that, in a dependently typed programming language, enumerations are enough to simulate sum types, which is not possible in a non-dependently typed programming language. Here, a more powerful type system allows us to use an arguably simpler core language.
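To make the sugar-removal pass described above concrete, the following is a minimal Haskell sketch of the naming idea on a toy lambda calculus. It is purely illustrative: the data types and function names are hypothetical and are independent of the actual front-end code in src/.

import Control.Monad.State

-- High-level terms: sub-terms may nest arbitrarily.
data Expr = Var String | App Expr Expr | Lam String Expr

-- Low-level terms: every intermediate result is bound to a name.
data Low
  = Ret String                        -- final conclusion
  | LetApp String String String Low   -- x = f y ; rest
  | LetLam String String Low Low      -- x = \a -> body ; rest
  deriving Show

fresh :: State Int String
fresh = do n <- get; put (n + 1); return ("_x" ++ show n)

-- Return the variable standing for the value of the expression, together
-- with the bindings to prepend to the rest of the program.
name :: Expr -> State Int (String, Low -> Low)
name (Var x)   = return (x, id)
name (App f a) = do
  (vf, bf) <- name f
  (va, ba) <- name a
  r <- fresh
  return (r, bf . ba . LetApp r vf va)
name (Lam x b) = do
  (vb, bb) <- name b
  r <- fresh
  return (r, LetLam r x (bb (Ret vb)))

translate :: Expr -> Low
translate e = let (v, bind) = evalState (name e) 0 in bind (Ret v)

For example, translate (App (Var "f") (App (Var "g") (Var "x"))) first binds the inner application g x to a fresh name and then binds the outer application, mirroring how every sub-term is named in the low-level syntax of @fig_syntaxes.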
345 | 346 | 347 | @sec_type_eval<-section«Evaluation and type system» 348 | 349 | The typing rules for @na are standard; most of the cleverness lies in the way the environment is updated. Hence we start by presenting the environment and environment extensions. 350 | 351 | We use the same notation as in @sec_heap: @x for hypotheses, @(concl x) for conclusions, @c for constructions and @d for destructions. For the sake of clarity, elements used as types are capitalized. 352 | 353 | @sec_heap<-subsection«The heap» 354 | 355 | Because the language is based on variables and bindings, we need a notion of environment. This notion is captured in a heap composed of four mappings: 356 | @align[ 357 | [element γty , «: @(x |-> concl y )», «@text«The context heap, containing the types of hypotheses.»» ], 358 | [element γc , «: @(concl x |-> c)», «@text«The heap from conclusions to constructions.»» ], 359 | [element γa , «: @(x |-> y)», «@text«The heap for aliases on hypotheses.»» ], 360 | [element γd , «: @(x |-> d)», «@text«The heap from hypotheses to cuts and destructions.»» ] 361 | ] 362 | We also note @h = (@γc, @γa, @γd) for the heap alone and @γ = (@γty, @γc, @γa, @γd) for the heap with type information in the context. 363 | 364 | @envext<-subsection«Environment extensions» 365 | 366 | Here are the details of how the heap is updated when new information is registered. We use the @(math $ cmd0 "gets") operator to denote an update and the + operator to denote environment extensions. 367 | 368 | When typechecking abstractions, like lambdas or dependent functions and products, we need to introduce new hypotheses into the context without any value. 369 | @align[ 370 | [ «@(γ + cut_ x (concl yty))», «= @γ @text« with » @(γty ← cut_ x (concl yty))» ] 371 | ] 372 | 373 | When adding a destruction definition, we check whether a similar destruction definition already exists, using @γd. This allows automatic recovery of sharing for multiple applications of a function to the same argument. Searching for a specific destruction can be implemented efficiently by using a reversed map of @γd, from destructions to hypotheses. A small illustrative sketch of this mechanism is given below. 374 | @align[ 375 | [ «@(γ + (x \== d))», «= @γ @text« with » @(γa ← (x \== y))», «@(iff $ (y \== d) ∈ γd)» ], 376 | [ «» , «= @γ @text« with » @(γd ← (x \== d))», «@text«otherwise»» ] 377 | ] 378 | 379 | The rule for conclusions is straightforward, because we do not handle automatic sharing for conclusions as we do for destructions. Automatically rediscovering sharing for constructions is more costly than for destructions, because there are at most two components in a destruction whereas there can be far more in constructions. This additional cost should be evaluated, but this is left for future work. 380 | @align[ 381 | [«@γ + @(concl x \== c)», «= @γ @text« with » @(γc ← (concl x \== c))»] 382 | ] 383 | 384 | When checking or evaluating a case, we keep track of constraints on the variable decomposed by the case, allowing us to know, inside the body of a branch, which branch was taken. Of course, if two incompatible constraints arise, we abort the typechecking, because that means the context is inconsistent. 385 | @align[ 386 | [ «@γ + @(l \== x)», «= @γ» , «@(iff $ l \== x ∈ γc)» ], 387 | [ «» , «= @bot» , «@(iff $ l2 \== x ∈ γc) @text" for " @(l ≠ l2)» ], 388 | [ «» , «= @γ @text« with » @(γc ← (l \== x))», «@text«otherwise»» ] 389 | ] 390 | 391 | @sec_eval<-subsection«Evaluation strategy» 392 | 393 | We use the @squig operator to denote the reduction relation. Reduction rules operate both on a term and a heap.
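Before giving the reduction rules, the following Haskell sketch shows one possible concrete representation of the heap of @sec_heap and of the destruction extension of @envext, including the reversed map used to recover sharing. It is an illustration only: the names and types are hypothetical and do not reflect the actual implementation in src/.

import qualified Data.Map as Map

type Hyp    = String   -- hypothesis names
type Concl  = String   -- conclusion names
type Constr = String   -- constructions, kept abstract in this sketch

-- Destructions: application, projections, and cut (a conclusion with its type).
data Destr = App Hyp Concl | Proj1 Hyp | Proj2 Hyp | Cut Concl Concl
  deriving (Eq, Ord, Show)

-- The typing context γty is kept separately and omitted here.
data Heap = Heap
  { gammaC  :: Map.Map Concl Constr   -- conclusions to constructions
  , gammaA  :: Map.Map Hyp   Hyp      -- aliases between hypotheses
  , gammaD  :: Map.Map Hyp   Destr    -- hypotheses to destructions
  , gammaD' :: Map.Map Destr Hyp      -- reversed map, used to recover sharing
  } deriving Show

-- Adding x = d: if some y is already bound to the same destruction,
-- record x as an alias of y; otherwise record the new destruction.
addDestr :: Hyp -> Destr -> Heap -> Heap
addDestr x d h = case Map.lookup d (gammaD' h) of
  Just y  -> h { gammaA  = Map.insert x y (gammaA h) }
  Nothing -> h { gammaD  = Map.insert x d (gammaD h)
               , gammaD' = Map.insert d x (gammaD' h) }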
394 | For clarity, we use shortcuts for lookup operations, for example we note @(app γ (concl x) \= z concl y) instead of 395 | @(app γc (concl x) \= x' text«and» app γd (concl x') \= z concl y). 396 | 397 | The evaluation relation is presented as a big-step operational semantics from a heap and a term to a multiset of heaps and terms. As a shorthand, we drop the multiset notation when only one value is returned. We use a multiset as the return value to handle case decomposition on an abstract variable. 398 | 399 | As for every relation involving terms, we traverse the term and add every binding to the heap. When we encounter a case on a tag, we reduce it by taking the matching branch. If the variable is abstract, we return the multiset of the evaluations of each branch. 400 | @mathpar[[ 401 | «@(rule «EvalCase» [ 402 | «@(app h x \= cut_ (l @- i) (text "_"))», 403 | «@(h + ((l @- i) \== x) ⊢ (t @- i) ~> h' ⊢ concl x)» 404 | ] 405 | «@(h ⊢ case_ x [«@((l @- i) |-> (t @- i))»] ~> h' ⊢ concl x)») », 406 | «@(rule «AbstractCase» [ 407 | «@(app h x =/= cut_ (l @- i) (text "_"))», 408 | «∀ @i @quad @(h + ((l @- i) \== x) ⊢ (t @- i) ~> (h' @- i) ⊢ (concl x @- i))» 409 | ] 410 | «@(h ⊢ case_ x [«@((l @- i) |-> (t @- i))»] ~> mbracket ((h @- i) ⊢ (concl x @- i)))») », 411 | «@(rule «EvalDestr» [ 412 | «@(h ⊢ d ~> h' ⊢ t')», 413 | «@(h' + (x \== t') ⊢ t ~> h'' ⊢ concl x)» 414 | ] 415 | «@(h ⊢ let_ x d t ~> h'' ⊢ concl x)») », 416 | «@(rule «AddDestr» [ 417 | «@(h ⊢ d ~/> h' ⊢ t')», 418 | «@(h + (x \== d) ⊢ t ~> h' ⊢ concl x)» 419 | ] 420 | «@(h ⊢ let_ x d t ~> h' ⊢ concl x)») », 421 | «@(rule «AddConstr» [ 422 | «@(h + (concl x \== c) ⊢ t ~> h' ⊢ concl x)» 423 | ] 424 | «@(h ⊢ let_ (concl x) c t ~> h' ⊢ concl x)») », 425 | «@(rule «Concl» [ 426 | « » 427 | ] 428 | «@(h ⊢ concl x ~> h ⊢ concl x)») » 429 | ]] 430 | 431 | Destructions of constructions are evaluated eagerly; hence we add a special rule for each destruction and check whether the destructed hypothesis is a cut carrying the relevant construction. We need to evaluate only one cut for projections, because all previous cuts have already been evaluated. However, a reduction on a lambda can reveal multiple cuts inside the lambda, which are then evaluated. 432 | @mathpar[[ 433 | «@(rule «EvalProj@(indice 1)» [ 434 | «@(app h y \= cut_ (pair_ (concl z) (concl w)) (text "_"))» 435 | ] 436 | «@(h ⊢ (proj1 y) ~> h ⊢ concl z)») », 437 | «@(rule «EvalProj@(indice 2)» [ 438 | «@(app h y \= cut_ (pair_ (concl z) (concl w)) (text "_"))» 439 | ] 440 | «@(h ⊢ (proj2 y) ~> h ⊢ concl w)») », 441 | «@(rule «EvalApp» [ 442 | «@(app h y \= cut_ (lambda_ w t) (text "_"))», 443 | «@(h ⊢ subst t (concl z) w ~> h' ⊢ concl x)» 444 | ] 445 | «@(h ⊢ (mparen $ y concl z) ~> h' ⊢ concl x)») » 446 | ]] 447 | 448 | When we have only a conclusion left, the evaluation is finished: every redex has been evaluated while updating the heap. 449 | 450 | In the following section, we write @(γ ⊢ t ~> γ' ⊢ t') to mean @(γ \= pair_ γty h), @(h ⊢ t ~> h' ⊢ t') and @(γ' \= pair_ γty h'). 451 | 452 | @eqrules<-subsection«Equality rules» 453 | 454 | Equality rules can only be applied to normalized terms (without cuts). The equality relation, noted @(γ ⊢ t \= t'), is symmetric in @t and @t', hence each rule is given in only one direction. Equality rules use the following two operators: 455 | @itemize« 456 | @item @(x ≡ y) is the equality between variables. It means @x and @y are the same variable. 457 | @item @(x ≅ y) is the variable equality modulo aliases. 
It is defined as @(x ≡ y ∨ app γa x ≅ y ∨ x ≅ app γa y). In other words, it tests whether two hypotheses are in the same alias class. The alias environment only contains hypotheses, so this operator is not usable for conclusions.» 458 | These two operators are used to test equality between conclusions and hypotheses respectively. 459 | 460 | If the context is inconsistent, everything is true. This rule discards non-matching branches of a @nai«case». It fulfills the same purpose as the rule for environment extensions on labels presented in @envext. 461 | @align[ 462 | [ «@(bot ⊢ text "_")», «@lra true»] 463 | ] 464 | 465 | To verify equality on terms, we traverse both terms until we reach the conclusions, then we compare the definitions of the conclusions. If the conclusions are equal according to @(math $ cmd0"equiv"), we return immediately. 466 | 467 | @align[ 468 | [ «@(γ ⊢ let_ x d t \= t')», «@lra @(γ' + x \== t'' ⊢ t \= t')»], 469 | [ «@(γ ⊢ let_ (concl x) c t \= t')», «@lra @(γ + (concl x) \== c ⊢ t \= t')»], 470 | [ «@(γ ⊢ case_ x [«@((l @- i) |-> (t @- i))»] \= t)», 471 | «@lra @fa @i @quad @(γ + x \== (l @- i) ⊢ (t @- i) \= t)»], 472 | [ «@(γ ⊢ concl x \= concl y)», 473 | «@lra @(concl x ≡ concl y ∧ γ ⊢ app γc (concl x) \= app γc (concl y))»] 474 | ] 475 | 476 | To verify that two constructions are equal, we proceed by induction on the structure of constructions. 477 | @align[ 478 | [ «@(γ ⊢ (l \= l))», «@lra true»], 479 | [ «@(γ ⊢ (kind i \= kind j))», «@lra @(i \= j)»], 480 | [ «@(γ ⊢ x \= y)», «@lra @(x ≅ y)»], 481 | [ «@(γ ⊢ lambda_ x t \= lambda_ y t')», «@lra @(γ + (x \== y) ⊢ t \= t')»], 482 | [ «@(γ ⊢ pair_ (concl x) (concl y) \= pair_ (concl x') (concl y'))», 483 | «@lra @(γ ⊢ concl x \= concl x' ∧ γ ⊢ concl y \= concl y') »], 484 | [ «@(γ ⊢ pi_ x (concl y) t \= pi_ x' (concl y') t')», 485 | «@lra @(γ ⊢ concl y \= concl y' ∧ γ + (x \== x') ⊢ t \= t') »], 486 | [ «@(γ ⊢ sigma_ x (concl y) t \= sigma_ x' (concl y') t')», 487 | «@lra @(γ ⊢ concl y \= concl y' ∧ γ + (x \== x') ⊢ t \= t') »], 488 | [ «@(γ ⊢ (fin_ (l @- i) \= fin_ (l2 @- i)))», 489 | «@lra @fa @i @quad @((l @- i) \= (l2 @- i))»] 490 | ] 491 | 492 | The last two rules are interesting in that they are asymmetric: a construction on the left and a variable on the right. To test the equality in this case, we need to introduce new variables and apply destructions to the variable side of the equality. This gives us η-equality in the type theory: we can therefore prove that @(lambda_ x (mparen (text«f» x)) =: text«f»), even if @text«f» is abstract. 493 | @align[ 494 | [ «@(γ ⊢ lambda_ x t \= y)», 495 | «@lra @(γ + (concl x \== x) + (z \== (y concl x)) ⊢ t \= z)»], 496 | [ «@(γ ⊢ pair_ (concl x) (concl x') \= y)», 497 | «@lra @(γ + (z \== proj1 y) ⊢ concl x \= z ∧ γ + (z \== proj2 y) ⊢ concl x' \= z) »] 498 | ] 499 | 500 | @typerule<-subsection«Typing rules» 501 | 502 | The typing rules can be divided into three mutually defined relations. The first two relations, noted @(Con $ cmd0 "leftleftarrows"), are typechecking relations for terms and constructions respectively. The last relation, for destructions, is an inference relation, noted @(Con $ cmd0 "rightrightarrows"). 503 | 504 | We note typechecking for terms @(γ ⊢ t <@ tty); the rules are presented in @tr_term. The type here is always a complete term and must have been checked beforehand.
505 | In the @ruleref«Constr» rules, we do not need to typecheck the construction in detail because any construction added this way is typechecked by either the @ruleref«Concl» rule or the @ruleref«Cut» rule. In the @ruleref«Destr» rule, we use the inference relation on destructions to ensure that every hypothesis has a type in the context. We also evaluate the destruction eagerly. 506 | 507 | @tr_term<-figure'«Typechecking a term: @(γ ⊢ t <@ tty)»« 508 | @mathpar[[ 509 | «@(rule «Case» [ 510 | «@(fa i) @quad @(γ + ((l @- i) \== x) ⊢ (t @- i) <@ tty)», 511 | «@γty (x) = @(fin_ $ (l @- i))» 512 | ] 513 | «@(γ ⊢ case_ x [«@((l @- i) |-> (t @- i))»] <@ tty)») », 514 | «@(rule «Constr» [ 515 | «@(γ + (concl x \== c) ⊢ t <@ tty)» 516 | ] 517 | «@(γ ⊢ let_ (concl x) c t <@ tty)») », 518 | «@(rule «Concl» [ 519 | «@γc (@concl(x)) = @c», 520 | «@(γ ⊢ c <@ tty)» 521 | ] 522 | «@(γ ⊢ concl x <@ tty)») », 523 | «@(rule «Destr» [ 524 | «@(γ ⊢ d @> tty')», 525 | «@(γ ⊢ d ~> γ' ⊢ t')», 526 | «@(γ' + (x \== t') + cut_ x tty' ⊢ t <@ tty)» 527 | ] 528 | «@(γ ⊢ let_ x d t <@ tty)») », 529 | «@(rule «Eval» [ 530 | «@(γ ⊢ tty ~> mbracket ((γ' @- i) ⊢ (concl xty @- i)))», 531 | «∀ @i @quad @((γ' @- i) ⊢ (concl xty @- i) <@ concl xty)» 532 | ] 533 | «@(γ ⊢ t <@ tty)») » 534 | ]]» 535 | 536 | The inference relation for destructions, presented @tr_destr, is noted @(γ ⊢ d @> tty). Most rules rely on the fact that every hypothesis has its type in the context. Once we know the type of the hypothesis part of the destruction, we check that the destruction is consistent and reconstruct the complete type. The @ruleref«Cut» destructions, on the other hand, are verified by typechecking the conclusion of the cut. 537 | 538 | @tr_destr<-figure'«Inferring the type of a destruction: @(γ ⊢ d @> tty).»« 539 | @mathpar[[ 540 | «@(rule «App» [ 541 | «@(app γty y \= pi_ z (concl xty) tty)», 542 | «@(γ ⊢ concl z <@ concl xty)» 543 | ] 544 | «@(γ ⊢ y concl z @> tty)») », 545 | «@(rule «Proj@(indice 1)» [ 546 | «@(app γty y \= sigma_ z (concl xty) tty)» 547 | ] 548 | «@(γ ⊢ proj1 y @> concl xty)») », 549 | «@(rule «Proj@(indice 2)» [ 550 | «@(app γty y \= sigma_ z (concl xty) tty)» 551 | ] 552 | «@(γ ⊢ proj2 y @> tty)») », 553 | «@(rule «Cut» [ 554 | «@(γ ⊢ concl x <@ concl xty)» 555 | ] 556 | «@(γ ⊢ cut_ (concl x) (concl xty) @> concl xty)») » 557 | ]]» 558 | 559 | A construction is checked against a term or a construction; it is noted respectively @(γ ⊢ c <@ tty) and @(γ ⊢ c <@ cty). Type checking a construction against a term is merely a matter of traversing the type to access the final conclusion, as shown in rules @tr_constr_term. When we reach the conclusion of the term, we can look up its definition, which has to be a construction, and continue typechecking. The @ruleref«Infer» rule is a bit different in that it uses the context for hypotheses and typechecks by unifying the two types. 
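The following Haskell fragment sketches, on a drastically simplified syntax, how checking a construction against a term used as a type simply walks the bindings of the term down to its final conclusion and then continues against that conclusion's definition. Names and types are hypothetical; the fragment only illustrates the shape of the rules in @tr_constr_term and @tr_constr_concl, not the actual typechecker in src/.

import qualified Data.Map as Map

type Concl = String

-- A drastically simplified syntax: destructions, cases, pairs, etc. are omitted.
data Term   = LetConstr Concl Constr Term   -- concl = c ; rest   (rule TyConstr)
            | Final Concl                   -- the final conclusion (rule TyConcl)
data Constr = Universe Int                  -- other constructions omitted

type ConclHeap = Map.Map Concl Constr

-- Checking a construction against a term-as-type: extend the heap with each
-- binding, then look up the final conclusion and check against its definition.
checkAgainstTerm :: ConclHeap -> Constr -> Term -> Either String ()
checkAgainstTerm g c (LetConstr y cy rest) = checkAgainstTerm (Map.insert y cy g) c rest
checkAgainstTerm g c (Final y) =
  case Map.lookup y g of
    Just cy -> checkAgainstConstr g c cy
    Nothing -> Left ("unbound conclusion: " ++ y)

-- Checking a construction against a construction (only the Universe rule here).
checkAgainstConstr :: ConclHeap -> Constr -> Constr -> Either String ()
checkAgainstConstr _ (Universe i) (Universe j)
  | i < j     = Right ()
  | otherwise = Left "universe levels do not match"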
560 | 561 | @tr_constr_term<-figure'«Typechecking a construction against a term: @(γ ⊢ c <@ tty).»« 562 | @mathpar[[ 563 | «@(rule «TyDestr» [ 564 | «@(γ + (x \== d) ⊢ c <@ tty )» 565 | ] 566 | «@(γ ⊢ c <@ (let_ x d tty) )») », 567 | «@(rule «TyConstr» [ 568 | «@(γ + (concl x \== c) ⊢ c <@ tty )» 569 | ] 570 | «@(γ ⊢ c <@ (let_ (concl x) c tty) )») », 571 | «@(rule «TyCase» [ 572 | «@(fa i) @quad @(γ + ((l @- i) \== x) ⊢ c <@ (tty @- i))», 573 | «@(γ ⊢ x @> fin_ (l @- i))» 574 | ] 575 | «@(γ ⊢ c <@ case_ x [«@((l @- i) |-> (tty @- i))»] )») », 576 | «@(rule «TyConcl» [ 577 | «@γc (@(concl x)) = @cty», 578 | «@(γ ⊢ c <@ cty)» 579 | ] 580 | «@(γ ⊢ c <@ concl x)») », 581 | «@(rule «Infer» [ 582 | «@(app γty x \= concl xty)», 583 | «@(γ ⊢ concl xty \= tty)» 584 | ] 585 | «@(γ ⊢ x <@ tty)») » 586 | ]]» 587 | 588 | The typechecking rules for constructions, shown in @tr_constr_concl, are similar to the typechecking rules for a language in natural deduction style, except that instead of subterms, we have conclusions. The definitions of those conclusions play the role of subterms. The @ruleref«Lazy» rules can only be applied if the language is lazily evaluated. On the other hand, if the evaluation is strict, the redex has already been reduced to a normal form. 589 | 590 | @tr_constr_concl<-figure'«Typechecking a construction against a construction: @(γ ⊢ c <@ cty).»« 591 | @mathpar[[ 592 | «@(rule «Pair» [ 593 | «@(γ ⊢ concl y <@ concl xty)», 594 | «@(γ + x \== cut_ (concl y) (concl xty) ⊢ concl z <@ tty)» 595 | ] 596 | «@(γ ⊢ pair_ (concl y) (concl z) <@ sigma_ x (concl xty) tty)») », 597 | «@(rule «Lambda» [ 598 | «@(γ + cut_ y (concl xty) + (x \== y) ⊢ t <@ tty)» 599 | ] 600 | «@(γ ⊢ (lambda_ y t) <@ pi_ x (concl xty) tty)») », 601 | «@(rule «Label» [ 602 | «@l ∈ @(fin_ (l @- i))» 603 | ] 604 | «@(γ ⊢ l <@ fin_ (l @- i))») », 605 | «@(rule «Sigma» [ 606 | «@(γ ⊢ (concl y) <@ kind i)», 607 | «@(γ + cut_ x (concl y) ⊢ t <@ kind i)» 608 | ] 609 | «@(γ ⊢ (sigma_ x (concl y) t) <@ kind i)») », 610 | «@(rule «Pi» [ 611 | «@(γ ⊢ (concl y) <@ kind i)», 612 | «@(γ + cut_ x (concl y) ⊢ t <@ kind i)» 613 | ] 614 | «@(γ ⊢ (pi_ x (concl y) t) <@ kind i)») », 615 | «@(rule «Fin» [ 616 | « » 617 | ] 618 | «@(γ ⊢ (fin_ (l @- i)) <@ kind i)») », 619 | «@(rule «Universe» [ 620 | «@(binop 1 «<» i j)» 621 | ] 622 | «@(γ ⊢ kind i <@ kind j)») » 623 | ]]» 624 | 625 | 626 | @section«Properties on typing and reduction» 627 | 628 | In order for @na to be interesting as a core language for a dependently typed framework, we need to provide some guarantees about the behaviour of the execution of well-typed terms. The proofs of these properties are still being worked on and are left to be published in future work. Because the language is not in natural deduction style, we present these classic properties in a slightly different way. 629 | 630 | A desirable property is that well-typedness is preserved by the reduction rules. 631 | @proposition«@cmd"textbf"«Subject reduction»»« 632 | Let @h be a heap, @γty a context, and @tty and @t two terms. If there exist a heap @h' and a term @t' such that @(h ⊢ t ~> h' ⊢ t'), 633 | then we have @display(pair_ γty h ⊢ t <@ tty ==> (pair_ γty h' ⊢ t' <@ tty)) 634 | » 635 | 636 | Finally, we want to ensure that any successfully typechecked term evaluates to a normal form. This guarantees that the evaluation of typechecked terms always terminates.
637 | @proposition«@cmd"textbf"«Strong normalization»»« 638 | Let @h be a heap, @γty a context and @t, @tty terms such that 639 | @(pair_ γty h ⊢ t <@ tty). 640 | Then there exists a multiset of heaps and conclusions such that cuts are never referenced from destructions in the heaps @(h' @- i) and 641 | @display(h ⊢ t ~> mbracket ((h' @- i) ⊢ (concl x @- i))) 642 | » 643 | 644 | @section«Results and Examples» 645 | 646 | In @envext, we argue that sharing can be recovered by checking whether a destruction is already bound to a variable and recording an alias in that case. 647 | We show in @example_sharing an example where this feature is useful. The function in this example takes as arguments a pair @agdai«p» and a binary predicate @agdai«P». 648 | We then force the typechecker to unify two versions of the same destructions, once at the term level and once at the type level. To compare them, Agda's typechecker unfolds both terms, which can be inefficient if the normal forms are large. 649 | In @na, we rediscover the sharing between the two versions of @agdai«u1» and @agdai«u2», hence the structures to compare are smaller. 650 | In particular, if @nai«p» in this piece of code were a big term instead of being abstract, the performance penalty for Agda would be significant. 651 | 652 | @example_sharing<-figure'«Recovering sharing in Agda and @na.»« 653 | @vspace"-0.4cm" 654 | @minipage"c"«0.45»« 655 | @agdacode« 656 | sharing : 657 | (A : Set) -> (B : Set) -> 658 | (P : A -> B -> Set) -> (p : A * B) -> 659 | let (u1 , u2) = p 660 | v = P u1 u2 661 | in v -> v 662 | sharing A B P (u1' , u2') = 663 | let v' = P u1' u2' 664 | in \(x : v') -> x 665 | »» 666 | @minipage"c"«0.5»«@nacode"../examples/032-Nisse.ma"» 667 | @centering 668 | » 669 | 670 | For the next examples, we need a notion of equality that we can use in type signatures. Leibniz equality is defined by @agdai«Eq» and @agdai«refl» (for reflexivity) in the example @example_eq. The idea is to make the unification engine compute the equality. 671 | For example, provided @nai«Bool» and @nai«not», @nai«refl Bool 'true : Eq Bool 'true (not 'false)» makes the unification engine compute the fact that @nai«'true» = @nai«not 'false». 672 | The only element of the @agdai«Eq» type is a proof by reflexivity. 673 | If the two arguments of @agdai«Eq» do not unify, the program does not typecheck. 674 | @example_eq<-figure'«Encoding equality at the type level»« 675 | @vspace"-0.4cm" 676 | @minipage"c"«0.43»« 677 | @agdacode« 678 | data _==_ 679 | {A:Set} (x:A) : A -> Set where 680 | refl : x == x 681 | »» 682 | @minipage"c"«0.54»«@listing["language=nanoAgda"]« 683 | Eq = 684 | (\A -> \x -> \y -> (P: A -> *0) -> P x -> P y) 685 | : (A : *0) -> A -> A -> *1; 686 | refl = (\A -> \x -> \P -> \p -> p) 687 | : (A : *0) -> (x:A) -> Eq A x x; 688 | »» 689 | @centering 690 | » 691 | 692 | We assume in the following examples that @nai«Eq» and @nai«refl» are in scope. We also assume that we have @nai«Bool = { 'true, 'false } : *0». 693 | 694 | One long-standing issue in Agda is that the typechecker has no knowledge of which branch was taken while it typechecks the body of a branch. In the example @example_ulf, the typechecker must verify that @nai«h (f x0) x0» = @nai«f x» in the branch where @nai«f x0 = 'true» (the equality is true only in this branch). In @na, @nai«f x0» is bound to an intermediate variable @nai«y» and the typechecker can express constraints on it (for instance the fact that @nai«'true = y»).
695 | On the contrary, the Agda typechecker unfolds each term but does not reconstruct the constraint on @agdai«f x». @example_ulf does not typecheck in Agda. 696 | The fact that this example typechecks in @na and not in Agda is a direct consequence of the sequent calculus presentation. Indeed, the fact that each subterm is bound to a variable allows constraints to be expressed at a much more precise level. 697 | 698 | @example_ulf<-figure'«Smart case»« 699 | @vspace"-0.7cm" 700 | @minipage"c"«0.5»« 701 | @agdacode« 702 | SmartCase : 703 | (A : Set) -> (A -> Bool) -> 704 | (A -> Bool) -> A -> Bool 705 | SmartCase A f g x = h' y 706 | where h : Bool -> A -> Bool 707 | h true = f 708 | h false = g 709 | 710 | x0 = x 711 | y = f x0 712 | 713 | h' : Bool -> Bool 714 | h' true = 715 | let z : (h y x0) == y 716 | z = refl 717 | in true 718 | h' false = false 719 | »»@minipage"c"«0.5»«@listing["language=nanoAgda"]« 720 | Bool = { 'true, 'false } : *0; 721 | A = *0 : *1; 722 | SmartCase = 723 | (\f -> \g -> \x -> 724 | (h = \b -> case b of { 725 | 'true -> f. 726 | 'false -> g. } 727 | : (b : Bool) -> A -> Bool; 728 | x0 := x; 729 | y = f x0; 730 | case y of { 731 | 'true -> 732 | z = (refl Bool y) 733 | : Eq Bool (h y x0) y; 734 | 'true. 735 | 'false -> 'false.} 736 | )) 737 | : (f : A -> Bool) -> (g : A -> Bool) -> 738 | A -> Bool 739 | »» 740 | @centering 741 | » 742 | 743 | A property of boolean functions is that if @nai«f» is of type @nai«Bool -> Bool», then @nai«f x» = @nai«f (f (f x))». This was introduced by @citet"Altenkirch_norm_2004" in the context of type theory. @example_triplef encodes this property in Agda and @na using @nai«Eq» and @nai«refl». Neither Agda nor @na manages to typecheck this example; however, we think that it would be possible in @na with a better handling of nested cases. 744 | @example_triplef<-figure'«Triple application of a boolean function»« 745 | @vspace"0.3cm" 746 | @minipage"c"«0.5»« 747 | @agdacode« 748 | tripleF : 749 | (f : Bool -> Bool) -> (x : Bool) -> 750 | (f x) == (f (f (f x))) 751 | tripleF f x with x | f x 752 | ... | true | true = refl 753 | ... | true | false = refl 754 | ... | false | true = refl 755 | ... | false | false = refl 756 | »»@minipage"c"«0.5»«@listing["language=nanoAgda"]« 757 | tripleF = 758 | (\f -> \x -> ( 759 | case x of { 760 | 'true -> case f x of { 761 | 'true -> refl Bool 'true. 762 | 'false -> refl Bool 'false. 763 | }. 764 | 'false -> case f x of { 765 | 'true -> refl Bool 'true. 766 | 'false -> refl Bool 'false. 767 | }. 768 | })) 769 | : (f: Bool -> Bool) -> (x : Bool) -> 770 | Eq Bool (f x) (f (f (f x))) 771 | »» 772 | @centering 773 | » 774 | @cmd0"newpage" 775 | @section«Conclusion» 776 | 777 | The language presented in this report aims to solve some issues present in other dependently typed languages, including the lack of fine-grained constraints during typechecking and efficiency issues. To tackle these problems, we proposed a language in sequent calculus style. This work is still ongoing, but the current implementation is promising in that it achieves the goals stated in @sec_goals. We provided some examples demonstrating the ability to encode complicated constructions in the language. Moreover, @na typechecks some examples that could not be typechecked by previous systems. In the future we would like to use @na as a platform for experimental features of type theory, such as colors @citep"bernardy_type-theory_2013". 
However, there is still a lot to do, including: 778 | @itemize« 779 | @item Improve the typechecker to typecheck more examples, like the one presented in @example_triplef. 780 | @item As it stands, the language does not contain recursion. Of course, this makes the language not yet suitable as a back-end. We would like to use sized types @citep"Abel_sizetype_2006" to implement well-founded recursion. 781 | @item Evaluate the performance of the typechecker on some large programs and compare it with Agda. 782 | @item Prove subject-reduction and normalization. 783 | » 784 | 785 | This internship was also an occasion for me to gain a more detailed knowledge of type systems, especially dependently typed ones. In this respect, I think the internship was successful: I now have a far better understanding of dependent types, both from a usage and from a typechecking point of view, than five months ago, and I think this knowledge will be helpful in the future. 786 | 787 | @bibliographyAll 788 | 789 | » 790 | --------------------------------------------------------------------------------