├── .gitignore ├── April17 ├── Command.hs ├── DefEq.hs ├── Elaborator.hs ├── Kernel.hs ├── Main.hs ├── OPE.hs ├── ProofState.hs ├── Raw.hs ├── Render.hs ├── Utils.hs └── guide │ └── guide.tex ├── Boxen └── Boxen.hs ├── Crossroads ├── Crossroads.bib ├── Crossroads.tex ├── MetaMeta.agda ├── board1.jpg ├── board2.jpg ├── board3.jpg ├── board4.jpg ├── pig.sty └── sigplanconf.cls ├── DeBruijnify.hs ├── Extrude ├── April.tex ├── Extrude.tex ├── Kankan.tex ├── Pointy.pdf ├── Pointy.tex └── pig.sty ├── Layout.hs ├── MetaZEUG ├── Layout.hs ├── MetaZOIG.tex └── Raw.hs ├── PHOAS.hs ├── PathEnds.txt ├── ProofState.hs ├── README.md ├── Raw.hs ├── Syntax.hs ├── Test.hs ├── TypeCheck.hs ├── Utils.hs └── tests └── tests.zeug /.gitignore: -------------------------------------------------------------------------------- 1 | dist 2 | cabal-dev 3 | *.o 4 | *.hi 5 | *.chi 6 | *.chs.h 7 | *.dyn_o 8 | *.dyn_hi 9 | .virtualenv 10 | .hpc 11 | .hsenv 12 | .cabal-sandbox/ 13 | cabal.sandbox.config 14 | *.prof 15 | *.aux 16 | *.hp 17 | *~ 18 | *.bbl 19 | *.blg 20 | *.log 21 | *\# -------------------------------------------------------------------------------- /April17/Command.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GADTs, PatternGuards #-} 2 | 3 | module Command where 4 | 5 | import Data.List 6 | 7 | import Utils 8 | import Raw 9 | import Kernel 10 | import ProofState 11 | import Elaborator 12 | 13 | command :: ProofState -> Raw c -> ([String], Maybe ProofState) 14 | 15 | -- MAKING A DEFINITION 16 | command ps (RC (RA _ x) (Only b)) | isDefBody b = case newName ps x of 17 | Nothing -> (["I don't like the name " ++ x], Nothing) 18 | Just x -> case tryElab (ps, C0, x) (define b) of 19 | Just (_, ps) -> (["Aye."], Just ps) 20 | Nothing -> (["Naw."], Nothing) 21 | 22 | -- GIVING A REFINEMENT 23 | command (Cur ez u (EHole m@(Meta s x _Theta _I) : es)) 24 | (RC (RA _ "=") (Only b)) = case s of 25 | Pnty -> (["Try points another day."], Nothing) 26 | Syny -> case tryElab (Cur ez u es, _Theta, x) (chk _I b) of 27 | Nothing -> (["Naw."], Nothing) 28 | Just (t, Cur ez u es) -> 29 | ( ["Aye."] 30 | , Just (fwdToGoal $ Cur ez u (updates [m :=> Solve (mapIx IS t)] es)) 31 | ) 32 | 33 | -- ZOOMING TO A NAME 34 | command ps@(Cur ez u es) (RC (RA _ "/") (Only (RA _ x))) = 35 | case fwdToView (Cur B0 (fst (parseName ps x), (Nothing, Nothing)) 36 | (ez <>> es)) of 37 | Cur _ (LongName (_ : _), _) [] -> (["Where's " ++ x ++ "?"], Nothing) 38 | ps -> ([], Just (fwdToGoal ps)) 39 | 40 | -- ZOOMING OUT 41 | command (Cur ez u@(LongName p@(_ : _), _) es) (RA _ "<") = 42 | ([], Just (Cur ez (LongName (init p), (Nothing, Nothing)) es)) 43 | 44 | -- DOWN ONE 45 | command (Cur ez u@(p, (_, n)) (e : es)) (RA _ "next") 46 | | Just _ <- inView (p, n) e = ([], Just (Cur (ez :< e) u es)) 47 | | otherwise = (["Bump!"], Nothing) 48 | 49 | -- UP ONE 50 | command (Cur (ez :< e) u@(p, (n, _)) es) (RA _ "prev") 51 | | Just _ <- inView (p, n) e = ([], Just (Cur ez u (e : es))) 52 | | otherwise = (["Bump!"], Nothing) 53 | 54 | -- SETTING A RANGE 55 | command ps (RC (RA _ x) (RA _ "^" :-: Only (RA _ y))) 56 | = rerange ps (Just (fst (parseName ps x)), Just (fst (parseName ps y))) 57 | command ps (RC (RA _ x) (Only (RA _ "^"))) 58 | = rerange ps (Just (fst (parseName ps x)), Nothing) 59 | command ps (RC (RA _ "^") (Only (RA _ y))) 60 | = rerange ps (Nothing, Just (fst (parseName ps y))) 61 | command (Cur ez (p, _) es) (RA _ "^") 62 | = ([], Just (Cur ez (p, (Nothing, Nothing)) es)) 63 | 64 | -- 
BARF! 65 | command ps c = (["Try doing something else."], Nothing) 66 | 67 | isDefBody :: Raw c -> Bool 68 | isDefBody (RC (RA _ "=") (Only s)) = True 69 | isDefBody (RC _S (Only (RB _ x _T))) = isDefBody _T 70 | isDefBody (RC _S (Only _T)) = isDefBody _T 71 | isDefBody _ = False 72 | 73 | rerange :: ProofState -> (Maybe LongName, Maybe LongName) -> 74 | ([String], Maybe ProofState) 75 | rerange ps@(Cur ez (p@(LongName x), _) es) (a, b) = 76 | ([], Just (Cur ez (p, (a >>= tidy, b >>= tidy)) es)) where 77 | tidy (LongName y) = LongName <$> stripPrefix x y 78 | -------------------------------------------------------------------------------- /April17/DefEq.hs: -------------------------------------------------------------------------------- 1 | -- mind your... 2 | {-# LANGUAGE GADTs, DataKinds, TypeOperators, KindSignatures, TypeFamilies #-} 3 | module DefEq where 4 | 5 | import Data.Void 6 | import Prelude hiding ((^^)) 7 | 8 | import Utils 9 | import OPE 10 | import Kernel 11 | 12 | 13 | etaExpand :: Sorted gamma => 14 | Context gamma -> Radical gamma Syn -> Term Chk ^ gamma 15 | etaExpand gamma f@(_ ::: Pi _ST :^ _R) = _ST :^ _R >^< \_S _T -> 16 | mapIx Lam $ abstract "YKW" (etaExpand (gamma :\ (Syny,"YKW",_S)) 17 | (app (radWk f) freshVar)) 18 | etaExpand gamma (Star Void :^ _ ::: Star Void :^ _) = star 19 | etaExpand gamma (Pi _ST :^ _R ::: Star Void :^ _) = _ST :^ _R >^< \_S _T -> 20 | let _S' = etaExpand gamma (_S ::: star) 21 | _T' = instantiate (wk _T) (freshVar ::: wk _S) 22 | in mapIx Pi (pair _S' (abstract "YKW" (etaExpand (gamma :\ (Syny,"YKW",_S)) 23 | (_T' ::: star)))) 24 | etaExpand gamma (E e :^ r ::: _) = 25 | mapIx E (fst $ neutExpand gamma (e :^ r)) 26 | 27 | neutExpand :: Sorted gamma => 28 | Context gamma -> 29 | Term Syn ^ gamma -> 30 | (Term Syn ^ gamma, -- eta expanded term 31 | Term Chk ^ gamma) -- its reconstructed type (not expanded) 32 | neutExpand gamma e@(V It :^ r) = (e , lookupC gamma r) 33 | neutExpand gamma (App fs :^ r) = fs :^ r >^< \f s -> 34 | case neutExpand gamma f of 35 | (f , Pi _ST :^ _R) -> _ST :^ _R >^< \_S _T -> 36 | (mapIx App (pair f (etaExpand gamma (s ::: _S))), 37 | instantiate _T (s ::: _S)) 38 | neutExpand gamma (Hole m@(Meta Syny _ _Theta (_T :^ _R)) theta :^ r) = 39 | case help gamma _Theta (theta :^ r) of 40 | (theta' :^ r', fs) -> sortedObj _R 41 | (Hole m theta' :^ r', sub _T (hitter :^ oN) (discard _R fs)) 42 | where 43 | help :: Sorted gamma => Context gamma -> Context theta -> Env theta ^ gamma -> 44 | (Env theta ^ gamma, ALL (Radical gamma) theta) 45 | help gamma C0 theta = (theta,A0) 46 | help gamma (_Theta :\ (s,_,_I :^ _R)) (ES p :^ r) = p :^ r >^< \theta i -> 47 | case help gamma _Theta theta of 48 | (theta', fs) -> sortedObj _R $ case i of 49 | IS t :^ r -> let f = t :^ r ::: sub _I (hitter :^ oN) (discard _R fs) 50 | in (mapIx ES $ pair theta' (mapIx IS (etaExpand gamma f)), 51 | AS fs f) 52 | IP p :^ r -> (mapIx ES $ pair theta' i, AS fs (RP (p :^ r))) 53 | 54 | defEq :: Sorted gamma => 55 | Context gamma -> 56 | Term Chk ^ gamma -> Term Chk ^ gamma -> Term Chk ^ gamma -> 57 | Maybe () 58 | defEq gamma ty t t' = 59 | eq (etaExpand gamma (t ::: ty)) (etaExpand gamma (t' ::: ty)) 60 | 61 | -------------------------------------------------------------------------------- /April17/Elaborator.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GADTs, DataKinds, TypeOperators, KindSignatures, TypeFamilies #-} 2 | module Elaborator where 3 | 4 | import Data.Type.Equality((:~:)(Refl)) 
5 | import Control.Monad.RWS 6 | 7 | import Utils 8 | import Raw 9 | import OPE 10 | import Kernel 11 | import DefEq 12 | import ProofState 13 | 14 | data Elaborator :: (Bwd Sort -> *) -> (Bwd Sort -> *) where 15 | Reject :: String -> Elaborator p gamma 16 | Under :: (Sorty s, String, Info s ^ gamma) -> 17 | ELAB (f ^ (gamma :< s)) (gamma :< s) -> 18 | Elaboratkey ((s !- f) ^ gamma) gamma 19 | Fetch :: Sorty s -> String -> Elaboratkey (Fetching gamma s) gamma 20 | -- Context :: Elaborator (Context gamma @= gamma) gamma 21 | DefEq :: Term Chk ^ gamma -> Term Chk ^ gamma -> Term Chk ^ gamma -> 22 | Elaboratkey () gamma 23 | Query :: String -> Term Chk ^ gamma -> 24 | Elaboratkey (Term Chk ^ gamma) gamma 25 | Define :: Radical gamma Syn -> Elaboratkey (Unit ^ gamma) gamma 26 | 27 | data Fetching :: Bwd Sort -> Sort -> * where 28 | FDefn :: Defn theta s -> Fetching gamma s 29 | FHole :: Meta theta s -> Fetching gamma s 30 | FCtxt :: (This s ^ gamma, Info s ^ gamma) -> Fetching gamma s 31 | 32 | type Elaboratkey a gamma = Elaborator (a @= gamma) gamma 33 | type ELAB a gamma = Prog Elaborator (a @= gamma) gamma 34 | 35 | chkBind :: Sorted gamma => 36 | Sorty s -> Info s ^ gamma -> 37 | (s !- Term Chk) ^ gamma -> Raw c -> 38 | ELAB ((s !- Term Chk) ^ gamma) gamma 39 | chkBind s _S _T (RB _ x t) = 40 | cmd $ Under (s , x , _S) $ chk (dive _T) t 41 | chkBind s _S _T t = 42 | cmd $ Under (s , "" , _S) $ chk (dive _T) t 43 | 44 | chk :: Sorted gamma => 45 | Term Chk ^ gamma -> Raw c -> 46 | ELAB (Term Chk ^ gamma) gamma 47 | chk _T (RA _ ('?':xs)) = cmd $ Query xs _T 48 | chk (Star Void :^ r) (RA _ "Type") = raturn $ Star Void :^ r 49 | chk (Star Void :^ r) (RC (RA _ "Pi") (_S :-: Only _T)) = 50 | chk star _S @>= \_S -> 51 | chkBind Syny _S (mapIx K star) _T @>= \_T -> 52 | raturn $ mapIx Pi (pair _S _T ) 53 | chk (Pi _ST :^ r) (RC (RA _ "\\") (Only t)) = _ST :^ r >^< \ _S _T -> 54 | chkBind Syny _S _T t @>= \t -> 55 | raturn $ mapIx Lam t 56 | chk _T t = 57 | syn t Nothing @>= \(s ::: _S) -> 58 | cmd (DefEq star _S _T) @> 59 | raturn s 60 | 61 | syn :: Sorted gamma => Raw c -> Maybe (NEL (Raw c)) -> 62 | ELAB (Radical gamma Syn) gamma 63 | syn (RC t (RA _ ":" :-: Only _T)) rs = 64 | chk star _T @>= \_T -> 65 | chk _T t @>= \t -> 66 | spine (t ::: _T) rs 67 | syn (RA _ x) rs = 68 | cmd (Fetch Syny x) @>= \ f -> case f of 69 | FCtxt (x , _S) -> spine (mapIx (E . 
V) x ::: _S) rs 70 | FHole m@(Meta Syny _ _Theta _T) -> 71 | params _Theta rs @>= \ (theta :^ r, sbst, rs) -> 72 | spine (E (Hole m theta) :^ r ::: joinH (sub _T (hitter :^ oN) sbst)) rs 73 | FDefn m@(Defn Syny _ _Theta tT) -> 74 | params _Theta rs @>= \ (_, sbst, rs) -> 75 | spine (subRadical tT (hitter :^ oN) sbst) rs 76 | syn (RC f as) bs = syn f (Just (nconc as bs)) 77 | syn _ _ = cmd $ Reject "raised eyebrow" 78 | 79 | spine :: Sorted gamma => Radical gamma Syn -> 80 | Maybe (NEL (Raw c)) -> ELAB (Radical gamma Syn) gamma 81 | spine h Nothing = raturn h 82 | spine h@(f ::: Pi _ST :^ r) (Just (s :- rs)) = _ST :^ r >^< \ _S _T -> 83 | chk _S s @>= \s -> 84 | spine (app h s) rs 85 | spine _ _ = cmd $ Reject "raised eyebrow" 86 | 87 | params :: Sorted gamma => Context theta -> Maybe (NEL (Raw c)) -> 88 | ELAB ( Env theta ^ gamma -- for a Hole 89 | , ALL (Radical gamma) theta -- for substitution 90 | , Maybe (NEL (Raw c)) -- unconsumed 91 | ) gamma 92 | params C0 rs = raturn (E0 Void :^ oN, A0, rs) 93 | params (_Theta :\ (s, _, _I)) rs = 94 | params _Theta rs @>= \ (theta, sbst, rs) -> 95 | case rs of 96 | Nothing -> cmd $ Reject "Underinstantiation!" 97 | Just (r :- rs) -> case s of 98 | Syny -> 99 | let _T = joinH (sub _I (hitter :^ oN) sbst) 100 | in chk _T r @>= \ t -> 101 | raturn (mapIx ES (pair theta (mapIx IS t)), AS sbst (t ::: _T), rs) 102 | Pnty -> cmd $ Reject "We can't see the point yet!" 103 | -- FIXME 104 | 105 | 106 | define :: Sorted gamma => Raw c -> ELAB (Unit ^ gamma) gamma 107 | define (RC (RA _ "=") (Only t)) = 108 | syn t Nothing @>= \ t -> 109 | cmd (Define t) 110 | define (RC _S (Only (RB _ x t))) = 111 | chk star _S @>= \ _S -> 112 | cmd (Under (Syny, x, _S) (define t)) @> 113 | raturn (Void :^ oN) 114 | define _ = cmd $ Reject "I didn't think much of that definition" 115 | 116 | 117 | ------------------------------------------------------------------------------ 118 | -- Implementation of Elaborator Interface 119 | ------------------------------------------------------------------------------ 120 | 121 | type Elab gamma = 122 | RWST (ProofState, Context gamma, LongName) [Entity] Int Maybe 123 | 124 | elab :: Sorted gamma => ELAB a gamma -> Elab gamma a 125 | elab (RET (At a)) = return a 126 | elab (DO (Reject s) _) = fail s 127 | elab (DO (Under b@(_,x,_) p) k) = do 128 | (ps, gamma, y) <- ask 129 | i <- get 130 | (t, i', w) <- lift $ runRWST (elab p) (ps, gamma :\ b, y) i 131 | tell w 132 | put i' 133 | elab (k RET (At (abstract x t))) 134 | elab (DO (Fetch s x) k) = do 135 | (ps@(Cur ez _ _), gamma, _) <- ask 136 | let (x', h) = parseName ps x 137 | glo B0 = Nothing 138 | glo (ez :< EHole m) | h && metaName m == x' = do 139 | Refl <- sortEq (metaSort m) s 140 | return (FHole m) 141 | glo (ez :< EDefn m) | not h && defnName m == x' = do 142 | Refl <- sortEq (defnSort m) s 143 | return (FDefn m) 144 | glo (ez :< _) = glo ez 145 | fetch :: Sorty s -> Context gamma -> 146 | Maybe (This s ^ gamma, Info s ^ gamma) 147 | fetch s C0 = Nothing 148 | fetch s (gamma :\ (s',y,i :^ r)) = 149 | if x == y then 150 | do Refl <- sortEq s s' 151 | return $ (It :^ OS oN, i :^ O' r) 152 | else 153 | do (x :^ r, i :^ r') <- fetch s gamma 154 | return $ (x :^ O' r, i :^ O' r') 155 | case fetch s gamma of 156 | Just a -> elab (k RET (At (FCtxt a))) 157 | Nothing -> case glo ez of 158 | Just f -> elab (k RET (At f)) 159 | Nothing -> fail ("Can't find " ++ x) 160 | elab (DO (DefEq _T t t') k) = do 161 | (ps, gamma, y) <- ask 162 | lift $ defEq gamma _T t t' 163 | elab (k RET (At ())) 
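-- Editorial note (not in the original source): the Query clause below mints a
-- fresh hole, named by the running counter under the ambient prefix (the text
-- the user typed after '?' is not itself used for the name), logs it as an
-- EHole entity via the writer, and returns the hole applied to idEnv gamma
-- (presumably the identity environment over the current context).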
164 | elab (DO (Query x _T) k) = do 165 | (ps, gamma, y) <- ask 166 | i <- get 167 | put (i + 1) 168 | let m = Meta Syny (mappend y (LongName [show i])) gamma _T 169 | tell [EHole m] 170 | elab (k RET (At (E (Hole m (idEnv gamma)) :^ oI))) 171 | elab (DO (Define tT) k) = do 172 | (ps, gamma, y) <- ask 173 | tell [EDefn (Defn Syny y gamma tT)] 174 | elab (k RET (At (Void :^ oN))) 175 | 176 | tryElab :: Sorted gamma => (ProofState, Context gamma, LongName) -> 177 | ELAB a gamma -> Maybe (a, ProofState) 178 | tryElab stuff@(Cur bef u aft, _, _) p = do 179 | (a, _, w) <- runRWST (elab p) stuff 0 180 | let (ez, es) = fishFace B0 w 181 | return (a, Cur (bef +<+ ez) u (es ++ aft)) 182 | 183 | fishFace :: Bwd Entity -> [Entity] -> (Bwd Entity, [Entity]) 184 | fishFace ez [] = (ez, []) 185 | fishFace ez es@(EHole _ : _) = (ez, es) 186 | fishFace ez (e : es) = fishFace (ez :< e) es 187 | 188 | 189 | -- :l Elaborator Render 190 | -- :m Elaborator Render Raw OPE Kernel 191 | -- render N0 <$> (chk C0 (Star Void :^ OZ) =<< rawString "Pi Type X. Pi X x. X") 192 | -- let Just idtype = (chk C0 (Star Void :^ OZ) =<< rawString "Pi Type X. Pi X x. X") 193 | -- render N0 <$> (chk C0 idtype =<< rawString "\\ X. \\ x. x") 194 | -------------------------------------------------------------------------------- /April17/Main.hs: -------------------------------------------------------------------------------- 1 | ------------------------------------------------------------------------------ 2 | ----- ----- 3 | ----- dP dP ----- 4 | ----- 88 88 ----- 5 | ----- 88d888b. 88 .d8888b. d8888P dP dP 88d888b. dP dP .d8888b. ----- 6 | ----- 88' `88 88 88' `88 88 88 88 88' `88 88 88 Y8ooooo. ----- 7 | ----- 88. .88 88 88. .88 88 88. .88 88. .88 88. .88 88 ----- 8 | ----- 88Y888P' dP `88888P8 dP `8888P88 88Y888P' `88888P' `88888P' ----- 9 | ----- 88 .88 88 ----- 10 | ----- dP d8888P dP ----- 11 | ----- ----- 12 | ------------------------------------------------------------------------------ 13 | 14 | module Main where 15 | 16 | import System.IO 17 | import Utils 18 | import Raw 19 | import ProofState 20 | import Command 21 | 22 | version :: String 23 | version = "1,000,000 years BC" 24 | 25 | banner :: IO () 26 | banner = mapM_ putStrLn 27 | [" dP dP " 28 | ," 88 88 " 29 | ,"88d888b. 88 .d8888b. d8888P dP dP 88d888b. dP dP .d8888b. " 30 | ,"88' `88 88 88' `88 88 88 88 88' `88 88 88 Y8ooooo. " 31 | ,"88. .88 88 88. .88 88 88. .88 88. .88 88. .88 88 " 32 | ,"88Y888P' dP `88888P8 dP `8888P88 88Y888P' `88888P' `88888P' " 33 | ,"88 .88 88 " 34 | ,"dP d8888P dP " 35 | ,"" 36 | ," version " ++ version 37 | ,"" 38 | ,"quit quits" 39 | ,"" 40 | ] 41 | 42 | main :: IO () 43 | main = do 44 | banner 45 | hSetBuffering stdin NoBuffering 46 | hSetBuffering stdout NoBuffering 47 | hSetEcho stdin False 48 | mainLoop B0 initialProofState True 49 | putStrLn "TATARANOO!" 
50 | putStrLn "" 51 | 52 | mainLoop :: Bwd ProofState -- history unto the dawn of time 53 | -> ProofState -- the present 54 | -> Bool -- is a redisplay necessary 55 | -> IO () -- break stuff, go thud 56 | mainLoop oldz new b = do 57 | if b then mapM_ putStrLn (display new) else return () 58 | putStrLn "" 59 | putStr (prompt new ++ "> ") 60 | r <- rawIO 61 | putStrLn "" 62 | case r of 63 | RA _ "quit" -> return () 64 | RA _ "undo" -> case oldz of 65 | B0 -> do 66 | putStrLn "Nothing to undo" 67 | mainLoop B0 new False 68 | oldz :< old -> mainLoop oldz old True 69 | c -> case command new c of 70 | (ss, m) -> mapM_ putStrLn ss >> case m of 71 | Just newer -> mainLoop (oldz :< new) newer True 72 | Nothing -> mainLoop oldz new False 73 | _ -> mainLoop oldz new True 74 | -- mores stuff should happen here 75 | -------------------------------------------------------------------------------- /April17/OPE.hs: -------------------------------------------------------------------------------- 1 | ------------------------------------------------------------------------------ 2 | ----- ----- 3 | ----- Order-Preserving Embeddings and Coproducts of their Slices ----- 4 | ----- ----- 5 | ------------------------------------------------------------------------------ 6 | 7 | {-# LANGUAGE GADTs, DataKinds, TypeOperators, KindSignatures, 8 | ConstraintKinds, RankNTypes, FlexibleInstances, 9 | PolyKinds #-} 10 | 11 | module OPE where 12 | 13 | import Prelude hiding ((^^)) 14 | import Data.Type.Equality((:~:)(Refl)) 15 | import Utils 16 | 17 | 18 | ------------------------------------------------------------------------------ 19 | -- order-preserving embeddings 20 | ------------------------------------------------------------------------------ 21 | 22 | data (<=) :: Bwd s -> Bwd s -> * where 23 | OZ :: B0 <= B0 24 | OS :: gamma <= delta -> (gamma :< s) <= (delta :< s) 25 | O' :: gamma <= delta -> gamma <= (delta :< s) 26 | 27 | -- composition 28 | (-<=-) :: delta <= theta -> gamma <= delta -> gamma <= theta 29 | OZ -<=- OZ = OZ 30 | OS r -<=- OS r' = OS (r -<=- r') 31 | OS r -<=- O' r' = O' (r -<=- r') 32 | O' r -<=- r' = O' (r -<=- r') 33 | infixr 9 -<=- 34 | 35 | class Sorted (gamma :: Bwd s) where 36 | oI :: gamma <= gamma -- identity 37 | oN :: B0 <= gamma -- initiality 38 | misser :: Select gamma B0 gamma 39 | hitter :: Select gamma gamma B0 40 | copL :: CoP gamma B0 gamma 41 | copR :: CoP B0 gamma gamma 42 | 43 | instance Sorted B0 where 44 | oI = OZ 45 | oN = OZ 46 | misser = None 47 | hitter = None 48 | copL = CZZ 49 | copR = CZZ 50 | 51 | instance Sorted gamma => Sorted (gamma :< s) where 52 | oI = OS oI 53 | oN = O' oN 54 | misser = Miss misser 55 | hitter = Hit hitter 56 | copL = CS' copL 57 | copR = C'S copR 58 | 59 | sortedObj :: gamma <= delta -> Holds (Sorted gamma, Sorted delta) 60 | sortedObj OZ t = t 61 | sortedObj (OS r) t = sortedObj r t 62 | sortedObj (O' r) t = sortedObj r t 63 | 64 | discard :: gamma <= delta -> ALL p delta -> ALL p gamma 65 | discard OZ A0 = A0 66 | discard (OS r) (AS ps p) = AS (discard r ps) p 67 | discard (O' r) (AS ps _) = discard r ps 68 | 69 | missDiscard :: CoP gamma0 gamma1 gamma -> 70 | Select gamma theta ^ delta -> ALL f theta -> 71 | (forall theta0 theta1. 
(Sorted theta0,Sorted theta1) => 72 | Select gamma0 theta0 ^ delta -> ALL f theta0 -> 73 | Select gamma1 theta1 ^ delta -> ALL f theta1 -> t) -> 74 | t 75 | missDiscard c (xz :^ r) fz g = case hits xz c of 76 | Hits xz0 xz1 c0 c1 -> 77 | g (xz0 :^ r -<=- lCoP c1) 78 | (discard (lCoP c0) fz) 79 | (xz1 :^ r -<=- rCoP c1) 80 | (discard (rCoP c0) fz) 81 | 82 | 83 | {- 84 | One wart here is that while every object has an identity, runtime knowledge 85 | of that object (or at least its length) is required to construct that 86 | identity. So we have 87 | oI :: Sorted gamma => gamma <= gamma 88 | rather than 89 | oI :: gamma <= gamma 90 | 91 | As things stand, we cannot make <= an instance of Category. 92 | 93 | Two options for "improving" the situation: 94 | 1. Replace OZ (identity for B0) with OI (identity in general) and suck up 95 | the resulting redundancy. 96 | 2. Degrade Category to "Partial Category", parametrising over a constraint 97 | (Sorted, in this case) which says which objects have identities. The 98 | sortedObj function would move into the class, asserting that any 99 | object which is the source or target of any morphism must have an 100 | identity morphism. 101 | -} 102 | 103 | 104 | ------------------------------------------------------------------------------ 105 | -- thing-with-thinning 106 | ------------------------------------------------------------------------------ 107 | 108 | data (^) :: (Bwd s -> *) -> (Bwd s -> *) where 109 | (:^) :: f gamma -> gamma <= delta -> f ^ delta 110 | infixl 5 :^ 111 | 112 | instance FunctorIx (^) where 113 | mapIx f (x :^ r) = f x :^ r 114 | 115 | {- 116 | Morally, things-with-thinning is a MonadIx, with 117 | returnIx x = x :^ oI 118 | joinIx ((x :^ r) :^ t) = x :^ (t -<=- r) 119 | Again, however, the need for a Sorted constraint messes that up. 120 | -} 121 | 122 | wk :: f ^ gamma -> f ^ (gamma :< s) 123 | wk (f :^ r) = f :^ O' r 124 | 125 | (^^) :: f ^ gamma -> gamma <= delta -> f ^ delta 126 | (f :^ r) ^^ r' = f :^ r' -<=- r 127 | 128 | joinH :: ((^) f) ^ gamma -> f ^ gamma 129 | joinH (fH :^ r) = fH ^^ r 130 | 131 | ------------------------------------------------------------------------------ 132 | -- coproduct of slice 133 | ------------------------------------------------------------------------------ 134 | 135 | -- CoP gamma delta theta encodes a pair of embeddings, 136 | -- lCoP :: gamma <= theta, rCoP :: delta <= theta 137 | -- which, between them, cover theta. 138 | 139 | data CoP :: Bwd s -> Bwd s -> Bwd s -> * where 140 | CZZ :: CoP B0 B0 B0 141 | CSS :: CoP gamma delta theta -> CoP (gamma :< s) (delta :< s) (theta :< s) 142 | CS' :: CoP gamma delta theta -> CoP (gamma :< s) delta (theta :< s) 143 | C'S :: CoP gamma delta theta -> CoP gamma (delta :< s) (theta :< s) 144 | 145 | lCoP :: CoP gamma delta theta -> gamma <= theta 146 | lCoP CZZ = OZ 147 | lCoP (CSS c) = OS (lCoP c) 148 | lCoP (CS' c) = OS (lCoP c) 149 | lCoP (C'S c) = O' (lCoP c) 150 | 151 | rCoP :: CoP gamma delta theta -> delta <= theta 152 | rCoP CZZ = OZ 153 | rCoP (CSS c) = OS (rCoP c) 154 | rCoP (CS' c) = O' (rCoP c) 155 | rCoP (C'S c) = OS (rCoP c) 156 | 157 | sortedCoP :: CoP gamma delta theta -> 158 | Holds (Sorted gamma, Sorted delta, Sorted theta) 159 | sortedCoP c t = sortedObj (lCoP c) $ sortedObj (rCoP c) t 160 | 161 | -- Given two embeddings which may not cover their shared codomain, 162 | -- we may compute that part of their codomain which they *do* cover, 163 | -- together with its embedding into the whole. 
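-- Editorial sketch of a worked example (not in the original source):
-- over theta = B0 :< s :< s, take r0 = O' (OS OZ) (keeping only the older
-- entry) and r1 = OS (O' OZ) (keeping only the newer one); then
--   coP r0 r1 = C'S (CS' CZZ) :^ OS (OS OZ)
-- so between them the two embeddings cover all of theta, and the covered
-- part embeds back into theta by the identity.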
164 | 165 | coP :: gamma <= theta -> delta <= theta -> CoP gamma delta ^ theta 166 | coP OZ OZ = CZZ :^ OZ 167 | coP (OS r) (OS r') = case coP r r' of f :^ t -> CSS f :^ OS t 168 | coP (OS r) (O' r') = case coP r r' of f :^ t -> CS' f :^ OS t 169 | coP (O' r) (OS r') = case coP r r' of f :^ t -> C'S f :^ OS t 170 | coP (O' r) (O' r') = case coP r r' of f :^ t -> f :^ O' t 171 | 172 | {- 173 | The unspoken invariant here is that 174 | 175 | coP r0 r1 = c :^ r 176 | --------------------------------------------- 177 | r0 = r -<=- lCoP c & r1 = r -<=- rCoP c 178 | 179 | theta 180 | ^ 181 | /|\ 182 | / r \ 183 | / | \ 184 | r0 _^_ r1 185 | / _/ \_ \ 186 | /_/ c \_\ 187 | ' ' 188 | gamma delta 189 | 190 | and the apex of c is as low as possible. 191 | 192 | Any other such diagram (r0 = r' -<=- r0' & r1 = r' -<=- r1') 193 | factors through ours: for some s, 194 | r0' = s -<=- lCoP c & r1' = s -<=- rCop c & r = r' -<=- s 195 | 196 | The slice category, <=/theta, represents subcontexts of theta: its 197 | objects are embeddings into theta; their morphisms are triangles. 198 | Here, our c is the witness that r is the categorical coproduct of 199 | r0 and r1. 200 | -} 201 | 202 | 203 | ------------------------------------------------------------------------------ 204 | -- selections 205 | ------------------------------------------------------------------------------ 206 | 207 | data Select :: Bwd s -> Bwd s -> Bwd s -> * where 208 | None :: Select B0 B0 B0 209 | Hit :: Select gamma' delta gamma -> Select (gamma' :< s) (delta :< s) gamma 210 | Miss :: Select gamma' delta gamma -> Select (gamma' :< s) delta (gamma :< s) 211 | 212 | data Hits :: Bwd s -> Bwd s -> Bwd s -> Bwd s -> * where 213 | Hits :: (Sorted gamma0, Sorted gamma1, Sorted theta0, Sorted theta1) => 214 | Select gamma0' theta0 gamma0 -> Select gamma1' theta1 gamma1 -> 215 | CoP theta0 theta1 theta -> CoP gamma0 gamma1 gamma -> 216 | Hits gamma0' gamma1' theta gamma 217 | 218 | hits :: Select gamma' theta gamma -> CoP gamma0' gamma1' gamma' -> 219 | Hits gamma0' gamma1' theta gamma 220 | hits None CZZ = Hits None None CZZ CZZ 221 | hits (Miss s) (CSS c) = case hits s c of 222 | Hits s0 s1 ctheta cgamma -> Hits (Miss s0) (Miss s1) ctheta (CSS cgamma) 223 | hits (Miss s) (CS' c) = case hits s c of 224 | Hits s0 s1 ctheta cgamma -> Hits (Miss s0) s1 ctheta (CS' cgamma) 225 | hits (Miss s) (C'S c) = case hits s c of 226 | Hits s0 s1 ctheta cgamma -> Hits s0 (Miss s1) ctheta (C'S cgamma) 227 | hits (Hit s) (CSS c) = case hits s c of 228 | Hits s0 s1 ctheta cgamma -> Hits (Hit s0) (Hit s1) (CSS ctheta) cgamma 229 | hits (Hit s) (CS' c) = case hits s c of 230 | Hits s0 s1 ctheta cgamma -> Hits (Hit s0) s1 (CS' ctheta) cgamma 231 | hits (Hit s) (C'S c) = case hits s c of 232 | Hits s0 s1 ctheta cgamma -> Hits s0 (Hit s1) (C'S ctheta) cgamma 233 | 234 | missAll :: Select gamma' B0 gamma -> gamma' :~: gamma 235 | missAll None = Refl 236 | missAll (Miss s) = case missAll s of Refl -> Refl 237 | 238 | data ThickSelect :: Bwd s -> Bwd s -> Bwd s -> * where 239 | ThickSelect :: Select gamma theta delta -> 240 | theta <= theta' -> delta <= delta' -> 241 | ThickSelect gamma theta' delta' 242 | 243 | thickSelect :: gamma <= gamma' -> Select gamma' theta' delta' -> 244 | ThickSelect gamma theta' delta' 245 | thickSelect OZ None = ThickSelect None OZ OZ 246 | thickSelect (OS r) (Hit s) = case thickSelect r s of 247 | ThickSelect s rtheta rdelta -> ThickSelect (Hit s) (OS rtheta) rdelta 248 | thickSelect (OS r) (Miss s) = case thickSelect r s of 249 | 
ThickSelect s rtheta rdelta -> ThickSelect (Miss s) rtheta (OS rdelta) 250 | thickSelect (O' r) (Hit s) = case thickSelect r s of 251 | ThickSelect s rtheta rdelta -> ThickSelect s (O' rtheta) rdelta 252 | thickSelect (O' r) (Miss s) = case thickSelect r s of 253 | ThickSelect s rtheta rdelta -> ThickSelect s rtheta (O' rdelta) 254 | 255 | wkSelect :: (s !- f) gamma -> Select gamma theta ^ delta -> 256 | (forall gamma'. 257 | String -> f gamma' -> Select gamma' theta ^ (delta :< s) -> t) 258 | -> t 259 | wkSelect (L x t) (xz :^ r) g = g x t (Miss xz :^ OS r) 260 | wkSelect (K t) (xz :^ r) g = g "YKW" t (xz :^ O' r) 261 | 262 | sortedSelect :: Select gamma theta delta -> 263 | Holds (Sorted gamma,Sorted theta,Sorted delta) 264 | sortedSelect None t = t 265 | sortedSelect (Hit s) t = sortedSelect s t 266 | sortedSelect (Miss s) t = sortedSelect s t 267 | 268 | 269 | ------------------------------------------------------------------------------ 270 | -- relevant data structures 271 | ------------------------------------------------------------------------------ 272 | 273 | -- unit and product 274 | 275 | data Unit :: Bwd s -> * where 276 | Void :: Unit B0 277 | 278 | void :: Sorted gamma => Unit ^ gamma 279 | void = Void :^ oN 280 | 281 | data (><) :: (Bwd s -> *) -> (Bwd s -> *) -> (Bwd s -> *) where 282 | Pair :: CoP gamma delta theta -> f gamma -> g delta -> (f >< g) theta 283 | infixr 8 >< 284 | 285 | pair :: f ^ theta -> g ^ theta -> (f >< g) ^ theta 286 | pair (f :^ r) (g :^ r') = case coP r r' of c :^ t -> Pair c f g :^ t 287 | 288 | (>^<) :: (f >< g) ^ theta -> (f ^ theta -> g ^ theta -> t) -> t 289 | Pair c f g :^ r >^< h = h (f :^ r -<=- lCoP c) (g :^ r -<=- rCoP c) 290 | infixr 4 >^< 291 | 292 | -- usage and binding 293 | 294 | data This :: s -> (Bwd s -> *) where 295 | It :: This s (B0 :< s) 296 | 297 | data (!-) :: s -> (Bwd s -> *) -> (Bwd s -> *) where 298 | K :: f gamma -> (s !- f) gamma 299 | L :: String -> f (gamma :< s) -> (s !- f) gamma 300 | infixr 9 !- 301 | 302 | abstract :: String -> f ^ (gamma :< s) -> (s !- f) ^ gamma 303 | abstract x (f :^ OS r) = L x f :^ r 304 | abstract _ (f :^ O' r) = K f :^ r 305 | 306 | dive :: (s !- f) ^ gamma -> f ^ (gamma :< s) 307 | dive (K f :^ r) = f :^ O' r 308 | dive (L _ f :^ r) = f :^ OS r 309 | 310 | nom :: (s !- f) gamma -> String 311 | nom (L x _) = x 312 | nom (K _) = "h" 313 | 314 | ------------------------------------------------------------------------------ 315 | -- Equality testing 316 | ------------------------------------------------------------------------------ 317 | 318 | opeEq :: gamma <= theta -> delta <= theta -> Maybe (gamma :~: delta) 319 | opeEq OZ OZ = Just Refl 320 | opeEq (OS r) (OS r') = do 321 | Refl <- opeEq r r' 322 | return Refl 323 | opeEq (O' r) (O' r') = do 324 | Refl <- opeEq r r' 325 | return Refl 326 | opeEq _ _ = Nothing 327 | 328 | class SyntaxEq (f :: Bwd s -> *) where 329 | eq :: f gamma -> f gamma -> Maybe () 330 | 331 | instance SyntaxEq f => SyntaxEq ((^) f) where 332 | eq (f :^ r) (g :^ r') = do 333 | Refl <- opeEq r r' 334 | eq f g 335 | 336 | instance SyntaxEq Unit where 337 | eq _ _ = return () 338 | 339 | instance (SyntaxEq f, SyntaxEq g) => SyntaxEq (f >< g) where 340 | eq (Pair c f g) (Pair c' f' g') = do 341 | eq (f :^ lCoP c) (f' :^ lCoP c') 342 | eq (g :^ rCoP c) (g' :^ rCoP c') 343 | 344 | instance SyntaxEq (This s) where 345 | eq _ _ = return () 346 | 347 | instance SyntaxEq f => SyntaxEq (s !- f) where 348 | eq (K t) (K t') = eq t t' 349 | eq (L _ t) (L _ t') = eq t t' 350 | eq _ 
_ = fail "gotcha" 351 | -------------------------------------------------------------------------------- /April17/ProofState.hs: -------------------------------------------------------------------------------- 1 | ------------------------------------------------------------------------------ 2 | ----- ----- 3 | ----- The Proof State ----- 4 | ----- ----- 5 | ------------------------------------------------------------------------------ 6 | 7 | {-# LANGUAGE KindSignatures, GADTs, DataKinds, DeriveFunctor, 8 | DeriveFoldable, DeriveTraversable #-} 9 | 10 | module ProofState where 11 | 12 | import Data.Maybe 13 | import Data.List 14 | import Data.List.Split 15 | import Control.Monad 16 | 17 | import Utils 18 | import OPE 19 | import Kernel 20 | import Raw 21 | import Render 22 | 23 | data Cursor u x = Cur (Bwd x) u [x] deriving (Functor, Foldable, Traversable) 24 | 25 | data Defn (delta :: Bwd Sort) (s :: Sort) 26 | = Sorted delta => 27 | Defn {defnSort :: Sorty s 28 | ,defnName :: LongName 29 | ,defnContext :: Context delta 30 | ,defnRadical :: Radical delta s 31 | } 32 | 33 | data Entity :: * where 34 | EHole :: Meta delta s -> Entity 35 | EDefn :: Defn delta s -> Entity 36 | 37 | nameOf :: Entity -> LongName 38 | nameOf (EHole m) = metaName m 39 | nameOf (EDefn d) = defnName d 40 | 41 | type Prefix = LongName 42 | type Range = (Maybe LongName, Maybe LongName) 43 | 44 | type ProofState = Cursor (Prefix, Range) Entity 45 | type ViewPort = Cursor (Prefix, Range) (LongName, Entity) 46 | 47 | initialProofState :: ProofState 48 | initialProofState = Cur B0 zoomedOut [] 49 | 50 | zoomedOut :: (Prefix, Range) 51 | zoomedOut = (mempty, (Nothing, Nothing)) 52 | 53 | texas :: ProofState -> [String] 54 | texas _ = -- art by Joan Stark 55 | [" ___" 56 | ," | |" 57 | ," | '._ _" 58 | ," | ``` '|" 59 | ," ____| \\" 60 | ," `-. |" 61 | ," \\ _ /" 62 | ," ` `\\ /`" 63 | ," \\ .'`" 64 | ," jgs \\ \\ " 65 | ," '-;" 66 | ] 67 | 68 | prompt :: ProofState -> String 69 | prompt (Cur _ (p,r) _) = show p ++ case r of 70 | (Nothing,Nothing) -> "" 71 | (Nothing,Just y) -> " (^ " ++ show y ++ ")" 72 | (Just x,Nothing) -> " (" ++ show x ++ " ^)" 73 | (Just x,Just y) -> " (" ++ show x ++ " ^ " ++ show y ++ ")" 74 | 75 | parseName :: ProofState -> String -> (LongName, Bool {-holey ?-}) 76 | parseName (Cur _ (p, _) _) x = case x of 77 | '/' : _ -> (s, h) 78 | _ -> (mappend p s, h) 79 | where 80 | (qs, y) = partition (== '?') x 81 | s = LongName (filter (not . null) (splitWhen (== '/') y)) 82 | h = not (null qs) 83 | 84 | newName :: ProofState -> String -> Maybe LongName 85 | newName ps x = do 86 | let (y, _) = parseName ps x 87 | guard . not $ any (isPrefixOf (longName y) . longName . nameOf) ps 88 | return y 89 | 90 | inView :: (LongName, Maybe LongName) -> Entity -> Maybe (LongName, Entity) 91 | inView (p, n) e = do 92 | m <- stripPrefix (longName p) (longName (nameOf e)) 93 | guard . 
not $ isPrefixOf (maybe [""] longName n) m 94 | -- [""] cannot prefix a name 95 | return (LongName m, e) 96 | 97 | viewPort :: ProofState -> ViewPort 98 | viewPort (Cur ez u@(p, (n0, n1)) es) = Cur (findz ez) u (finds es) where 99 | findz (ez :< e) = case inView (p, n0) e of 100 | Nothing -> B0 101 | Just me -> findz ez :< me 102 | findz _ = B0 103 | finds (e : es) = case inView (p, n1) e of 104 | Nothing -> [] 105 | Just me -> me : finds es 106 | finds _ = [] 107 | 108 | displayContext :: Context gamma -> ([String], Namings gamma) 109 | displayContext C0 = ([], N0) 110 | displayContext (gamma :\ (s, x, i)) = (bs ++ [b], NS nz x) where 111 | (bs, nz) = displayContext gamma 112 | b = " " ++ case s of 113 | Syny -> show (RA Vrble x) ++ " : " ++ show (render nz i) 114 | Pnty -> show (RA Vrble x) 115 | 116 | displayEntity :: Char -> (LongName, Entity) -> [String] 117 | displayEntity c (y, EHole m) = ("" : bs ++ [rule, h]) where 118 | (bs, nz) = displayContext (metaContext m) 119 | h = " " ++ show (RA Holey (show y ++ "?")) ++ case metaSort m of 120 | Pnty -> "" 121 | Syny -> " : " ++ show (render nz (metaInfo m)) 122 | rule = replicate (2 + maximum [mylen x | x <- h : bs]) c 123 | displayEntity c (y, EDefn m) = ("" : bs ++ rule : hs) where 124 | (bs, nz) = displayContext (defnContext m) 125 | x = show y 126 | hs = case (defnSort m, defnRadical m) of 127 | (Pnty, RP p) -> [" " ++ show (RA Defin x) 128 | ++ " = " ++ show (renderPnt nz p)] 129 | (Syny, t ::: _T) -> 130 | [ " " ++ show (RA Defin x) ++ " = " ++ show (render nz t) 131 | , replicate (2 + mylen x) ' ' ++ " : " ++ show (render nz _T) 132 | ] 133 | rule = replicate (2 + maximum [mylen x | x <- hs ++ bs]) c 134 | 135 | display :: ProofState -> [String] 136 | display ps = case viewPort ps of 137 | Cur ez _ (e : es) -> foldMap (displayEntity '-') ez ++ 138 | displayEntity '=' e ++ 139 | foldMap (displayEntity '-') es 140 | Cur ez _ [] -> foldMap (displayEntity '-') ez ++ 141 | ["", "==========================="] 142 | 143 | updates :: [Update] -> [Entity] -> [Entity] 144 | updates us [] = [] 145 | updates us (EHole m@(Meta s x _Theta _I) : es) = subInfo s $ 146 | updateContext _Theta us $ \ _Theta -> 147 | let m' = Meta s x _Theta (joinH (update _I us)) 148 | in EHole m' : updates ((m :=> Renew m') : us) es 149 | updates us (EDefn (Defn s x _Theta r) : es) = 150 | updateContext _Theta us $ \ _Theta -> 151 | EDefn (Defn s x _Theta (updateRadical r us)) : updates us es 152 | 153 | fwdToGoal :: ProofState -> ProofState 154 | fwdToGoal (Cur ez u@(p, (_, n)) (e@(EDefn _) : es)) 155 | | Just _ <- inView (p, n) e 156 | = fwdToGoal (Cur (ez :< e) u es) 157 | fwdToGoal ps = ps 158 | 159 | fwdToView :: ProofState -> ProofState 160 | fwdToView ps@(Cur ez u@(p, (n, _)) (e : es)) 161 | | Nothing <- inView (p, n) e = fwdToView (Cur (ez :< e) u es) 162 | fwdToView ps = ps 163 | 164 | -------------------------------------------------------------------------------- /April17/Raw.hs: -------------------------------------------------------------------------------- 1 | ------------------------------------------------------------------------------ 2 | ----- ----- 3 | ----- Raw Syntax ----- 4 | ----- ----- 5 | ------------------------------------------------------------------------------ 6 | 7 | {-# LANGUAGE GADTs, DataKinds, KindSignatures, TypeOperators, 8 | TypeFamilies, PatternSynonyms, RankNTypes #-} 9 | 10 | module Raw where 11 | 12 | import Data.Char 13 | import Control.Concurrent 14 | 15 | import Utils 16 | 17 | 18 | 
------------------------------------------------------------------------------ 19 | -- datatype of raw terms: deeply unsubtle 20 | ------------------------------------------------------------------------------ 21 | 22 | data Raw c 23 | = RA c String -- atoms (nonempty, legit characters) 24 | | RB c String (Raw c) -- bindings 25 | | RC (Raw c) (NEL (Raw c)) -- clumps have a head and a nonempty tail 26 | 27 | class Colour c where 28 | colour :: c -> String -> String 29 | 30 | instance Colour () where 31 | colour () x = x 32 | 33 | instance Colour c => Show (Raw c) where 34 | show = big where 35 | big (RA c x) = colour c x 36 | big (RB c x r) = colour c x ++ ". " ++ big r 37 | big (RC r rs) = wee r ++ clu rs 38 | clu (Only r) = case r of 39 | RA c x -> " " ++ colour c x 40 | RB c x r -> " " ++ colour c x ++ ". " ++ big r 41 | RC r rs -> "," ++ clu (r :-: rs) 42 | clu (r :-: rs) = " " ++ wee r ++ clu rs 43 | wee (RA c x) = colour c x 44 | wee r = "(" ++ big r ++ ")" 45 | 46 | 47 | ------------------------------------------------------------------------------ 48 | -- Grammar 49 | ------------------------------------------------------------------------------ 50 | 51 | {- 52 | The only special characters in this syntax are 53 | ( ) , . 54 | which are tokens by themselves. Other tokens (symbols) are as delimited by 55 | specials and whitespace. 56 | 57 | This syntax is designed to do very little analysis: just enough to give us 58 | trees of terms with variable binding. 59 | 60 | The basic picture is that we have 61 | 62 | raw ::= symbol -- an atom, i.e., a tag or identifier 63 | | raw+ -- a clump of subterms 64 | | symbol. raw -- binding 65 | | (symbol) -- grouping for disambiguation 66 | 67 | There are two extra subtleties: 68 | 1. a binding which stands last in a clump needs no parens, e.g., 69 | pi S x. blah de blah 70 | means 71 | (pi S (x. blah de blah)) 72 | 2. 
a "LISP-convention" favours the right-nested, e.g., 73 | a b c, d e f 74 | means 75 | (a b c (d e f)) 76 | -} 77 | 78 | 79 | ------------------------------------------------------------------------------ 80 | -- The ReadLine interface, for parsing with one character lookahead 81 | ------------------------------------------------------------------------------ 82 | 83 | data RLState = Buff -- there is one character in the lookahead buffer 84 | | Nuff -- there is nothing in the lookahead buffer 85 | | Huff -- we've run out of stuff 86 | 87 | type family Buffy i where 88 | Buffy Buff = True 89 | Buffy i = False 90 | 91 | type family Nuffy i where 92 | Nuffy Nuff = True 93 | Nuffy i = False 94 | 95 | type family Huffy i where 96 | Huffy Huff = True 97 | Huffy i = False 98 | 99 | data ReadLine :: (RLState -> *) -> (RLState -> *) where 100 | Peek :: ReadLine (Peeking i) i 101 | -- peek if you like 102 | Grok :: ReadLine (() @= Nuff) Buff 103 | -- accept a character only if you've seen it (and you like it) 104 | Barf :: ReadLine x i 105 | -- complain (notionally, you should have a reason to, but sod it) 106 | 107 | -- if you were peeking in state i, you might end up in state j 108 | data Peeking (i :: RLState) (j :: RLState) where 109 | See :: Huffy i ~ False => Char -> Peeking i Buff 110 | -- if don't know you're at end-of-text, you might get a character 111 | EOT :: Buffy i ~ False => Peeking i Huff 112 | -- if you do know you're not at end-of-text, you won't get end-of-text 113 | 114 | type READ x i = Prog ReadLine (Got x) i 115 | 116 | 117 | ------------------------------------------------------------------------------ 118 | -- parsing Raw with the ReadLine interface 119 | ------------------------------------------------------------------------------ 120 | 121 | -- to eat a raw term, eat a head then check for a tail 122 | rawR :: READ (Raw ()) i 123 | rawR = (headR />= tailR) />= \ rs -> case rs of 124 | Only r -> rgturn r -- if we get a singleton, that's it 125 | r :-: rs -> rgturn (RC r rs) -- otherwise, it's a clump 126 | 127 | -- eat spaces until you see something that isn't 128 | spcR :: READ () i 129 | spcR = cmd Peek ?>= \ x -> case x of 130 | See c | isSpace c -> cmd Grok @> spcR 131 | _ -> rgturn () 132 | 133 | -- grab a valid small raw term (which might be mutated by what's after it) 134 | headR :: READ (Raw ()) i 135 | headR = spcR /> cmd Peek ?>= \ x -> case x of 136 | -- make an atom? 
137 | See c | isAtomCh c -> cmd Grok @> atomR />= \ s -> rgturn (RA () (c : s)) 138 | -- an open paren means we can give a big thing, then a close 139 | See '(' -> cmd Grok @> rawR />= \ r -> closeR /> rgturn r 140 | -- otherwise, it's crap 141 | _ -> cmd Barf 142 | where 143 | -- close paren is required 144 | closeR :: READ () i 145 | closeR = spcR /> cmd Peek ?>= \ x -> case x of 146 | See ')' -> cmd Grok @> rgturn () 147 | _ -> cmd Barf 148 | 149 | -- characters allowed in atoms 150 | isAtomCh :: Char -> Bool 151 | isAtomCh c = (c > ' ') && not (elem c "().,") 152 | 153 | -- keep eating atomic characters 154 | atomR :: READ String i 155 | atomR = cmd Peek ?>= \ x -> case x of 156 | See c | isAtomCh c -> cmd Grok @> atomR />= \ s -> rgturn (c : s) 157 | _ -> rgturn "" 158 | 159 | 160 | -- look after a head; returns a nonempty list that might be a clump 161 | tailR :: Raw () -> READ (NEL (Raw ())) i 162 | tailR r = spcR /> cmd Peek ?>= \ x -> case x of 163 | -- end of text or closing bracket means it's not a clump and we're done 164 | EOT -> rgturn (Only r) 165 | See ')' -> rgturn (Only r) 166 | -- dot means it's not a clump, it's a binding 167 | See '.' -> case r of 168 | RA _ x -> cmd Grok @> rawR />= \ r -> rgturn (Only (RB () x r)) 169 | _ -> cmd Barf 170 | -- comma means the rest of the clump is just one thing 171 | See ',' -> cmd Grok @> rawR />= \ t -> rgturn (r :-: Only t) 172 | -- otherwise, whatever it is is more stuff for the clump 173 | See _ -> (headR />= tailR) />= \ rs -> rgturn (r :-: rs) 174 | 175 | 176 | ------------------------------------------------------------------------------ 177 | -- running the ReadLine interface on a String 178 | ------------------------------------------------------------------------------ 179 | 180 | data RLSTATE (i :: RLState) :: * where 181 | NUFF :: String -> RLSTATE Nuff 182 | BUFF :: Char -> String -> RLSTATE Buff 183 | HUFF :: RLSTATE Huff 184 | 185 | readString :: READ x i -> RLSTATE i -> Maybe x 186 | readString (RET (Got x)) _ = Just x 187 | readString (DO Peek k) HUFF = readString (k RET EOT) HUFF 188 | readString (DO Peek k) (NUFF "") = readString (k RET EOT) HUFF 189 | readString (DO Peek k) (NUFF (c : s)) = readString (k RET (See c)) (BUFF c s) 190 | readString (DO Peek k) (BUFF c s) = readString (k RET (See c)) (BUFF c s) 191 | readString (DO Grok k) (BUFF _ s) = readString (k RET (At ())) (NUFF s) 192 | readString (DO Barf _) _ = Nothing 193 | 194 | 195 | ------------------------------------------------------------------------------ 196 | -- parsing a String as a Raw 197 | ------------------------------------------------------------------------------ 198 | 199 | rawString :: String -> Maybe (Raw ()) 200 | rawString = readString rawR . 
NUFF 201 | 202 | 203 | ------------------------------------------------------------------------------ 204 | -- The IO Handler 205 | ------------------------------------------------------------------------------ 206 | 207 | -- this should have NoBuffering and echo False 208 | 209 | data ReadLog x 210 | = Log0 (READ x Nuff) 211 | | LogGrok (ReadLog x) 212 | | LogPeek (ReadLog x) (READ x Nuff) 213 | 214 | data RLIOSTATE :: RLState -> * where 215 | IONUFF :: RLIOSTATE Nuff 216 | IOBUFF :: Char -> RLIOSTATE Buff 217 | 218 | rlIO :: READ x Nuff -> IO x 219 | rlIO r = logIO (Log0 r) r IONUFF 220 | 221 | logIO :: ReadLog x -> READ x i -> RLIOSTATE i -> IO x 222 | logIO log (RET (Got x)) _ = curse >> unlogIO log 223 | logIO log (DO Peek k) (IOBUFF c) = logIO log (k RET (See c)) (IOBUFF c) 224 | logIO log p@(DO Peek k) IONUFF = do 225 | c <- getChar 226 | case c of 227 | '\b' -> unlogIO log 228 | '\DEL' -> unlogIO log 229 | '\n' -> case nullable p of 230 | Just x -> return x 231 | Nothing -> logIO (LogPeek log p) (k RET (See '\n')) (IOBUFF '\n') 232 | c -> logIO (LogPeek log p) (k RET (See c)) (IOBUFF c) 233 | logIO log (DO Grok k) (IOBUFF c) = do 234 | putChar c 235 | logIO (LogGrok log) (k RET (At ())) IONUFF 236 | logIO log (DO Barf _) _ = curse >> unlogIO log 237 | 238 | nullable :: Buffy i ~ False => READ x i -> Maybe x 239 | nullable (RET (Got x)) = Just x 240 | nullable (DO Barf _) = Nothing 241 | nullable (DO Peek k) = nullable (k RET EOT) 242 | 243 | curse :: IO () 244 | curse = mapM_ paf "!$!#!&!*!%!?!!!!!!!!!!!$!#!&!*!%!?! " where 245 | paf c = do 246 | putChar c 247 | threadDelay 2000 248 | putChar '\b' 249 | 250 | unlogIO :: ReadLog x -> IO x 251 | unlogIO (Log0 r) = rlIO r 252 | unlogIO (LogGrok log) = do 253 | putStr "\b \b" 254 | unlogIO log 255 | unlogIO (LogPeek log p) = logIO log p IONUFF 256 | 257 | rawIO :: IO (Raw ()) 258 | rawIO = rlIO rawR 259 | -------------------------------------------------------------------------------- /April17/Render.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GADTs, DataKinds, TypeOperators, KindSignatures #-} 2 | 3 | module Render where 4 | 5 | import Utils 6 | import OPE 7 | import Raw 8 | import Kernel 9 | 10 | type Namings gamma = NameEnv gamma String 11 | 12 | renderBinder :: Namings gamma -> (s !- Term Chk) ^ gamma -> Raw Atom 13 | renderBinder ns (K t :^ r) = render ns (t :^ r) 14 | renderBinder ns (L x t :^ r) = RB Vrble y (render (NS ns y) (t :^ OS r)) where 15 | y = head (dropWhile (`elem` ns) 16 | (x : [x ++ show i | i <- [0 :: Integer ..]])) 17 | 18 | render :: Namings gamma -> Term Chk ^ gamma -> Raw Atom 19 | render ns (Star _ :^ _) = RA TyCon "Type" 20 | render ns (Pi _ST :^ r) = _ST :^ r >^< \ _S _T -> 21 | RC (RA TyCon "Pi") (render ns _S :-: Only (renderBinder ns _T)) 22 | render ns (Lam t :^ r) = RC (RA DaCon "\\") (Only (renderBinder ns (t :^ r))) 23 | render ns (E e :^ r) = renderSyn ns (e :^ r) Nothing 24 | 25 | itsName :: Namings gamma -> (B0 :< s) <= gamma -> String 26 | itsName (NS _ x) (OS _) = x 27 | itsName (NS ns _) (O' r) = itsName ns r 28 | 29 | renderSyn :: Namings gamma -> Term Syn ^ gamma -> 30 | Maybe (NEL (Raw Atom)) -> Raw Atom 31 | renderSyn ns (V It :^ r) Nothing = RA Vrble (itsName ns r) 32 | renderSyn ns (V It :^ r) (Just rs) = RC (RA Vrble (itsName ns r)) rs 33 | renderSyn ns (App fa :^ r) rs = fa :^ r >^< \f a -> 34 | renderSyn ns f (Just (render ns a :- rs)) 35 | renderSyn ns (Hole m theta :^ r) rs = 36 | renderHole ns (show $ metaName m) (metaContext 
m) (theta :^ r) rs 37 | 38 | renderHole :: Namings gamma -> 39 | String -> Context theta -> Env theta ^ gamma -> 40 | Maybe (NEL (Raw Atom)) -> Raw Atom 41 | renderHole ns h C0 _ Nothing = RA Holey h 42 | renderHole ns h C0 _ (Just rs) = RC (RA Holey h) rs 43 | renderHole ns h (_Theta :\ _) (ES p :^ r) rs = p :^ r >^< \theta i -> 44 | renderHole ns h _Theta theta (Just (renderInstance ns i :- rs)) 45 | 46 | renderInstance :: Namings gamma -> Instance s ^ gamma -> Raw Atom 47 | renderInstance ns (IS s :^ r) = render ns (s :^ r) 48 | renderInstance ns (IP p :^ r) = renderPnt ns (p :^ r) 49 | 50 | renderPnt :: Namings gamma -> Term Pnt ^ gamma -> Raw Atom 51 | renderPnt ns (Hole m theta :^ r) = 52 | renderHole ns (show $ metaName m) (metaContext m) (theta :^ r) Nothing 53 | 54 | 55 | data Atom 56 | = Punct 57 | | TyCon 58 | | DaCon 59 | | Vrble 60 | | Coord 61 | | Holey 62 | | Defin 63 | 64 | instance Show Atom where 65 | show Punct = "\ESC[0m" -- plain 66 | show TyCon = "\ESC[34m" -- blue 67 | show DaCon = "\ESC[31m" -- red 68 | show Vrble = "\ESC[35m" -- magenta 69 | show Coord = "\ESC[36m" -- cyan 70 | show Holey = "\ESC[33m" -- yellow 71 | show Defin = "\ESC[32m" -- green 72 | 73 | instance Colour Atom where 74 | colour c x = show c ++ x ++ show Punct 75 | 76 | -- String length function that discounts colour-change codes 77 | mylen :: String -> Int 78 | mylen ('\ESC' : '[' : s) = go s where 79 | go ('m' : s) = mylen s 80 | go (_ : s) = go s 81 | go [] = 0 82 | mylen (_ : s) = 1 + mylen s 83 | mylen "" = 0 84 | 85 | -- render N0 (Star Void) 86 | -- render N0 (Pi (Pair CZZ (Star Void) (K (Star Void)))) 87 | -- render N0 (Pi (Pair CZZ (Star Void) (L "X" (Pi (Pair (CSS CZZ) (E (V It)) (K (E (V It)))))))) 88 | -------------------------------------------------------------------------------- /April17/Utils.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE KindSignatures, DataKinds, EmptyCase, GADTs, 2 | DeriveFunctor, StandaloneDeriving, PolyKinds, 3 | TypeOperators, ScopedTypeVariables, RankNTypes, 4 | TypeFamilies, UndecidableInstances, PatternSynonyms, 5 | ConstraintKinds, DeriveFoldable, DeriveTraversable #-} 6 | module Utils where 7 | 8 | type family EQ x y where 9 | EQ x x = True 10 | EQ x y = False 11 | 12 | type family OR x y where 13 | OR True y = True 14 | OR x True = True 15 | OR False y = y 16 | OR x False = x 17 | 18 | data Nat = Zero | Suc Nat deriving Show 19 | 20 | type One = Suc Zero 21 | 22 | type family NatLT (m :: Nat) (n :: Nat) where 23 | NatLT m (Suc n) = NatLE m n 24 | NatLT m n = False -- wildcards not supported in ghc<8 25 | 26 | type family NatLE (m :: Nat) (n :: Nat) where 27 | NatLE m n = OR (EQ m n) (NatLT m n) 28 | 29 | data Fin (n :: Nat) where 30 | FZero :: Fin (Suc n) 31 | FSuc :: Fin n -> Fin (Suc n) 32 | 33 | deriving instance Eq (Fin n) 34 | deriving instance Show (Fin n) 35 | 36 | absurd :: Fin Zero -> a 37 | absurd k = case k of {} 38 | 39 | data Vec x (n :: Nat) where 40 | VNil :: Vec x Zero 41 | VCons :: x -> Vec x n -> Vec x (Suc n) 42 | 43 | vlookup :: Fin n -> Vec x n -> x 44 | vlookup FZero (VCons x _ ) = x 45 | vlookup (FSuc i) (VCons _ xs) = vlookup i xs 46 | 47 | -- find the first x in the vector and return its index 48 | velemIndex :: Eq x => x -> Vec x n -> Maybe (Fin n) 49 | velemIndex x VNil = Nothing 50 | velemIndex x (VCons x' xs) = 51 | if x == x' then 52 | Just FZero 53 | else 54 | fmap FSuc (velemIndex x xs) 55 | 56 | -- find the nth x in the vector and return its index 57 | velemIndex' 
:: Eq x => x -> Nat -> Vec x n -> Maybe (Fin n) 58 | velemIndex' x n VNil = Nothing 59 | velemIndex' x n (VCons x' xs) = 60 | if x == x' then 61 | case n of 62 | Zero -> Just FZero 63 | Suc n -> fmap FSuc (velemIndex' x n xs) 64 | else 65 | fmap FSuc (velemIndex' x n xs) 66 | 67 | -- bwd utilities 68 | data Bwd x = B0 | Bwd x :< x deriving (Functor, Foldable, Traversable) 69 | 70 | bmap :: (a -> b) -> Bwd a -> Bwd b 71 | bmap f B0 = B0 72 | bmap f (xs :< x) = bmap f xs :< f x 73 | 74 | (+<+) :: Bwd x -> Bwd x -> Bwd x 75 | xs +<+ B0 = xs 76 | xs +<+ (ys :< y) = (xs +<+ ys) :< y 77 | 78 | (<><) :: Bwd x -> [x] -> Bwd x 79 | xs <>< (y : ys) = (xs :< y) <>< ys 80 | xs <>< [] = xs 81 | 82 | (<>>) :: Bwd x -> [x] -> [x] 83 | B0 <>> ys = ys 84 | (xs :< x) <>> ys = xs <>> (x : ys) 85 | 86 | instance Monoid (Bwd x) where 87 | mempty = B0 88 | mappend = (+<+) 89 | 90 | data ALL :: (s -> *) -> Bwd s -> * where 91 | A0 :: ALL p B0 92 | AS :: ALL p gamma -> p s -> ALL p (gamma :< s) 93 | 94 | instance FunctorIx ALL where 95 | mapIx f A0 = A0 96 | mapIx f (AS ps p) = AS (mapIx f ps) (f p) 97 | 98 | -- nonempty lists 99 | data NEL x = x :- Maybe (NEL x) 100 | nel :: NEL x -> [x] 101 | nel (x :- xs) = x : case xs of 102 | Nothing -> [] 103 | Just xs -> nel xs 104 | instance Show x => Show (NEL x) where show = show . nel 105 | pattern Only x = x :- Nothing 106 | pattern x :-: xs = x :- Just xs 107 | infixr 3 :-: 108 | nconc :: NEL x -> Maybe (NEL x) -> NEL x 109 | nconc (Only x) ys = x :- ys 110 | nconc (x :-: xs) ys = x :-: nconc xs ys 111 | 112 | -- indexed unit type 113 | data Happy :: k -> * where 114 | Happy :: Happy k 115 | deriving Show 116 | data (:*) (s :: k -> *) (t :: k -> *) (i :: k) = s i :&: t i 117 | 118 | -- reflexive transitive closures 119 | 120 | data LStar r a b where 121 | L0 :: LStar r a a 122 | (:<:) :: LStar r a b -> r b c -> LStar r a c 123 | 124 | lextend :: (forall a b . r a b -> LStar s a b) -> LStar r a b -> LStar s a b 125 | lextend f L0 = L0 126 | lextend f (xs :<: x) = lextend f xs >>> f x 127 | 128 | lmap :: (forall a b . r a b -> s a b) -> LStar r a b -> LStar s a b 129 | lmap f xs = lextend (\ x -> L0 :<: f x) xs 130 | 131 | data RStar r a b where 132 | R0 :: RStar r a a 133 | (:>:) :: r a b -> RStar r b c -> RStar r a c 134 | 135 | class Category (hom :: obj -> obj -> *) where 136 | idCat :: hom x x 137 | (<<<) :: hom y z -> hom x y -> hom x z 138 | f <<< g = g >>> f 139 | (>>>) :: hom x y -> hom y z -> hom x z 140 | f >>> g = g <<< f 141 | 142 | instance Category (->) where 143 | idCat = id 144 | (<<<) = (.) 145 | 146 | instance Category (LStar r) where 147 | idCat = L0 148 | xs >>> L0 = xs 149 | xs >>> (ys :<: y) = (xs >>> ys) :<: y 150 | 151 | instance Category (RStar r) where 152 | idCat = R0 153 | R0 >>> ys = ys 154 | (x :>: xs) >>> ys = x :>: (xs >>> ys) 155 | 156 | -- existential 157 | 158 | data Ex (f :: k -> *) where 159 | Wit :: f i -> Ex f 160 | 161 | data Ex2 (f :: k -> l -> *)(j :: l) where 162 | Wit2 :: f i j -> Ex2 f j 163 | 164 | type Dot f g = Ex (f :* g) 165 | 166 | newtype Flip {- pin'eck -} f x y = Flip {pilf :: f y x} 167 | 168 | type RC r s x y = Dot (r x) (Flip s y) 169 | 170 | type f -:> g = forall i . 
f i -> g i 171 | 172 | class FunctorIx (f :: (i -> *) -> (j -> *)) where 173 | mapIx :: (x -:> y) -> f x -:> f y 174 | 175 | class FunctorIx f => MonadIx (f :: (i -> *) -> (i -> *)) where 176 | joinIx :: f (f x) -:> f x 177 | returnIx :: x -:> f x 178 | 179 | (?>=) :: MonadIx m => m s i -> (s -:> m t) -> m t i 180 | m ?>= k = joinIx (mapIx k m) 181 | 182 | data Prog :: ((i -> *) -> (i -> *)) -> ((i -> *) -> (i -> *)) where 183 | RET :: s i -> Prog intf s i 184 | DO :: intf r i 185 | -> (forall t. (s -:> Prog intf t) -> 186 | (r -:> Prog intf t)) 187 | -> Prog intf s i 188 | 189 | instance FunctorIx (Prog f) where 190 | mapIx f (RET x) = RET (f x) 191 | mapIx f (DO c g) = DO c $ \ k -> g (k . f) 192 | 193 | instance MonadIx (Prog f) where 194 | returnIx = RET 195 | joinIx (RET p) = p 196 | joinIx (DO c g) = DO c $ \ k -> g (joinIx . mapIx k) 197 | 198 | cmd :: f x -:> Prog f x 199 | cmd c = DO c ($) 200 | 201 | data (@=) :: * -> i -> (i -> *) where 202 | At :: x -> (x @= i) i 203 | 204 | mat :: (a -> b) -> ((a @= i) -:> (b @= i)) 205 | mat f (At a) = At (f a) 206 | 207 | (@>=) :: MonadIx m => m (a @= j) i -> (a -> m t j) -> m t i 208 | m @>= k = m ?>= \ x -> case x of At a -> k a 209 | 210 | (@>) :: MonadIx m => m (a @= j) i -> m t j -> m t i 211 | m @> n = m @>= const n 212 | infixr 3 @> 213 | 214 | raturn :: MonadIx m => a -> m (a @= i) i 215 | raturn = returnIx . At 216 | 217 | data Got :: * -> (i -> *) where 218 | Got :: x -> Got x i 219 | 220 | mgt :: (a -> b) -> (Got a -:> Got b) 221 | mgt f (Got a) = Got (f a) 222 | 223 | (/>=) :: MonadIx m => m (Got a) i -> (forall j. a -> m t j) -> m t i 224 | m />= k = m ?>= \ x -> case x of Got a -> k a 225 | 226 | (/>) :: MonadIx m => m (Got ()) i -> (forall j. m t j) -> m t i 227 | m /> n = m />= const n 228 | infixr 3 /> 229 | 230 | rgturn :: MonadIx m => a -> m (Got a) i 231 | rgturn = returnIx . Got 232 | 233 | ------------------------------------------------------------------------------ 234 | -- a type witnessing a constraint 235 | ------------------------------------------------------------------------------ 236 | 237 | type Holds c = forall t . (c => t) -> t 238 | -------------------------------------------------------------------------------- /April17/guide/guide.tex: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage{a4} 3 | \usepackage{alltt} 4 | 5 | \newcommand*\cleartoleftpage{% 6 | \clearpage 7 | \ifodd\value{page}\hbox{}\newpage\fi 8 | } 9 | 10 | \begin{document} 11 | 12 | \begin{verbatim} 13 | dP dP 14 | 88 88 15 | 88d888b. 88 .d8888b. d8888P dP dP 88d888b. dP dP .d8888b. 16 | 88' `88 88 88' `88 88 88 88 88' `88 88 88 Y8ooooo. 17 | 88. .88 88 88. .88 88 88. .88 88. .88 88. .88 88 18 | 88Y888P' dP `88888P8 dP `8888P88 88Y888P' `88888P' `88888P' 19 | 88 .88 88 20 | dP d8888P dP 21 | \end{verbatim} 22 | 23 | \noindent{\Huge the user guide} 24 | 25 | \vspace*{1in} 26 | 27 | \section{introduction} 28 | 29 | The {\tt platypus} system does not yet exist, but let's write its user guide anyway, 30 | as that will help us figure out what \emph{should} exist. 31 | 32 | Platypus is a rudimentary proof assistant based on constructive type theory, in the 33 | tradition of the constructive engine. 
It is 34 | \begin{description} 35 | \item[command-driven] using all the user interface technologies of a thoroughly 36 | modern paper teletype, 37 | \item[inconsistent] thanks to type in type, 38 | \item[European] in the sense that the theory has a notion of \emph{definitional} 39 | equality which is computed, and thus a disappointing accident, 40 | \item[ugly] because its syntax is not exactly s-expressions but in that area 41 | \end{description} 42 | and it is all of these things not because they are virtues, but because they are 43 | cheap. 44 | 45 | 46 | \section{get me out of here} 47 | 48 | The {\tt quit} command gets you out, if you happen to wish you were 49 | not in. 50 | 51 | \section{the syntax of everything} 52 | 53 | We have a universal syntax. 54 | \[\begin{array}{rrll} 55 | \mathit{thing} & ::= & \mathit{atom} 56 | & \mbox{nonempty sequence of nonspace chars bar {\tt .,()}}\\ 57 | & | & \mathit{atom}.\,\mathit{thing} 58 | & \mbox{binding}\\ 59 | & | & \mathit{thing}\;\mathit{thing}+ 60 | & \mbox{clump of two or more things} \\ 61 | & | & {\tt (}\mathit{thing}{\tt )} 62 | & \mbox{parentheses for disambiguation} \\ 63 | \end{array}\] 64 | You \emph{may} insert parentheses for disambiguation anywhere. You 65 | \emph{must} use them to wrap a binding or a clump which is not the 66 | last thing in another clump. You need not wrap in parentheses a 67 | binding that stands last in a clump. So 68 | \[ 69 | f\;x.\,a\; b\;\;\mbox{means}\;\;(f\;(x.\,(a\; b))) \qquad 70 | f\;(x.\,a)\; b\;\;\mbox{means}\;\;(f\;(x.\,a)\; b) 71 | \] 72 | If a clump stands last in a clump, you may omit the parentheses 73 | around the inner clump if you insert a {\tt ,} before it. So 74 | \[ 75 | a\;b\;c,\,d\;e\;f\;\;\mbox{means}\;\;(a\;b\;c\;(d\;e\;f)) 76 | \] 77 | In this way, as in the 1950s, right-nested things flatten into 78 | list-like things. We will also be predisposed to make binding 79 | constructs put their binding last. E.g., dependent function types 80 | look like 81 | \[ 82 | {\tt Pi}\;S\;x.\,T 83 | \] 84 | and a big long telescope of such things is 85 | \[ 86 | {\tt Pi}\;S_0\;x_0.\,{\tt Pi}\;S_1\;x_1.\,\cdots {\tt Pi}\;S_n\;x_n.\, T 87 | \] 88 | Degenerate vacuous binding is indicated by \emph{not binding}, rather 89 | than binding an underscore or some such. So for a nondependent 90 | function 91 | type, we have 92 | \[ 93 | {\tt Pi}\;S_0,\,{\tt Pi}\;S_1,\,\cdots {\tt Pi}\;S_n\; T 94 | \] 95 | 96 | We use this syntax for everything, including commands typed in the 97 | shell. You may not type a syntax error (but, of course, you may make 98 | use of this syntax in meaningless ways). When you hit enter, your 99 | input so far is presumed complete if it parses. If, however, the 100 | parser still awaits more text (usually a {\tt )}) to form valid input, 101 | you will be prompted to supply it. So, you can give a multi-line 102 | clump as a command if you take the precaution to begin it with {\tt (}. 103 | 104 | 105 | \section{the business is definition} 106 | 107 | The point of a {\tt platypus} session is to construct some \emph{definitions} by refinement, 108 | with intermediate unknowns represented as \emph{holes}. 
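For a first taste (anticipating the worked example of the next section), one
might create a definition whose value is, for the moment, a hole:
\begin{verbatim}
> id, =, ? : Pi Type X. Pi X x. X
\end{verbatim}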
109 | 110 | A definition has 111 | \begin{description} 112 | \item[a name] which is some sort of identifier, consisting of nonwhitespace 113 | characters other than {\tt (),.}; by convention the character {\tt /} is 114 | used to give names a hierarchical structure, so {\tt foo/goo} is the name 115 | of {\tt foo}'s {\tt goo}; 116 | \item[a telescope of parameters] which is a type theoretic context; uses of 117 | the definition must instantiate the parameters; 118 | \item[a type]; 119 | \item[a value] which must have the indicated type in the scope of the 120 | parameters; values may refer to \emph{holes} if the definition is incomplete 121 | \end{description} 122 | 123 | A hole is like a definition, except that its name ends with {\tt ?} and its value is unknown. As soon as the hole is filled, its usage sites are refined and the hole ceases to exist. 124 | 125 | Although names are suggestive of hierarchical structure, we flatten the 126 | hierarchy into a \emph{state}, which is a sequence of holes and definitions. 127 | 128 | What is the syntax of a definition? 129 | \[ 130 | \mathit{name}{\tt ,}\;\mathit{body} 131 | \] 132 | where 133 | \[\begin{array}{rrl} 134 | \mathit{body} & ::= & {\tt =}\;\mathit{term}\;{\tt :}\;\mathit{type} \\ 135 | & | & \mathit{type}\;x.\mathit{body} \\ 136 | \end{array}\] 137 | E.g., one might define a useless identity operation, named {\tt foo} as follows. 138 | \begin{verbatim} 139 | foo, Type X. X x. =, x : X 140 | \end{verbatim} 141 | 142 | One can, of course, imagine a nicer syntax. (A body is always a 143 | \emph{clump}, hence the comma.) 144 | 145 | 146 | \section{where am I and what can see from there?} 147 | 148 | The command prompt looks like 149 | \[ 150 | \mathit{prefix}\;\mathit{range}\;{\tt>} 151 | \] 152 | where a $\mathit{prefix}$ is either empty or an identifier (typically 153 | a pathlike sequence of ${\tt /}\mathit{name}$ blocks), and a 154 | $\mathit{range}$ is either empty or 155 | \[ 156 | {\tt (}[\mathit{nameA}]\;\mbox{\tt \^{}}\;[\mathit{nameB}]{\tt )} 157 | \] 158 | where either but not both $\mathit{nameX}$ may be omitted. 159 | The prompt tells us our \emph{field of view}: we can see everything whose name 160 | has $\mathit{prefix}$ as a prefix and which comes strictly between $\mathit{prefix}{\tt /}\mathit{nameA}$ and $\mathit{prefix}{\tt /}\mathit{nameB}$. If the prefix is nonempty, there should be something whose name begins with it. Likewise the range endpoints should be 161 | names of actual things. 162 | 163 | Before the prompt is issued, you will see a display of all the things in the field of view. The bodies of definitions whose names are strictly longer than the prefix are suppressed. 164 | The display will give only the suffix of the name, to save space. 165 | A hole (whose name is $\mathit{prefix}{\tt /}\mathit{suffix}{\tt ?}$is 166 | displayed as a vertical context, followed by a line of dashes, followed by 167 | the name $\mathit{suffix}{\tt ?}$, a {\tt :} and the hole's type. E.g., 168 | \begin{verbatim} 169 | X : Type 170 | x : X 171 | ---------------- 172 | id/body? : X 173 | 174 | > 175 | \end{verbatim} 176 | 177 | A definition whose name is strictly longer than the prefix will have 178 | its value suppressed, but its context and type will be displayed in 179 | the same format as a goal (but with no {\tt ?}). 
A definition whose 180 | name is exactly the prefix will have 181 | If the prefix is $\mathit{blah}$ for some definition $\mathit{blah}$, the body 182 | of the definition will be displayed, instead of the (empty)name 183 | suffix. We might hope to arrive at the following 184 | \begin{verbatim} 185 | X : Type 186 | x : X 187 | -------------------------------------- 188 | (\, \x . x) : Pi Type X. Pi X x. X 189 | \end{verbatim} 190 | 191 | 192 | Some time earlier, you might have typed at the prompt {\tt >} 193 | \begin{verbatim} 194 | > id, =, ? : Pi Type X. Pi X x. X 195 | \end{verbatim} 196 | which would result in the display. 197 | \begin{verbatim} 198 | ------------------------------ 199 | id? : Pi Type X. Pi X x. X 200 | 201 | ------------------------------ 202 | id : Pi Type X. Pi X x. X 203 | 204 | > 205 | \end{verbatim} 206 | and you might then have typed 207 | \begin{verbatim} 208 | > = \ X. \ x. ?body 209 | \end{verbatim} 210 | which would result in the goal display above. 211 | 212 | Your primary means of not getting bombarded with too much information is thus 213 | to lengthen the prompt. The command 214 | \[ 215 | {\tt /}\;\mathit{suffix}\; 216 | \] 217 | will add ${\tt /}\mathit{suffix}$ to the prompt prefix. It is forbidden 218 | to have a nonempty prefix which refers to nothing in the context. 219 | 220 | Meanwhile, the command 221 | \[ 222 | {\tt <} 223 | \] 224 | will shorten the prompt to its penultimate {\tt /}, or to empty if there is at most 225 | one {\tt /}. 226 | 227 | The command `zoom out to $\mathit{name}$' 228 | \[ 229 | {\tt <}\;\mathit{name} 230 | \] 231 | will change the prefix to its longest proper subsequence ending in 232 | ${\tt /}\mathit{name}{\tt /}$ 233 | if there is such a thing. 234 | 235 | The command {\tt <<} zooms all the way out to the root. 236 | 237 | Your secondary means of not being bombarded with too much information is to narrow 238 | the range with the command 239 | \[ 240 | \mathit{name}\;\mbox{\tt \^{}}\;\mathit{name}' 241 | \] 242 | which means `show me only the things between the two names'. You may omit either end 243 | to mean `show me from the beginning' or `show me to the end'. You can use this also to 244 | widen your view. In particular, the command 245 | \[ 246 | \mbox{\tt \^{}} 247 | \] 248 | means `show everything'. 249 | 250 | But where am I? If I make a definition, where will it go? I am just before 251 | the first hole in my field of view (which means that if there is no such hole, I am at the 252 | end of what I can see. So, viewing range restrictions let us insert definitions 253 | before the end of the state. 
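To pull the pieces together, here is a sketch of how the {\tt id} example
above might be taken to completion. The system does not yet exist, so the
intermediate displays are omitted and the details are speculative, but the
commands are exactly those described above; one might equally zoom in with
{\tt / id} first, to cut the display down to the one remaining hole.
\begin{verbatim}
> id, =, ? : Pi Type X. Pi X x. X
> = \ X. \ x. ?body
> = x
> quit
\end{verbatim}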
254 | 255 | 256 | 257 | \cleartoleftpage 258 | \appendix 259 | 260 | \section{Command Cheatsheet} 261 | 262 | \[\begin{array}{ll} 263 | \mathit{name}\;\mathit{construction} & 264 | \mbox{make a definition} \\ 265 | {\tt =}\;\mathit{term} & \mbox{give a refinement} \smallskip\\ 266 | 267 | {\tt /}\;\mathit{name} & \mbox{zoom prefix in to name}\\ 268 | {\tt <}\;[\mathit{name}] & \mbox{zoom prefix out [to name]}\\ 269 | {\tt <<} & \mbox{zoom prefix all the way out}\\ 270 | {}[\mathit{name}]\;\mbox{\tt \^{}}\;[\mathit{name}] & \mbox{set the 271 | range} 272 | \bigskip\\ 273 | {\tt quit} & \mbox{get me out of here} \\ 274 | {\tt undo} & \mbox{undo last change} 275 | \end{array}\] 276 | 277 | \section{Grammars} 278 | 279 | \[\begin{array}{rrll} 280 | \mathit{construction} & ::= & {\tt =}\:\mathit{syn} & 281 | \mbox{construction body} \\ 282 | & | & \mathit{chk}\;x.\,\mathit{construction} & 283 | \mbox{variable parameter construction} \medskip \\ 284 | \mathit{chk} & ::= & \mathit{syn} & 285 | \mbox{term with synthesizable type} \\ 286 | & | & {\tt Type} & \mbox{type of all types} \\ 287 | & | & {\tt Pi}\;\mathit{chk}\;\mathit{scope} & \mbox{function type} \\ 288 | & | & {\tt \backslash}\;\mathit{scope} & \mbox{abstraction} \\ 289 | & | & {\tt ?}\mathit{name} & \mbox{hole generation} 290 | \medskip \\ 291 | \mathit{scope} & ::= & x.\,\mathit{chk} & \mbox{named binding} \\ 292 | & | & \mathit{chk} & \mbox{vacuous binding} 293 | \medskip \\ 294 | \mathit{syn} & ::= & x & \mbox{variable usage} \\ 295 | & | & \mathit{chk}\;{\tt :}\;\mathit{chk} & \mbox{type ascription} \\ 296 | & | & \mathit{syn}\;\mathit{chk}+ & \mbox{application} 297 | \end{array}\] 298 | 299 | \end{document} 300 | -------------------------------------------------------------------------------- /Boxen/Boxen.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GADTs, DataKinds, KindSignatures, StandaloneDeriving, 2 | PatternSynonyms, PatternGuards, 3 | MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances, 4 | UndecidableInstances #-} 5 | 6 | module Boxen where 7 | 8 | import Prelude hiding ((/)) 9 | import Data.List hiding ((\\)) 10 | import Control.Monad 11 | import Control.Applicative 12 | 13 | 14 | ------------------------------------------------------------------------------ 15 | -- SYNTAX OF TERMS AND VALUES 16 | ------------------------------------------------------------------------------ 17 | 18 | data Dir = In | Ex 19 | data Phase = Val | Cut 20 | 21 | data Tm :: Phase -> Dir -> * where 22 | A :: String -> Tm p In -- atoms 23 | (:&) :: Tm p In -> Tm p In -> Tm p In -- cons cells 24 | L :: Bd p -> Tm p In -- abstractions 25 | E :: Tm p Ex -> Tm p In -- eliminations 26 | V :: Int -> Tm Cut Ex -- bound variables 27 | P :: Ref -> Tm p Ex -- free variables 28 | (:/) :: Tm p Ex -> Tm p In -> Tm p Ex -- action 29 | (:<) :: Tm Cut In -> Tm Cut In -> Tm Cut Ex -- cut 30 | infixr 6 :& 31 | infixl 8 :/ 32 | infixl 9 :< 33 | 34 | pattern N = A "" -- nil is the atom with the empty name 35 | 36 | data Bd :: Phase -> * where 37 | K :: Tm p In -> Bd p 38 | B :: Tm Cut In -> Bd Cut 39 | (:%) :: ENV -> Tm Cut In -> Bd Val 40 | 41 | deriving instance Show (Tm p d) 42 | deriving instance Show (Bd p) 43 | deriving instance Eq (Tm Cut d) 44 | 45 | instance Eq (Bd Cut) where 46 | K s == K t = s == t 47 | B s == B t = s == t 48 | K s == B t = varOp Thin 0 s == t 49 | B s == K t = s == varOp Thin 0 t 50 | 51 | type VAL = Tm Val In 52 | type ENV = [THING] 53 | type KIND = VAL -- for 
documentary purposes 54 | data THING = (:::) { valOf :: VAL, kindOf :: KIND } deriving Show 55 | infix 2 ::: 56 | type TERM = Tm Cut In 57 | type CUT = Tm Cut Ex 58 | 59 | data Ref = Ref {nom :: String, kind :: KIND} 60 | instance Show Ref where show = nom 61 | instance Eq Ref where x == y = nom x == nom y 62 | thing :: Ref -> THING 63 | thing x = E (P x) ::: kind x 64 | 65 | 66 | ------------------------------------------------------------------------------ 67 | -- COMPUTATION 68 | ------------------------------------------------------------------------------ 69 | 70 | class Eval t v | t -> v, v -> t where 71 | val :: ENV -> t -> v 72 | 73 | instance Eval TERM VAL where 74 | val g (A a) = A a 75 | val g (s :& t) = val g s :& val g t 76 | val g (L b) = L (val g b) 77 | val g (E e) = valOf (val g e) 78 | 79 | instance Eval (Bd Cut) (Bd Val) where 80 | val g (K t) = K (val g t) 81 | val g (B t) = g :% t 82 | 83 | instance Eval CUT THING where 84 | val g (V i) = g !! i 85 | val g (P x) = thing x 86 | val g (f :/ s) = val g f / val g s 87 | val g (t :< _T) = val g t ::: val g _T 88 | 89 | class Slash f a t | f -> t where 90 | (/) :: f -> a -> t 91 | infixl 8 /, //, /: 92 | 93 | instance Slash THING VAL THING where 94 | f / v = f // v ::: f /: v 95 | 96 | 97 | ------------------------------------------------------------------------------ 98 | -- TYPECHECKING WITH A NAME SUPPLY 99 | ------------------------------------------------------------------------------ 100 | 101 | newtype TC x = TC {tc :: Int -> Maybe x} -- deriving Monoid is wrong 102 | 103 | class Discharge t b | t -> b, b -> t where 104 | (\\) :: Ref -> t -> b 105 | infix 3 \\ 106 | 107 | (!-) :: Discharge t b => KIND -> (Ref -> TC t) -> TC b 108 | _S !- k = TC $ \ i -> 109 | let x = Ref {nom = show i, kind = _S} 110 | in x \\ tc (k x) (i + 1) 111 | infixr 4 !- 112 | 113 | va :: Eval t v => (t -> TC ()) -> t -> TC v 114 | va chk t = do 115 | chk t 116 | return (val [] t) 117 | 118 | nil :: Tm p In -> TC () 119 | nil N = return () 120 | nil _ = empty 121 | 122 | 123 | ------------------------------------------------------------------------------ 124 | -- KINDS OF THING (OR JUDGMENT FORMS, SORT OF) 125 | ------------------------------------------------------------------------------ 126 | 127 | pattern Type = A "Type" 128 | pattern El _T = A "El" :& _T :& N 129 | pattern Seg = A "Seg" 130 | pattern Point z = A "Point" :& z :& N 131 | pattern Types z = A "Types" :& z :& N 132 | pattern Path' _S _T = A "Path" :& _S :& _T :& N 133 | 134 | kindly :: TERM -> TC () 135 | kindly Type = return () 136 | kindly (El _T) = Type >:> _T 137 | kindly Seg = return () 138 | kindly (Point z) = Seg >:> z 139 | kindly (Types z) = Seg >:> z 140 | kindly (Path' _S _T) = do 141 | nil _S <|> Type >:> _S 142 | nil _T <|> Type >:> _T 143 | kindly _ = fail "unkind" 144 | 145 | 146 | ------------------------------------------------------------------------------ 147 | -- CONSTRUCTIONS 148 | ------------------------------------------------------------------------------ 149 | 150 | pattern Pi _S _T = A "Pi" :& _S :& L _T :& N 151 | pattern Sg _S _T = A "Sg" :& _S :& L _T :& N 152 | pattern Path z _S _M _T = A "Path" :& z :& _S :& _M :& _T :& N 153 | pattern Mid _L _T _R = _L :& _T :& _R :& N 154 | pattern Fork l r = A "fork" :& l :& r :& N 155 | 156 | (>:>) :: KIND -> TERM -> TC () 157 | infix 5 >:> 158 | 159 | Type >:> Pi _S _T = do 160 | _S <- va (Type >:>) _S 161 | _S !- \ x -> Type >:> _T / x 162 | 163 | El (Pi _S _T) >:> L t = do 164 | _S !- \ x -> _T / x >:> t / 
x 165 | 166 | Type >:> Sg _S _T = do 167 | _S <- va (Type >:>) _S 168 | _S !- \ x -> Type >:> _T / x 169 | 170 | El (Sg _S _T) >:> s :& t = do 171 | s <- va (El _S >:>) s 172 | El (_T / (s ::: _S)) >:> t 173 | 174 | Type >:> Path z _S _M _T = do 175 | z <- va (Seg >:>) z 176 | Type >:> _S 177 | Types z >:> _M 178 | Type >:> _T 179 | 180 | Seg >:> N = return () 181 | Types N >:> N = return () 182 | Point _ >:> A "0" = return () 183 | Point _ >:> A "1" = return () 184 | 185 | Seg >:> l :& r = do Seg >:> l ; Seg >:> r 186 | Types (l :& r) >:> Mid _L _T _R = do Types l >:> _L ; Type >:> _T ; Types r >:> _R 187 | Point (l :& r) >:> A "left" :& p = Point l >:> p 188 | Point (l :& r) >:> A "right" :& p = Point r >:> p 189 | 190 | El (Path N _S N _T) >:> L _ST = do 191 | _ST <- va (\ _ST -> Point N !- \ i -> Type >:> _ST / i) _ST 192 | equalAt Type _S (_ST / (A "0" ::: Point N)) 193 | equalAt Type (_ST / (A "1" ::: Point N)) _T 194 | 195 | El (Path (l :& r) _S (Mid _L _T _R) _U) >:> Fork f g = do 196 | Path l _S _L _T >:> f 197 | Path r _T _R _U >:> g 198 | 199 | El (Path N _S N _T) >:> Fork f g = Path' _S _T >:> Fork f g 200 | 201 | Path' _S _T >:> Fork f g = do 202 | f <- va ((Path' _S N) >:>) f 203 | Path' ((f ::: Path' _S N) // A "1") _T >:> g 204 | 205 | want >:> E e = do 206 | got <- synth e 207 | want <<== kindOf got 208 | 209 | _ >:> _ = fail "ill typed" 210 | 211 | 212 | ------------------------------------------------------------------------------ 213 | -- ELIMINATION ACTIONS 214 | ------------------------------------------------------------------------------ 215 | 216 | (/:>) :: KIND -> TERM -> TC () 217 | El (Pi _S _T) /:> s = El _S >:> s 218 | El (Sg _S _T) /:> A "car" = return () 219 | El (Sg _S _T) /:> A "cdr" = return () 220 | El (Path (l :& r) _S _ _T) /:> A "left" = return () 221 | El (Path (l :& r) _S _ _T) /:> A "right" = return () 222 | El (Path z _S _M _T) /:> p = Point z >:> p 223 | El (Point z) /:> (f :& _T) = do 224 | _T <- va (Type >:>) _T 225 | case _T of 226 | Path z' _ _ _ -> Point z <<== Point z' 227 | _ -> empty 228 | El _T >:> f 229 | _ /:> _ = fail "bad action" 230 | 231 | (/:) :: THING -> VAL -> KIND 232 | (f ::: El (Pi _S _T)) /: v = El (_T / (v ::: _S)) 233 | (p ::: El (Sg _S _T)) /: A "car" = El _S 234 | p@(_ ::: El (Sg _S _T)) /: A "cdr" = El (_T / (p // A "car" ::: _S)) 235 | 236 | (_ ::: El (Path (l :& _) _S (Mid _L _T _) _)) /: A "left" = El (Path l _S _L _T) 237 | (_ ::: El (Path (_ :& r) _ (Mid _ _S _R) _T)) /: A "right" = El (Path r _S _R _T) 238 | (_ ::: El (Path _ _ _ _)) /: _ = Type 239 | 240 | (//) :: THING -> VAL -> VAL 241 | (L b ::: El (Pi _S _T)) // v = b / (v ::: _S) 242 | (u :& v ::: El (Sg _S _T)) // A "car" = u 243 | (u :& v ::: El (Sg _S _T)) // A "cdr" = v 244 | (_ ::: El (Path _ _S _ _)) // A "0" = _S 245 | (_ ::: El (Path _ _ _ _T)) // A "1" = _T 246 | q@(_ ::: El (Path _ _ _ _)) // (a :& p) = q / a // p 247 | (Fork f g ::: _) // A "left" = f 248 | (Fork f g ::: _) // A "right" = g 249 | (Fork f g ::: El _T@(Path z _ _ _)) // p = (p ::: Point z) // (Fork f g :& _T) 250 | (L b ::: Path' _ _) // p = b / (p ::: Point N) 251 | (Fork f g ::: Path' _S _) // A "0" = (f ::: Path' _S N) // A "0" 252 | (Fork f g ::: Path' _ _T) // A "1" = (g ::: Path' N _T) // A "1" 253 | (E f ::: _) // v = E (f :/ v) 254 | (p ::: Point _) // (f :& _T) = (f ::: El _T) // p 255 | 256 | ------------------------------------------------------------------------------ 257 | -- SUBKINDING 258 | ------------------------------------------------------------------------------ 
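-- A hedged reading of this section (inferred from the clauses below, and from
-- the `want >:> E e` case above, which ends with `want <<== kindOf got`):
-- `k <<== k'` succeeds when a thing of kind k' may be used where kind k is
-- wanted.  Hence Pi is contravariant in its domain (`_S' <<== _S`) while Sg is
-- covariant (`_S <<== _S'`), and a path kind with a richer segmentation may be
-- used where the trivially segmented path kind is wanted, i.e. intermediate
-- points may be forgotten (cf. the `subtlety' ordering in Extrude/Pointy.tex).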
259 | 260 | (<<==) :: KIND -> KIND -> TC () 261 | infix 5 <<== 262 | 263 | Type <<== Type = return () 264 | 265 | El (Pi _S _T) <<== El (Pi _S' _T') = do 266 | _S' <<== _S 267 | _S' !- \ x -> _T / x <<== _T' / x 268 | 269 | El (Sg _S _T) <<== El (Sg _S' _T') = do 270 | _S <<== _S' 271 | _S !- \ x -> _T / x <<== _T' / x 272 | 273 | El (Path N _S N _T) <<== El (Path _ _S' _ _T') = do 274 | equalAt Type _S _S' 275 | equalAt Type _T _T' 276 | 277 | El (Path (l :& r) _S (Mid _L _T _R) _U) <<== 278 | El (Path (l' :& r') _S' (Mid _L' _T' _R') _U') = do 279 | El (Path l _S _L _T) <<== El (Path l' _S' _L' _T') 280 | El (Path r _T _R _U) <<== El (Path r' _T' _R' _U') 281 | 282 | Path' _S _T <<== Path' _S' _T' = do 283 | nil _S' <|> equalAt Type _S _S' -- check that _S isn't N? 284 | nil _T' <|> equalAt Type _T _T' -- check that _S isn't N? 285 | 286 | Point _ <<== Point N = return () 287 | 288 | Point (l :& r) <<== Point (l' :& r') = do 289 | Point l <<== Point l' 290 | Point r <<== Point r' 291 | 292 | _ <<== _ = fail "not a subkind" 293 | 294 | 295 | ------------------------------------------------------------------------------ 296 | -- QUOTATION AND EQUALITY 297 | ------------------------------------------------------------------------------ 298 | 299 | equalAt :: KIND -> VAL -> VAL -> TC () 300 | equalAt k s t = do 301 | s' <- quote (s ::: k) 302 | t' <- quote (t ::: k) 303 | guard (s' == t') 304 | 305 | quote :: THING -> TC TERM 306 | 307 | quote (Pi _S _T ::: Type) = do 308 | _S' <- quote (_S ::: Type) 309 | _T' <- El _S !- \ x -> quote (_T / x ::: Type) 310 | return (Pi _S' _T') 311 | 312 | quote f@(_ ::: Pi _S _T) = do 313 | t' <- _S !- \ x -> quote (f / x) 314 | return (L t') 315 | 316 | quote (Sg _S _T ::: Type) = do 317 | _S' <- quote (_S ::: Type) 318 | _T' <- El _S !- \ x -> quote (_T / x ::: Type) 319 | return (Sg _S' _T') 320 | 321 | quote p@(_ ::: Sg _S _T) = do 322 | s' <- quote (p / "car") 323 | t' <- quote (p / "cdr") 324 | return (s' :& t') 325 | 326 | 327 | ------------------------------------------------------------------------------ 328 | -- VARIABLE OPERATIONS 329 | ------------------------------------------------------------------------------ 330 | 331 | data VarOp = Subs [CUT] | Abst [Ref] | Thin 332 | 333 | class Syntactic t where 334 | varOp :: VarOp -> Int -> t -> t 335 | 336 | instance Syntactic (Tm Cut d) where 337 | varOp Thin i (V j) | i <= j = V (j + 1) 338 | varOp (Subs es) i (V j) | i <= j = es !! 
(j - i) 339 | varOp (Abst xs) i (P x) | Just j <- findIndex (x ==) xs = V (i + j) 340 | varOp z i (L (B t)) = L (B (varOp z (i + 1) t)) 341 | varOp z i (L (K t)) = L (K (varOp z i t)) 342 | varOp z i (u :& v) = varOp z i u :& varOp z i v 343 | varOp z i (E e) = E (varOp z i e) 344 | varOp z i (f :/ a) = varOp z i f :/ varOp z i a 345 | varOp z i (t :< _T) = varOp z i t :< varOp z i _T 346 | varOp z i t = t 347 | 348 | instance Syntactic () where 349 | varOp _ _ () = () 350 | 351 | instance Syntactic t => Syntactic (Maybe t) where 352 | varOp z i = fmap (varOp z i) 353 | 354 | instance Discharge () () where 355 | _ \\ () = () 356 | 357 | instance Discharge TERM (Bd Cut) where 358 | x \\ t = B (varOp (Abst [x]) 0 t) 359 | 360 | instance Discharge t b => Discharge (Maybe t) (Maybe b) where 361 | x \\ mt = fmap (x \\) mt 362 | 363 | 364 | ------------------------------------------------------------------------------ 365 | -- BORING 366 | ------------------------------------------------------------------------------ 367 | 368 | instance Slash (Bd Val) THING VAL where 369 | K t / _ = t 370 | (g :% t) / vS = val (vS : g) t 371 | 372 | instance Slash (Bd Val) Ref VAL where 373 | t / x = t / thing x 374 | 375 | instance Slash (Bd Cut) CUT TERM where 376 | K t / _ = t 377 | B t / s = varOp (Subs [s]) 0 t 378 | 379 | instance Slash (Bd Cut) Ref TERM where 380 | b / x = b / (P x :: CUT) 381 | 382 | instance Slash THING Ref THING where 383 | f / x = f / (E (P x) :: VAL) 384 | 385 | instance Slash THING String THING where 386 | f / x = f / (A x :: VAL) 387 | 388 | instance Monad TC where 389 | return x = TC $ \ l -> Just x 390 | TC a >>= k = TC $ \ l -> do 391 | a <- a l 392 | tc (k a) l 393 | fail _ = TC $ \ _ -> Nothing 394 | 395 | instance Applicative TC where 396 | pure = return 397 | (<*>) = ap 398 | 399 | instance Functor TC where 400 | fmap = ap . 
return 401 | 402 | instance Alternative TC where 403 | empty = TC $ \ _ -> empty 404 | TC a <|> TC b = TC $ \ l -> a l <|> b l 405 | 406 | synth :: CUT -> TC THING 407 | synth (P x) = return (thing x) 408 | synth (f :/ a) = do 409 | f <- synth f 410 | a <- va (kindOf f /:>) a 411 | return (f / a) 412 | synth (t :< _K) = do 413 | _K <- va kindly _K 414 | t <- va (_K >:>) t 415 | return (t ::: _K) 416 | 417 | -------------------------------------------------------------------------------- /Crossroads/MetaMeta.agda: -------------------------------------------------------------------------------- 1 | module MetaMeta where 2 | 3 | data Fwd (X : Set) : Set where 4 | [] : Fwd X 5 | _,-_ : X -> Fwd X -> Fwd X 6 | infixr 4 _,-_ 7 | 8 | data Bwd (X : Set) : Set where 9 | [] : Bwd X 10 | _-,_ : Bwd X -> X -> Bwd X 11 | infixl 4 _-,_ 12 | 13 | data _:>_ {X : Set} : Bwd X -> X -> Set where 14 | ze : forall {xz x} -> (xz -, x) :> x 15 | su : forall {x xz y} -> xz :> x -> (xz -, y) :> x 16 | 17 | data Syn (I : Set) : Set where 18 | [] : Syn I 19 | # : I -> Syn I 20 | _=>_ : I -> Syn I -> Syn I 21 | _/_ : Syn I -> Syn I -> Syn I 22 | Sum : Fwd (Syn I) -> Syn I 23 | infixr 5 _/_ 24 | infixr 6 _=>_ 25 | 26 | record One : Set where constructor <> 27 | record Sg (S : Set)(T : S -> Set) : Set where 28 | constructor _,_ 29 | field 30 | fst : S 31 | snd : T fst 32 | open Sg 33 | _*_ : Set -> Set -> Set 34 | S * T = Sg S \ _ -> T 35 | infixr 4 _,_ 36 | 37 | data Zero : Set where 38 | data Two : Set where tt ff : Two 39 | _+_ : Set -> Set -> Set 40 | S + T = Sg Two \ { tt -> S ; ff -> T } 41 | 42 | data Nat : Set where 43 | ze : Nat 44 | su : Nat -> Nat 45 | 46 | Node : {I : Set}(R : Bwd I -> I -> Set) -> Syn I -> Bwd I -> Set 47 | Choose : {I : Set}(R : Bwd I -> I -> Set) -> Fwd (Syn I) -> Bwd I -> Set 48 | Node R [] G = One 49 | Node R (# i) G = R G i 50 | Node R (j => F) G = Node R F (G -, j) 51 | Node R (F / F') G = Node R F G * Node R F' G 52 | Node R (Sum Fs) G = Choose R Fs G 53 | Choose R [] G = Zero 54 | Choose R (F ,- Fs) G = Node R F G + Choose R Fs G 55 | 56 | data BwdIn {X : Set} : Bwd X -> X -> Set where 57 | ze : forall {xz x} -> BwdIn (xz -, x) x 58 | su : forall {xz y x} -> BwdIn xz x -> BwdIn (xz -, y) x 59 | 60 | data Tm {I : Set}(F : I -> Syn I)(G : Bwd I)(i : I) : Set where 61 | [_] : Node (Tm F) (F i) G -> Tm F G i 62 | ! 
: BwdIn G i -> Tm F G i 63 | 64 | data Cx {I : Set}(F B : I -> Syn I) : Bwd I -> Set where 65 | [] : Cx F B [] 66 | _-,_ : forall {G i} -> Cx F B G -> Node (Tm F) (B i) G -> Cx F B (G -, i) 67 | 68 | 69 | data Status : Set where blk red : Status 70 | 71 | Mode : Status -> Set 72 | Mode blk = Two -- tt for input; ff for output 73 | Mode red = One 74 | 75 | Role : Set 76 | Role = Sg Status Mode 77 | 78 | 79 | -- the syntax of judgment forms is given by some 80 | -- J : Set -- the judgment forms 81 | -- JSyn : J -> Syn (Role * I) -- the syntax of each 82 | 83 | module ExPat {I : Set}(F B : I -> Syn I) where 84 | -- F is the term syntax for each sort 85 | -- B is the syntax of contextual bindings for variables of each sort 86 | 87 | data Arity : Set where 88 | # : I -> Arity 89 | _=>_ : I -> Arity -> Arity 90 | infixr 6 _==>_ 91 | 92 | _==>_ : Bwd I -> Arity -> Arity 93 | [] ==> t = t 94 | (is -, i) ==> t = is ==> (i => t) 95 | 96 | Sch : Set 97 | Sch = Status * Arity 98 | 99 | SCx : Set 100 | SCx = Bwd Sch 101 | 102 | AllBlk : SCx -> Set 103 | AllBlk [] = One 104 | AllBlk (G -, (blk , _)) = AllBlk G 105 | AllBlk (G -, (red , _)) = Zero 106 | 107 | data SVar (t : Sch) : SCx -> SCx -> Set where 108 | ze : {G : SCx} -> SVar t (G -, t) (G -, (blk , snd t)) 109 | su : {G G' : SCx}{s : Sch} -> SVar t G G' -> SVar t (G -, s) (G' -, s) 110 | 111 | data Exp (G : SCx)(L : Bwd I)(r : Status) : Arity -> SCx -> Set 112 | data Exps (G : SCx)(L : Bwd I)(r : Status) : Syn I -> SCx -> Set 113 | data Exp G L r where 114 | sv : forall {T G'} -> (x : SVar (r , T) G G') -> Exp G L r T G' 115 | bv : forall {i} -> L :> i -> Exp G L r (# i) G 116 | _$_ : forall {i T G' G''} -> Exp G L r (i => T) G' -> Exp G' L r (# i) G'' 117 | -> Exp G L r T G'' 118 | [_] : forall {i G'} -> Exps G L r (F i) G' -> Exp G L r (# i) G' 119 | data Exps G L r where 120 | [] : Exps G L r [] G 121 | [_] : forall {i G'} -> Exp G L r (# i) G' -> Exps G L r (# i) G' 122 | _,_ : forall {S T G' G''} -> Exps G L r S G' -> 123 | Exps G' L r T G'' -> Exps G L r (S / T) G'' 124 | inl : forall {S Ts G'} -> Exps G L r S G' -> Exps G L r (Sum (S ,- Ts)) G' 125 | inr : forall {S Ts G'} -> Exps G L r (Sum Ts) G' -> Exps G L r (Sum (S ,- Ts)) G' 126 | la : forall {i T G'} -> Exps G (L -, i) r T G' -> Exps G L r (i => T) G' 127 | 128 | data Pats (G : SCx)(L : Bwd I)(r : Status) : Syn I -> SCx -> Set 129 | data Pat (G : SCx)(L : Bwd I)(r : Status)(i : I) : SCx -> Set 130 | data Pat G L r i where 131 | [_] : forall {G'} -> Pats G L r (F i) G' -> Pat G L r i G' 132 | is : forall {G'} -> Exp G L blk (# i) G' -> Pat G L r i G' -- force r = blk ? 
133 | bi : Pat G L r i (G -, (r , (L ==> # i))) 134 | ov : forall {G'} -> Pats G [] blk (B i) G' -> Pat G L r i G' 135 | data Pats G L r where 136 | [] : Pats G L r [] G 137 | [_] : forall {i G'} -> Pat G L r i G' -> Pats G L r (# i) G' 138 | _,_ : forall {S T G' G''} -> Pats G L r S G' -> 139 | Pats G' L r T G'' -> Pats G L r (S / T) G'' 140 | inl : forall {S Ts G'} -> Pats G L r S G' -> Pats G L r (Sum (S ,- Ts)) G' 141 | inr : forall {S Ts G'} -> Pats G L r (Sum Ts) G' -> 142 | Pats G L r (Sum (S ,- Ts)) G' 143 | la : forall {i T G'} -> Pats G (L -, i) r T G' -> Pats G L r (i => T) G' 144 | 145 | module Judge {J : Set}(JF : J -> Fwd (Role * I)) where 146 | 147 | PremStuff : SCx -> Bwd I -> (Role * I) -> SCx -> Set 148 | PremStuff G L ((blk , tt) , i) G' = Exp G L blk (# i) G' 149 | PremStuff G L ((blk , ff) , i) G' = Pat G L blk i G' 150 | PremStuff G L ((red , <>) , i) G' = Exp G L red (# i) G' 151 | 152 | data Premise (G : SCx)(L : Bwd I) : Fwd (Role * I) -> SCx -> Set where 153 | [] : Premise G L [] G 154 | _,-_ : forall {S Ts G' G''} -> PremStuff G L S G' -> 155 | Premise G' L Ts G'' -> Premise G L (S ,- Ts) G'' 156 | 157 | ConcStuff : SCx -> (Role * I) -> SCx -> Set 158 | ConcStuff G ((blk , tt) , i) G' = Pat G [] blk i G' 159 | ConcStuff G ((blk , ff) , i) G' = Exp G [] blk (# i) G' 160 | ConcStuff G ((red , <>) , i) G' = Pat G [] red i G' 161 | 162 | data CxE (G : SCx) : Bwd I -> SCx -> Set where 163 | [] : CxE G [] G 164 | _-,_ : forall {L G' G''} -> 165 | CxE G L G' -> (z : Sg I \ i -> Exps G' L blk (B i) G'') -> 166 | CxE G (L -, fst z) G'' 167 | 168 | data Rule (G : SCx) : Fwd (Role * I) -> Set where 169 | [] : {_ : AllBlk G} -> Rule G [] 170 | _,-_ : forall {S G' Ts} -> ConcStuff G S G' -> Rule G' Ts -> 171 | Rule G (S ,- Ts) 172 | _!_!-_&_ : forall {G' G'' Ts L} -> CxE G L G' -> 173 | (j : J) -> Premise G' L (JF j) G'' -> 174 | Rule G'' Ts -> Rule G Ts 175 | infixr 4 _,-_ _!_!-_&_ 176 | 177 | -- how do we check each context entry for validity? 178 | RulesCon : Set 179 | RulesCon = (i : I) -> Fwd 180 | (Sg SCx \ G -> Pats [] [] red (B i) G * Rule G []) 181 | 182 | -- what are the rules for each judgment form? 183 | RulesSyn : Set 184 | RulesSyn = (j : J) -> Fwd (Rule [] (JF j)) 185 | 186 | -- for each judgment form, 187 | -- what is demanded of inputs? what is delivered by outputs? 188 | 189 | 190 | 191 | ---------------- 192 | 193 | data TTSort : Set where term elim : TTSort 194 | TTSyn TTCxE : TTSort -> Syn TTSort 195 | TTSyn term = Sum ([] 196 | ,- # term / elim => # term 197 | ,- elim => # term 198 | ,- # elim 199 | ,- []) 200 | TTSyn elim = Sum (# elim / # term 201 | ,- # term / # term 202 | ,- []) 203 | 204 | TTCxE term = Sum [] 205 | TTCxE elim = # term 206 | 207 | data TTJudge : Set where chk syn sub : TTJudge 208 | TTJudgeForm : TTJudge -> Fwd (Role * TTSort) 209 | TTJudgeForm chk = ((blk , tt) , term) ,- ((red , <>) , term) ,- [] 210 | TTJudgeForm syn = ((red , <>) , elim) ,- ((blk , ff) , term) ,- [] 211 | TTJudgeForm sub = ((blk , tt) , term) ,- ((blk , tt) , term) ,- [] 212 | 213 | open ExPat 214 | open Judge 215 | 216 | pattern STAR = [ inl [] ] 217 | pattern PI S T = [ inr (inl ([ S ] , la [ T ])) ] 218 | pattern LA T = [ inr (inr (inl (la [ T ]))) ] 219 | pattern EN E = [ inr (inr (inr (inl [ E ]))) ] 220 | pattern AP F S = [ inl ([ F ] , [ S ]) ] 221 | pattern TY s S = [ inr (inl ([ s ] , [ S ])) ] 222 | 223 | rulesC : RulesCon TTSyn TTCxE TTJudgeForm 224 | rulesC term = [] 225 | rulesC elim = _ , ([ bi ] , ([] ! 
chk !- STAR ,- sv ze ,- [] & [])) ,- [] 226 | 227 | rulesS : RulesSyn TTSyn TTCxE TTJudgeForm 228 | rulesS chk = 229 | (STAR ,- STAR ,- []) 230 | ,- (STAR ,- PI bi bi ,- 231 | [] ! chk !- STAR ,- sv (su ze) ,- [] & 232 | ([] -, (elim , [ sv (su ze) ])) ! chk !- 233 | STAR ,- sv ze $ bv ze ,- [] & []) 234 | ,- (PI bi bi ,- LA bi ,- 235 | ([] -, (elim , [ sv (su (su ze)) ])) ! chk !- 236 | (sv (su ze) $ bv ze) ,- ((sv ze $ bv ze) ,- []) & []) 237 | ,- (bi ,- EN bi ,- 238 | [] ! syn !- sv ze ,- bi ,- [] & 239 | [] ! sub !- sv ze ,- sv (su (su ze)) ,- [] & []) 240 | ,- [] 241 | rulesS syn = 242 | (AP bi bi ,- 243 | [] ! syn !- sv (su ze) ,- PI bi bi ,- [] & 244 | [] ! chk !- sv (su ze) ,- sv (su (su ze)) ,- [] & 245 | sv ze $ TY (sv (su (su ze))) (sv (su ze)) ,- []) 246 | ,- (TY bi bi ,- 247 | [] ! chk !- STAR ,- sv ze ,- [] & 248 | [] ! chk !- sv ze ,- sv (su ze) ,- [] & 249 | sv ze ,- []) 250 | ,- ((ov [ bi ] ,- sv ze ,- [])) 251 | ,- [] 252 | rulesS sub = (bi ,- is (sv ze) ,- []) 253 | ,- [] 254 | -------------------------------------------------------------------------------- /Crossroads/board1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/msp-strath/ZEUG/535abea1a6a61bf7bb0d9aeacdae9b60a1489710/Crossroads/board1.jpg -------------------------------------------------------------------------------- /Crossroads/board2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/msp-strath/ZEUG/535abea1a6a61bf7bb0d9aeacdae9b60a1489710/Crossroads/board2.jpg -------------------------------------------------------------------------------- /Crossroads/board3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/msp-strath/ZEUG/535abea1a6a61bf7bb0d9aeacdae9b60a1489710/Crossroads/board3.jpg -------------------------------------------------------------------------------- /Crossroads/board4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/msp-strath/ZEUG/535abea1a6a61bf7bb0d9aeacdae9b60a1489710/Crossroads/board4.jpg -------------------------------------------------------------------------------- /Crossroads/pig.sty: -------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | %%%%%%%%%% %%%%%%%%%% 3 | %%%%%%%%%% Epigram LaTeX Style %%%%%%%%%% 4 | %%%%%%%%%% %%%%%%%%%% 5 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 6 | 7 | %%% This file is intended to replace the old macros.ltx. 
8 | 9 | 10 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 11 | %%% Colours %%% 12 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 13 | 14 | \usepackage{color} 15 | \newcommand{\redFG}[1]{\textcolor[rgb]{0.6,0,0}{#1}} 16 | \newcommand{\greenFG}[1]{\textcolor[rgb]{0,0.4,0}{#1}} 17 | \newcommand{\blueFG}[1]{\textcolor[rgb]{0,0,0.8}{#1}} 18 | \newcommand{\orangeFG}[1]{\textcolor[rgb]{0.8,0.4,0}{#1}} 19 | \newcommand{\purpleFG}[1]{\textcolor[rgb]{0.4,0,0.4}{#1}} 20 | \newcommand{\yellowFG}[1]{\textcolor{yellow}{#1}} 21 | \newcommand{\brownFG}[1]{\textcolor[rgb]{0.5,0.2,0.2}{#1}} 22 | \newcommand{\blackFG}[1]{\textcolor[rgb]{0,0,0}{#1}} 23 | \newcommand{\whiteFG}[1]{\textcolor[rgb]{1,1,1}{#1}} 24 | \newcommand{\yellowBG}[1]{\colorbox[rgb]{1,1,0.2}{#1}} 25 | \newcommand{\brownBG}[1]{\colorbox[rgb]{1.0,0.7,0.4}{#1}} 26 | 27 | \newcommand{\ColourEpigram}{ 28 | \newcommand{\red}{\redFG} 29 | \newcommand{\green}{\greenFG} 30 | \newcommand{\blue}{\blueFG} 31 | \newcommand{\orange}{\orangeFG} 32 | \newcommand{\purple}{\purpleFG} 33 | \newcommand{\yellow}{\yellowFG} 34 | \newcommand{\brown}{\brownFG} 35 | \newcommand{\black}{\blackFG} 36 | \newcommand{\white}{\whiteFG} 37 | } 38 | 39 | \newcommand{\MonochromeEpigram}{ 40 | \newcommand{\red}{\blackFG} 41 | \newcommand{\green}{\blackFG} 42 | \newcommand{\blue}{\blackFG} 43 | \newcommand{\orange}{\blackFG} 44 | \newcommand{\purple}{\blackFG} 45 | \newcommand{\yellow}{\blackFG} 46 | \newcommand{\brown}{\blackFG} 47 | \newcommand{\black}{\blackFG} 48 | \newcommand{\white}{\blackFG} 49 | } 50 | 51 | 52 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 53 | %%% Inference Rules (some ancient macros by Conor) %%% 54 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 55 | 56 | \newlength{\rulevgap} 57 | \setlength{\rulevgap}{0.05in} 58 | \newlength{\ruleheight} 59 | \newlength{\ruledepth} 60 | \newsavebox{\rulebox} 61 | \newlength{\GapLength} 62 | \newcommand{\gap}[1]{\settowidth{\GapLength}{#1} \hspace*{\GapLength}} 63 | \newcommand{\dotstep}[2]{\begin{tabular}[b]{@{}c@{}} 64 | #1\\$\vdots$\\#2 65 | \end{tabular}} 66 | \newlength{\fracwid} 67 | \newcommand{\dotfrac}[2]{\settowidth{\fracwid}{$\frac{#1}{#2}$} 68 | \addtolength{\fracwid}{0.1in} 69 | \begin{tabular}[b]{@{}c@{}} 70 | $#1$\\ 71 | \parbox[c][0.02in][t]{\fracwid}{\dotfill} \\ 72 | $#2$\\ 73 | \end{tabular}} 74 | \newcommand{\Rule}[2]{\savebox{\rulebox}[\width][b] % 75 | {\( \frac{\raisebox{0in} {\( #1 \)}} % 76 | {\raisebox{-0.03in}{\( #2 \)}} \)} % 77 | \settoheight{\ruleheight}{\usebox{\rulebox}} % 78 | \addtolength{\ruleheight}{\rulevgap} % 79 | \settodepth{\ruledepth}{\usebox{\rulebox}} % 80 | \addtolength{\ruledepth}{\rulevgap} % 81 | \raisebox{0in}[\ruleheight][\ruledepth] % 82 | {\usebox{\rulebox}}} 83 | \newcommand{\Case}[2]{\savebox{\rulebox}[\width][b] % 84 | {\( \dotfrac{\raisebox{0in} {\( #1 \)}} % 85 | {\raisebox{-0.03in}{\( #2 \)}} \)} % 86 | \settoheight{\ruleheight}{\usebox{\rulebox}} % 87 | \addtolength{\ruleheight}{\rulevgap} % 88 | \settodepth{\ruledepth}{\usebox{\rulebox}} % 89 | \addtolength{\ruledepth}{\rulevgap} % 90 | \raisebox{0in}[\ruleheight][\ruledepth] % 91 | {\usebox{\rulebox}}} 92 | \newcommand{\Axiom}[1]{\savebox{\rulebox}[\width][b] % 93 | {$\frac{}{\raisebox{-0.03in}{$#1$}}$} % 94 | \settoheight{\ruleheight}{\usebox{\rulebox}} % 95 | \addtolength{\ruleheight}{\rulevgap} % 96 | \settodepth{\ruledepth}{\usebox{\rulebox}} % 97 | 
\addtolength{\ruledepth}{\rulevgap} % 98 | \raisebox{0in}[\ruleheight][\ruledepth] % 99 | {\usebox{\rulebox}}} 100 | \newcommand{\RuleSide}[3]{\gap{\mbox{$#2$}} \hspace*{0.1in} % 101 | \Rule{#1}{#3} % 102 | \hspace*{0.1in}\mbox{$#2$}} 103 | \newcommand{\AxiomSide}[2]{\gap{\mbox{$#1$}} \hspace*{0.1in} % 104 | \Axiom{#2} % 105 | \hspace*{0.1in}\mbox{$#1$}} 106 | \newcommand{\RULE}[1]{\textbf{#1}} 107 | \newcommand{\hg}{\hspace{0.2in}} 108 | 109 | 110 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 111 | % %%% Emphasis %%% 112 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 113 | % 114 | % %%% It's good to separate definitional emphasis... 115 | % 116 | % \newcommand{\demph}{\textbf} 117 | % 118 | % %%% ...from rhetorical emphasis. 119 | % 120 | % \newcommand{\remph}{\textit} 121 | % 122 | % 123 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 124 | % %%% Identifier Fonts %%% 125 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 126 | % 127 | % \newcommand{\CN}{\textsf} 128 | % 129 | % 130 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 131 | % %%% Expressions %%% 132 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 133 | % 134 | % \usepackage{upgreek} 135 | % 136 | % \newcommand{\LAMBINDER}{\red{\uplambda}} 137 | % \newcommand{\Bhab}[2]{#1\!:\!#2} 138 | % \newcommand{\To}{\mathop{\blue{\rightarrow}}} 139 | % \newcommand{\mto}{\mathop{\red{\mapsto}}} 140 | % 141 | % \newcommand{\PI}[2]{\blue{(}\Bhab{#1}{#2}\blue{)}\To} 142 | % \newcommand{\PIS}[1]{\blue{(}#1\blue{)}\To} 143 | % \newcommand{\LLAM}[2]{\LAMBINDER\Bhab{#1}{#2}\mto} 144 | % \newcommand{\LAM}[1]{\LAMBINDER #1 \mto} 145 | % 146 | % \newcommand{\TYPE}{\blue{\star}} 147 | % \newcommand{\EQ}{\mathrel{\D{=}}} 148 | % 149 | % \newcommand{\FROG}{\mbox{\(\backslash\!\!\!\mid\!\!\!\slash\)}} 150 | % 151 | % \newcommand{\ang}[1]{\red{\left<\black{#1}\right>}} 152 | % \newcommand{\sqr}[1]{\red{\left[\black{#1}\right]}} 153 | % \newcommand{\rbar}{\red{|}} 154 | % 155 | % \DeclareMathAlphabet{\mathkw}{OT1}{cmss}{bx}{n} 156 | % \newcommand{\K}[1]{\mathkw{#1}} 157 | % \newcommand{\M}[1]{\mathit{#1}} 158 | % \newcommand{\V}[1]{\purple{\mathit{#1}}} 159 | % \newcommand{\p}{\purple{'}} 160 | % \newcommand{\D}[1]{\blue{\CN{#1}}} 161 | % \newcommand{\tag}[1]{\red{\CN{#1}}} 162 | % \newcommand{\C}[2]{\ang{\tag{#1}#2}} 163 | % \newcommand{\F}[1]{\green{\CN{#1}}} 164 | % \newcommand{\U}[1]{\orange{\CN{#1}}} 165 | % 166 | % \newcommand{\Enum}{\D{Enum}} 167 | % \newcommand{\enum}[1]{\D{enum}\:#1} 168 | % 169 | % \newcommand{\Sig}{\D{Sig}} 170 | % \newcommand{\dom}[1]{\F{dom}\:#1} 171 | % \newcommand{\sig}[1]{\D{sig}\:#1} 172 | % \newcommand{\Refl}{\overline} 173 | % 174 | % \newcommand{\Rhab}{:} 175 | % 176 | % \newcommand{\Ret}[1]{\:\FATR\:#1} 177 | % \newcommand{\By}[1]{\:\FATL\:#1} 178 | % \newcommand{\Refute}[1]{\:\FROG\:#1} 179 | % \newcommand{\With}[1]{\:\&\:#1} 180 | % 181 | % \newcommand{\refl}[1]{\purple{\overline{\black{#1}}}} 182 | % \newcommand{\coe}[4]{#4\:\green{\left[\black{#3:#1\EQ#2}\right>}} 183 | % \newcommand{\coh}[4]% 184 | % {#4\:\green{\left[\!\left[\black{#3:#1\EQ#2}\right|\!\right>}} 185 | % 186 | % \newcommand{\qcoe}[3]{#1\:\green{\F{coe}(\black{#2\EQ #3})}} 187 | % \newcommand{\qcoh}[3]{#1\:\green{\F{coh}(\black{#2\EQ #3})}} 188 | % \newcommand{\qcong}[3]{#1\:\green{\F{cong}(\black{#2:#3})}} 189 | % 190 | % 
\newcommand{\naughtE}[1]{#1\:\green{\FROG}} 191 | % \newcommand{\caseE}[1]{#1\:\F{case}} 192 | % \newcommand{\ExpandE}[1]{#1\:\F{Expand}} 193 | % \newcommand{\DecodeE}[1]{#1\:\F{Decode}} 194 | % \newcommand{\KitE}[2]{#1\:\F{Kit(}#2\F{)}} 195 | % \newcommand{\hd}{\green{\bullet}} 196 | % \newcommand{\tl}{\green{-}} 197 | % \newcommand{\rZ}{\red{\mathsf{0}}} 198 | % \newcommand{\rS}{\red{\mathsf{S}}} 199 | % 200 | % 201 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 202 | % %%% Grammars %%% 203 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 204 | % 205 | % \newcommand{\SC}{\textsc} %% syntactic category 206 | % \newcommand{\Grammar}[2]{ 207 | % \begin{array}[t]{rrll} 208 | % #1 & ::= & #2 209 | % \end{array} 210 | % } 211 | % \newcommand{\Gor}{\\ & | &} %% separator making new row of array 212 | % \newcommand{\GNew}[1]{\medskip \\ #1 & ::= &} 213 | % \newcommand{\Gbr}[2]{\!\left\lgroup #2 \right\rgroup^{\!\!#1}} 214 | % 215 | % \newcommand{\wbox}{\square} 216 | % \newcommand{\bbox}{\blacksquare} 217 | % 218 | % 219 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 220 | % %%% Judgments %%% 221 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 222 | % 223 | % \newcommand{\EC}{\mathcal{E}} 224 | % \newcommand{\Tn}{\vdash} 225 | % \newcommand{\Eq}{\equiv} 226 | % \newcommand{\x}{\V{x}} 227 | % \newcommand{\xS}{\Bhab{\x}{S}} 228 | % 229 | % 230 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 231 | % %%% Computation Relations %%% 232 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 233 | % 234 | % \usepackage{amssymb} 235 | % \newcommand{\step}[1]{\leadsto_{#1}} 236 | % 237 | % 238 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 239 | % %%% Meta-Computation Relations %%% 240 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 241 | % 242 | % \newcommand{\mq}{\mathrel{\Longrightarrow}} 243 | % 244 | % 245 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 246 | % %%% Nonexamples %%% 247 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 248 | % 249 | % %%% Non-examples in display math must be explicitly marked as such, in order 250 | % %%% to avoid dangerous wrong learnings. 
251 | % 252 | % \newcommand{\BAD}{\mbox{\(\red{(\times)}\)}} 253 | % \newcommand{\NONEXAMPLE}[1]{ 254 | % \[ 255 | % \BAD\qquad 256 | % #1 257 | % \]% 258 | % } 259 | % 260 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 261 | % %%% Source %%% 262 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 263 | % 264 | % \newenvironment{source}{\[\begin{array}[t]{ll}}{\end{array}\]} 265 | % 266 | % \newcommand{\letH}[2]{\K{let} & #1 \quad #2} 267 | % \newcommand{\letV}[2]{\K{let} & #1 \\ #2} 268 | % 269 | % \newcommand{\sgap}{\medskip \\} 270 | % \newcommand{\X}{l@{\:}} 271 | % \newcommand{\I}{@{\!\_}l@{}} 272 | % \newcommand{\prog}[2]{\begin{array}[t]{@{}#1}#2\end{array}} 273 | % \newcommand{\dent}[2]{\multicolumn{#1}{@{\;\;}l}{#2}} 274 | % \newcommand{\UPI}[1]{\PIBINDER{#1}\FATR} 275 | % \newcommand{\ULAM}[1]{\LAMBINDER{#1}\FATR} 276 | % 277 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 278 | % %%% Vertical Arrangement %%% 279 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 280 | % 281 | % \newcommand{\STL}[1]{\begin{array}[t]{@{}l@{}}#1\end{array}} 282 | % \newcommand{\STC}[1]{\begin{array}[t]{@{}c@{}}#1\end{array}} 283 | % \newcommand{\CASES}[1]{\begin{array}[t]{@{}l@{\;\mapsto\;}l@{}}#1\end{array}} 284 | % 285 | % 286 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 287 | % %%% Ecce %%% 288 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 289 | % 290 | % \newcommand{\ecce}[1]{\begin{array}{@{}l@{\;}l}#1\end{array}} 291 | % 292 | % \newcommand{\ecDEFN}[4]{ 293 | % \F{#1}&[\\ 294 | % \multicolumn{2}{@{}l}{\quad\ecce{#2}}\\ 295 | % \;]&\FATR\;#3\;:\:#4 \\ 296 | % } 297 | % \newcommand{\ecDefn}[4]{ 298 | % \F{#1} & [#2]\;\FATR\;#3\;:\:#4 \\ 299 | % } 300 | % \newcommand{\ecdefn}[3]{ 301 | % \F{#1} & \FATR\;#2\;:\:#3 \\ 302 | % } 303 | % \newcommand{\ecHOLE}[3]{ 304 | % \U{#1} & [\\ 305 | % \multicolumn{2}{@{}l}{\quad\ecce{#2}}\\ 306 | % \;]&\FATR\;?\;:\:#3\\ 307 | % } 308 | % \newcommand{\ecHole}[3]{ 309 | % \U{#1}&[#2]\;\FATR\;?\;:\:#3 \\ 310 | % } 311 | % \newcommand{\echole}[2]{ 312 | % \U{#1} & \FATR\;?\;:\:#2 \\ 313 | % } 314 | % \newcommand{\ecpara}[2]{ 315 | % \V{#1} & :\;#2 \\ 316 | % } 317 | % 318 | -------------------------------------------------------------------------------- /DeBruijnify.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GADTs, DataKinds, KindSignatures, StandaloneDeriving #-} 2 | module DeBrujnify where 3 | 4 | import Utils 5 | import Data.List 6 | import Data.Maybe 7 | 8 | data Raw = RLam String Raw 9 | | RApp Raw Raw 10 | | RVar String Nat 11 | deriving Show 12 | 13 | data Tm (n :: Nat) :: * where 14 | Lam :: Tm (Suc n) -> Tm n 15 | App :: Tm n -> Tm n -> Tm n 16 | V :: Fin n -> Tm n 17 | P :: String -> Tm n 18 | 19 | deriving instance Show (Tm n) 20 | 21 | type SC = Maybe 22 | 23 | deBruijnify :: Vec String n -> Raw -> SC (Tm n) 24 | deBruijnify g (RLam x t) = Lam <$> deBruijnify (VCons x g) t 25 | deBruijnify g (RApp t u) = App <$> deBruijnify g t <*> deBruijnify g u 26 | deBruijnify g (RVar x n) = V <$> velemIndex' x n g 27 | 28 | -------------------------------------------------------------------------------- /Extrude/Pointy.pdf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/msp-strath/ZEUG/535abea1a6a61bf7bb0d9aeacdae9b60a1489710/Extrude/Pointy.pdf -------------------------------------------------------------------------------- /Extrude/Pointy.tex: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | %%\usepackage{fourier} %% \widearc 3 | \usepackage{wasysym} 4 | \usepackage{amssymb} 5 | %\usepackage{amsmath} 6 | %\usepackage{mathabx} %% \overgroup \wideparen 7 | \usepackage{upgreek} 8 | \usepackage{pig} 9 | 10 | \ColourEpigram 11 | 12 | \begin{document} 13 | \title{Subtle Points} 14 | \author{James, Fred and Conor, probably} 15 | \maketitle 16 | 17 | \newcommand{\hb}{\!:\!} 18 | \newcommand{\sbs}[1]{[ #1 ]} 19 | \newcommand{\PI}[3]{\blue{(}#1\hb#2\blue{)} \mathrel{\blue{\to}} #3} 20 | \newcommand{\LA}[2]{\red{\uplambda}\,#1 \mathrel{\red{\to}} #2} 21 | \newcommand{\SG}[3]{\blue{(}#1\hb#2\blue{)} \mathrel{\blue{\times}} #3} 22 | \newcommand{\pr}{\red{,}\,} 23 | \newcommand{\car}{\:\green{\textsf{car}}} 24 | \newcommand{\cdr}{\:\green{\textsf{cdr}}} 25 | \newcommand{\NE}{\underline} 26 | \newcommand{\ATOM}[1]{\black{\mathsf{#1}}} 27 | \newcommand{\Atom}[1]{\blue{\mathsf{#1}}} 28 | \newcommand{\atom}[1]{\red{\mathsf{#1}}} 29 | \newcommand{\KIND}{\ATOM{KIND}} 30 | \newcommand{\TYPE}{\ATOM{TYPE}} 31 | \newcommand{\EL}[1]{\ATOM{EL}\:#1} 32 | 33 | We have a system of \emph{kinds}, kind checking and kind synthesis. Contexts assign kinds to free variables. We write $\dashv x:K$ to assert such an assignment. We write context lookups and context extensions in rules, but never the contexts themselves. 34 | \[ 35 | \Rule{e\in J\quad J\le K} 36 | {K\ni \NE e} 37 | \qquad 38 | \Rule{\dashv x:K} 39 | {x\in K} 40 | \qquad 41 | \Rule{\KIND\ni K\quad K\ni t} 42 | {t\hb K \in K} 43 | \qquad 44 | \NE{t\hb K}\leadsto t 45 | \] 46 | 47 | Subkinding is certainly reflexive. 48 | \[ 49 | \Axiom{K\le K} 50 | \] 51 | We shall ensure that transitivity is admissible, and that so is subsumption. 52 | \[ 53 | \textbf{Admissibly,}\qquad 54 | \Rule{I\le J \quad J\le K} 55 | {I\le K} 56 | \qquad 57 | \Rule{J\ni t\quad J\le K} 58 | {K\ni t} 59 | \] 60 | We also allow kinds to compute before checking and after synthesis. 61 | \[ 62 | \Rule{J\leadsto K\quad K\ni t} 63 | {J\ni t} 64 | \qquad 65 | \Rule{e\in J\quad J\leadsto K} 66 | {e\in K} 67 | \] 68 | 69 | 70 | \section{type theory as we know it} 71 | 72 | Types are a kind. 73 | Each type gives rise to the kind of its elements. 74 | \[ 75 | \Axiom{\KIND\ni\TYPE} 76 | \qquad 77 | \Rule{\TYPE\ni T} 78 | {\KIND\ni \EL T} 79 | \] 80 | 81 | 82 | Function types work like this. 83 | \[\begin{array}{c} 84 | \Rule{x:\EL S \vdash \TYPE\ni T} 85 | {\TYPE\ni \PI xST} 86 | \qquad 87 | \Rule{x:\EL S \vdash \EL T\ni t} 88 | {\EL{\PI xST} \ni \LA xt} 89 | \\ 90 | \Rule{f\in \EL{\PI xS{T\sbs x}}\quad \EL S \ni s} 91 | {f\:s\in \EL {T\sbs{s\hb S}}} 92 | \\ 93 | (\LA x{t\sbs x} : \EL{\PI xS{T\sbs x}})\:s\leadsto 94 | t(s\hb \EL S):\EL{T(s\hb \EL S)} 95 | \\ 96 | \Rule{\EL{S'}\le \EL S\quad x:\EL{S'}\vdash \EL T\le \EL{T'}} 97 | {\EL{\PI xST}\le\EL{\PI x{S'}{T'}}} 98 | \end{array}\] 99 | 100 | Pair types work like this. 
101 | \[\begin{array}{c} 102 | \Rule{x:\EL\:S \vdash \TYPE\ni T} 103 | {\TYPE\ni \SG xST} 104 | \qquad 105 | \Rule{\EL\:S\ni s\qquad \EL\:T\sbs{s\hb\EL\:S}\ni t} 106 | {\EL\:\SG xS{T\sbs x} \ni s\pr t} 107 | \\ 108 | \Rule{p\in \EL\:\SG xST} 109 | {p\:\car\in \EL S} 110 | \qquad 111 | \Rule{p\in \EL\:\SG xS{T\sbs x}} 112 | {p\:\cdr\in \EL\:T\sbs{s\hb S}} 113 | \\ 114 | (s\pr t : \EL\:\SG xST)\:\car\leadsto 115 | s\hb \EL\:S \\ 116 | (s\pr t : \EL\:\SG xS{T\sbs x})\:\cdr\leadsto 117 | t\hb \EL\:T\sbs{p\:\car} 118 | \\ 119 | \Rule{\EL{S}\le \EL{S'}\quad x:\EL S\vdash \EL T\le \EL{T'}} 120 | {\EL{\SG xST}\le\EL{\SG x{S'}{T'}}} 121 | \end{array}\] 122 | 123 | There will probably be more stuff. 124 | 125 | 126 | \section{segmentations and points} 127 | 128 | \newcommand{\SE}{\ATOM{SEG}} 129 | \newcommand{\PO}[1]{\ATOM{POINT}\:#1} 130 | \newcommand{\da}{\Atom{-}} 131 | \newcommand{\se}[1]{\Atom{\{}#1\Atom{\}}} 132 | \newcommand{\PL}{\atom{0}} 133 | \newcommand{\PR}{\atom{1}} 134 | \newcommand{\SL}{\atom{\blacktriangleleft}} 135 | \newcommand{\SR}{\atom{\blacktriangleright}} 136 | \newcommand{\sy}[1]{#1^{\green{\circ}}} 137 | 138 | Segmentations are a kind, and so are points in segmentations. 139 | \[ 140 | \Axiom{\KIND\ni\SE}\qquad 141 | \Rule{\SE\ni\sigma} 142 | {\KIND\ni\PO\sigma} 143 | \] 144 | There is a trivial segmentations. All segmentations have endpoints. 145 | \[ 146 | \Axiom{\SE\ni\da}\qquad 147 | \Axiom{\PO\sigma\ni\PL}\qquad 148 | \Axiom{\PO\sigma\ni\PR} 149 | \] 150 | But there are also nontrivial segmentations. 151 | \[ 152 | \Rule{\SE\ni\sigma\quad \TYPE\ni T\quad \SE\ni\tau} 153 | {\SE\ni\se{\sigma T \tau}} 154 | \] 155 | These have inner points. 156 | \[ 157 | \Rule{\PO\sigma\ni p} 158 | {\PO\se{\sigma T \tau}\ni\SL p} 159 | \qquad 160 | \Rule{\PO\tau\ni p} 161 | {\PO\se{\sigma T \tau}\ni\SR p} 162 | \] 163 | The equational theory of points admits 164 | \[ 165 | \SL\PL\leadsto \PL\qquad 166 | \SL\PR\leadsto \SR\PL \qquad 167 | \SR\PR\leadsto \PR 168 | \] 169 | Each segmentation has its opposite. There are no neutral segmentations, 170 | so this involutive operation computes fully. 171 | \[ 172 | \sy\da = \da\qquad 173 | \sy{(\se{\sigma T \tau})} = \se{\sy\tau T \sy\sigma} 174 | \] 175 | However, there will be variables which stand for points, so opposite is 176 | an eliminator for points. 177 | \[ 178 | \Rule{e\in\PO\sigma} 179 | {\sy e\in\PO{\sy\sigma}} 180 | \] 181 | It computes as follows. 182 | \[\begin{array}{c} 183 | \sy{(\PL\hb\PO\sigma)}\leadsto \PR\hb\PO{\sy\sigma} \\ 184 | \sy{(\SL p\hb\PO{\se{\sigma T \tau}})}\leadsto \SR(p\hb\PO\sigma)\hb\PO{\se{\sy\tau T \sy\sigma}} \\ 185 | \sy{(\SR p\hb\PO{\se{\sigma T \tau}})}\leadsto \SL(p\hb\PO\tau)\hb\PO{\se{\sy\tau T \sy\sigma}} \\ 186 | \sy{(\PR\hb\PO\sigma)}\leadsto \PL\hb\PO{\sy\sigma} \\ 187 | \sy{(\sy{i})} \leadsto i 188 | \end{array}\] 189 | Note that a neutral point is either a point variable or its opposite. 190 | 191 | For any given segmentation $\sigma$, we can compute the finite set 192 | of canonical points it contains. 193 | \[\begin{array}{l} 194 | \textrm{Point}(\sigma) = \{\PL\}\cup\textrm{Inner}(\sigma)\cup\{\PR\}\\ 195 | \textrm{Inner}(\da) = \{\} \\ 196 | \textrm{Inner}(\sigma T\tau) = \{\PL p|p\in\textrm{Inner}(\sigma)\}\cup\{\SR\PL\}\cup 197 | \{\PR p|p\in\textrm{Inner}(\tau)\} 198 | \end{array}\] 199 | 200 | Segmentations are ordered by \emph{subtlety}, which induces 201 | inclusion of canonical point sets, and thence subkinding. 
202 | \[ 203 | \Axiom{\da\le\sigma} 204 | \qquad 205 | \Rule{\sigma\le\sigma'\quad 206 | \TYPE\ni T \equiv T' \quad 207 | \tau\le\tau'} 208 | {\se{\sigma T\tau}\le\se{\sigma' T'\tau'}} 209 | \qquad 210 | \Rule{\sigma\le\tau} 211 | {\PO\sigma\le\PO\tau} 212 | \] 213 | 214 | 215 | \section{type paths} 216 | 217 | A type path type is given as a segmentation between two types. 218 | \[ 219 | \Rule{\TYPE\ni S\quad \SE\ni\sigma\quad\TYPE\ni T} 220 | {\TYPE\ni S\sigma T} 221 | \] 222 | A type path type with segmentation $\sigma$ tells us the types at all the 223 | \emph{canonical} points in $\textrm{Point}(\sigma)$. 224 | \[\begin{array}{c} 225 | (S\sigma U).\PL = S\qquad 226 | (S\se{\sigma T\tau}U).\SL p = (S\sigma T).p\\ 227 | (S\se{\sigma T\tau}U).\SR p = (T\tau U).p\qquad 228 | (S\sigma U).\PR = U 229 | \end{array}\] 230 | A type path is an abstraction over the points in the segmentation which 231 | connects with the segmentation at all points: the universal quantifier 232 | eliminates as a finite conjunction of premises. 233 | \[ 234 | \Rule{i:\PO\sigma \vdash \TYPE \ni T\sbs i \quad 235 | \forall p\in \textrm{Point}(\sigma).\:\TYPE \ni (S\sigma U).p\equiv T\sbs{p\hb\PO\sigma} } 236 | {\EL{S\sigma U}\ni\LA i T} 237 | \] 238 | 239 | Subtlety induces subtyping contravariantly, meaning that you can forget the 240 | existence of (hence connection with) intermediate points. 241 | \[ 242 | \Rule{\sigma\le\tau} 243 | {\EL{S\tau T}\le\EL{S\sigma T}} 244 | \] 245 | The elimination behaviour is by substitution. 246 | \[ 247 | (\LA i T\sbs i: \EL{S\sigma U})\:p \leadsto T\sbs{p\hb\PO\sigma}:\TYPE 248 | \] 249 | So, given that type reconstruction is easy for neutrals, we may have 250 | \[ 251 | Q\in\EL{S\sigma U}\wedge (S\sigma U).p = T \quad\Rightarrow\quad Q\:p\leadsto T\hb\TYPE 252 | \] 253 | 254 | If we know that a path has nontrivial segmentation, we can grab its segments. 255 | Let's write $Q\SL$ for the path $\LA i{Q\:(\SL i)}$ which is just the left segment 256 | of $Q$, and similarly $Q\SR$ for the right segment. 257 | 258 | \section{kinky paths} 259 | 260 | Next, we give you form of conditional expression to compute types from points in a 261 | piecewise continuous way. 262 | \newcommand{\kink}{\green{\mathsf{kink}}} 263 | \[ 264 | \Rule{\begin{array}{c} 265 | p\in\PO\pi\quad 266 | \TYPE\ni S\quad \SE\ni\sigma \quad \TYPE\ni T \quad \SE\ni\tau \quad \TYPE\ni U \\ 267 | \pi\le\se{\sigma T\tau} \qquad \qquad \qquad \qquad 268 | \EL{S\sigma T}\ni P \qquad \qquad 269 | \EL{T\tau U}\ni Q \qquad \qquad 270 | \end{array}} 271 | {p\:\kink(S\sigma T\tau U|P\:Q)\in\TYPE} 272 | \] 273 | The conditional computes when we learn which half the point is in. Of course, the 274 | two halves meet in the middle. 275 | \[\begin{array}{c} 276 | (\PL\hb\PO\pi)\:\kink(S\_\_\_\_|P\:Q) \leadsto S\hb\TYPE \\ 277 | (\SL p\hb\PO{\sigma T \tau}) \:\kink(S\_\_\_U|P\:Q) 278 | \leadsto (P\hb\EL{S\sigma T})\:p \\ 279 | (\SR p\hb\PO{\sigma T \tau}) \:\kink(S\_\_\_U|P\:Q) 280 | \leadsto (Q\hb\EL{T\tau U})\:p \\ 281 | (\PR\hb\PO\pi)\:\kink(\_\_\_\_U|P\:Q) \leadsto U\hb\TYPE \\ 282 | \end{array}\] 283 | 284 | Now, the subtlety is that the kinky construction can be more subtle 285 | than the point being projected from it: that just means we actually know more types 286 | at more points than we can be asked about. We can allow points that we're definitely 287 | not going to get asked for to wobble about a bit. 
288 | \[\begin{array}{l} 289 | p\in\PO\da \wedge 290 | i:\PO\sigma \vdash \TYPE\ni S \equiv (P\hb\EL{S\sigma T})\:i\\ 291 | \quad\Rightarrow\quad 292 | p\:\kink(S\sigma T\tau U|P\:Q) \leadsto (Q\hb\EL{T\tau U})\:p 293 | \end{array}\] 294 | If $p$ can only refer to an \emph{endpoint} and $P$ is 295 | \emph{constant} (and equal to both $S$ and $T$) we can 296 | effective yank the midpoint all the way left, taking $Q$ as the whole path. 297 | Similarly, 298 | \[\begin{array}{l} 299 | p\in\PO\da \wedge 300 | i:\PO\sigma \vdash \TYPE\ni (Q\hb\EL{T\tau U})\:i \equiv U \\ 301 | \quad\Rightarrow\quad 302 | p\:\kink(S\sigma T\tau U|P\:Q) \leadsto (P\hb\EL{S\sigma T})\:p 303 | \end{array}\] 304 | Moreover, if we're sure we're only going to project an endpoint, we can 305 | standardise the segmentation structure by shuffling kinks to the right. 306 | \[\begin{array}{l} 307 | p\in\PO\da \wedge\\ 308 | i:\PO\sigma \vdash \TYPE\ni (P\hb\EL{S\sigma T})\:i \equiv 309 | i\:\kink(S \sigma_0 S' \sigma_1 T|P_0\:P_1)\\ 310 | \quad\Rightarrow\quad 311 | p\:\kink(S \sigma T \tau U|P\:Q) \\ 312 | \qquad \leadsto \quad 313 | p\:\kink(S \sigma_0 S'\se{\sigma_1 T\tau}U|P_0\:\LA i{i\:\kink(S' \sigma_1 T\tau U|P_1\:Q)}) 314 | \end{array}\] 315 | 316 | So we have acquired categorical structure for trivially segmented paths $S\da T$. 317 | If we have $\EL{S\da T}\ni P$ and $\EL{T\da U}\ni Q$, we get their composite 318 | \[ 319 | \EL{S\da U}\ge 320 | \EL{S\se{\da T\da}U}\ni 321 | \LA i{i\:\kink(S\da T\da U|P\:Q)} 322 | \] 323 | where this composition absorbs identities (constant paths) and is associative. 324 | We write this composition as $\circ$. 325 | 326 | \section{transporting values along type paths} 327 | 328 | \newcommand{\xport}[4]{#4\green{(}#1\green{|}#2\green{\to}#3\green{)}} 329 | 330 | Let us now figure out how to transport values between points on a type path. 331 | \[ 332 | \Rule{ 333 | \EL{S\sigma T}\ni Q\quad 334 | \PO\sigma\ni p\quad 335 | \PO\sigma\ni r\quad 336 | \EL{Q\:p}\ni u} 337 | {\xport Qpru\in \EL{Q\:r}} 338 | \] 339 | How should this work? For a start, the fact that we can decide equality of open 340 | things in point kinds means we can run on the spot. 341 | \[ 342 | \xport Qiiu \leadsto u\hb \EL{Q\:i} 343 | \] 344 | 345 | We can also zoom. Here, matching against $\SL$ and $\SR$ should be liberal 346 | in the sense that $\SL i$ matches $\PL$ with $i=\PL$ and $\SR\PL$ with $i=\PR$, 347 | and similarly on the right. 348 | \[\begin{array}{c} 349 | \xport Q{\SL i}{\SL j}u \leadsto 350 | \xport{Q\SL}iju 351 | \\ 352 | \xport Q{\SR i}{\SR j}u \leadsto 353 | \xport{Q\SR}iju 354 | \end{array}\] 355 | If we get a thing on each side, we go via the middle. 356 | \[\begin{array}{c} 357 | \xport Q{\SL i}{\SR j}u \leadsto 358 | \xport{Q\SR}\PL j{\xport{Q\SL}i\PR u} 359 | \\ 360 | \xport Q{\SR i}{\SL j}u \leadsto 361 | \xport{Q\SL}\PR j{\xport{Q\SR}i\PL u} 362 | \end{array}\] 363 | 364 | If $i$ and $j$ are distinct but neutral, it's ok to be stuck. 365 | Real work must happen when $i$ and $j$ are a permutation of $\PL$ and $\PR$, 366 | and $\sigma\equiv\da$. That's as far as we can get by looking at the points. 367 | 368 | We can also look at the type $Q\:k$ we get at some arbitrary point 369 | $k\hb\PO\sigma$, which amounts to $\eta$-expanding, then peeking under the $\LA k{}$. 370 | Kinky paths transport in stages, e.g.: 371 | \[ 372 | \xport{\LA k{k\:\kink(S\sigma T\tau U|P\:Q)}}\PL\PR s\leadsto 373 | \xport Q\PL\PR{\xport P\PL\PR s} 374 | \] 375 | (What if the head isn't $k$? 
It's ok to be stuck, I think, because $s$ can't be canonical.) 376 | Now, the fact that $\kink$ squishes constant paths means that transportation must also 377 | do so. 378 | \[ 379 | \xport{\LA \_ T}iju \leadsto u 380 | \] 381 | In particular, we have now established functoriality of transportation. 382 | 383 | When we have handy $\eta$-laws and structural paths, we can be quite aggressive and 384 | still resolve critical pairs. 385 | \[\begin{array}{l} 386 | \xport{\LA k{\SG x{S\sbs k}{T\sbs{k,x}}}}iju \leadsto\\ 387 | \quad 388 | \mbox{let}\;s\sbs k = \xport{\LA k{S\sbs k}}ik{(u\car)}; 389 | \;t\sbs k = \xport{\LA k{T\sbs{k,s\sbs k}}}ik{(u\cdr)}\\ 390 | \quad 391 | \mbox{in}\;s\sbs j,t\sbs j 392 | \end{array}\] 393 | The coherence comes from the fact that we can extrude a value across the whole 394 | type path from any point on it. Functions are just as easy. 395 | \[\begin{array}{l} 396 | \xport{\LA k{\PI x{S\sbs k}{T\sbs{k,x}}}}iju \leadsto \LA x{}\\ 397 | \quad 398 | \mbox{let}\;s\sbs k = \xport{\LA k{S\sbs k}}jkx; 399 | \;t\sbs k = \xport{\LA k{T\sbs{k,s\sbs k}}}ik{(u\:s\sbs i)}\\ 400 | \quad 401 | \mbox{in}\;t\sbs j 402 | \end{array}\] 403 | 404 | How about type paths? Composition! 405 | \[ 406 | \xport{\LA k{S\sbs k\da T\sbs k}}\PL\PR Q \leadsto 407 | (\LA k{S\sbs{\sy k}})\circ Q \circ (\LA k {T\sbs k}) 408 | \] 409 | And if we have nontrivial segmentation, that's ok, too. We can transport the 410 | segments separately, then kink them back together. 411 | \[\begin{array}{l} 412 | \xport{\LA k{S\sbs k\se{\sigma\sbs k T\sbs k\tau\sbs k}U\sbs k}}\PL\PR Q \leadsto 413 | \LA i{}\\ 414 | \quad 415 | i\:\kink(S\sbs\PR \sigma\sbs\PR T\sbs\PR \tau\sbs\PR U\sbs\PR|\\ 416 | \qquad (\xport{\LA k{S\sbs k \sigma\sbs k T\sbs k}}\PL\PR{Q\SL}) \\ 417 | \qquad (\xport{\LA k{T\sbs k \tau\sbs k U\sbs k}}\PL\PR{Q\SR})) 418 | \end{array}\] 419 | 420 | 421 | \end{document} -------------------------------------------------------------------------------- /Extrude/pig.sty: -------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | %%%%%%%%%% %%%%%%%%%% 3 | %%%%%%%%%% Epigram LaTeX Style %%%%%%%%%% 4 | %%%%%%%%%% %%%%%%%%%% 5 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 6 | 7 | %%% This file is intended to replace the old macros.ltx. 
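%%% Typical use, as far as I can tell from what is defined below (an
%%% assumption about intent, not a documented interface): \usepackage{pig}
%%% in the preamble, then invoke \ColourEpigram (or \MonochromeEpigram)
%%% exactly once, so that \red, \green and friends exist before any of the
%%% colour-coded macros get used.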
8 | 9 | 10 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 11 | %%% Colours %%% 12 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 13 | 14 | \usepackage{color} 15 | \newcommand{\redFG}[1]{\textcolor[rgb]{0.6,0,0}{#1}} 16 | \newcommand{\greenFG}[1]{\textcolor[rgb]{0,0.4,0}{#1}} 17 | \newcommand{\blueFG}[1]{\textcolor[rgb]{0,0,0.8}{#1}} 18 | \newcommand{\orangeFG}[1]{\textcolor[rgb]{0.8,0.4,0}{#1}} 19 | \newcommand{\purpleFG}[1]{\textcolor[rgb]{0.4,0,0.4}{#1}} 20 | \newcommand{\yellowFG}[1]{\textcolor{yellow}{#1}} 21 | \newcommand{\brownFG}[1]{\textcolor[rgb]{0.5,0.2,0.2}{#1}} 22 | \newcommand{\blackFG}[1]{\textcolor[rgb]{0,0,0}{#1}} 23 | \newcommand{\whiteFG}[1]{\textcolor[rgb]{1,1,1}{#1}} 24 | \newcommand{\yellowBG}[1]{\colorbox[rgb]{1,1,0.2}{#1}} 25 | \newcommand{\brownBG}[1]{\colorbox[rgb]{1.0,0.7,0.4}{#1}} 26 | 27 | \newcommand{\ColourEpigram}{ 28 | \newcommand{\red}{\redFG} 29 | \newcommand{\green}{\greenFG} 30 | \newcommand{\blue}{\blueFG} 31 | \newcommand{\orange}{\orangeFG} 32 | \newcommand{\purple}{\purpleFG} 33 | \newcommand{\yellow}{\yellowFG} 34 | \newcommand{\brown}{\brownFG} 35 | \newcommand{\black}{\blackFG} 36 | \newcommand{\white}{\whiteFG} 37 | } 38 | 39 | \newcommand{\MonochromeEpigram}{ 40 | \newcommand{\red}{\blackFG} 41 | \newcommand{\green}{\blackFG} 42 | \newcommand{\blue}{\blackFG} 43 | \newcommand{\orange}{\blackFG} 44 | \newcommand{\purple}{\blackFG} 45 | \newcommand{\yellow}{\blackFG} 46 | \newcommand{\brown}{\blackFG} 47 | \newcommand{\black}{\blackFG} 48 | \newcommand{\white}{\blackFG} 49 | } 50 | 51 | 52 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 53 | %%% Inference Rules (some ancient macros by Conor) %%% 54 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 55 | 56 | \newlength{\rulevgap} 57 | \setlength{\rulevgap}{0.05in} 58 | \newlength{\ruleheight} 59 | \newlength{\ruledepth} 60 | \newsavebox{\rulebox} 61 | \newlength{\GapLength} 62 | \newcommand{\gap}[1]{\settowidth{\GapLength}{#1} \hspace*{\GapLength}} 63 | \newcommand{\dotstep}[2]{\begin{tabular}[b]{@{}c@{}} 64 | #1\\$\vdots$\\#2 65 | \end{tabular}} 66 | \newlength{\fracwid} 67 | \newcommand{\dotfrac}[2]{\settowidth{\fracwid}{$\frac{#1}{#2}$} 68 | \addtolength{\fracwid}{0.1in} 69 | \begin{tabular}[b]{@{}c@{}} 70 | $#1$\\ 71 | \parbox[c][0.02in][t]{\fracwid}{\dotfill} \\ 72 | $#2$\\ 73 | \end{tabular}} 74 | \newcommand{\Rule}[2]{\savebox{\rulebox}[\width][b] % 75 | {\( \frac{\raisebox{0in} {\( #1 \)}} % 76 | {\raisebox{-0.03in}{\( #2 \)}} \)} % 77 | \settoheight{\ruleheight}{\usebox{\rulebox}} % 78 | \addtolength{\ruleheight}{\rulevgap} % 79 | \settodepth{\ruledepth}{\usebox{\rulebox}} % 80 | \addtolength{\ruledepth}{\rulevgap} % 81 | \raisebox{0in}[\ruleheight][\ruledepth] % 82 | {\usebox{\rulebox}}} 83 | \newcommand{\Case}[2]{\savebox{\rulebox}[\width][b] % 84 | {\( \dotfrac{\raisebox{0in} {\( #1 \)}} % 85 | {\raisebox{-0.03in}{\( #2 \)}} \)} % 86 | \settoheight{\ruleheight}{\usebox{\rulebox}} % 87 | \addtolength{\ruleheight}{\rulevgap} % 88 | \settodepth{\ruledepth}{\usebox{\rulebox}} % 89 | \addtolength{\ruledepth}{\rulevgap} % 90 | \raisebox{0in}[\ruleheight][\ruledepth] % 91 | {\usebox{\rulebox}}} 92 | \newcommand{\Axiom}[1]{\savebox{\rulebox}[\width][b] % 93 | {$\frac{}{\raisebox{-0.03in}{$#1$}}$} % 94 | \settoheight{\ruleheight}{\usebox{\rulebox}} % 95 | \addtolength{\ruleheight}{\rulevgap} % 96 | \settodepth{\ruledepth}{\usebox{\rulebox}} % 97 | 
\addtolength{\ruledepth}{\rulevgap} % 98 | \raisebox{0in}[\ruleheight][\ruledepth] % 99 | {\usebox{\rulebox}}} 100 | \newcommand{\RuleSide}[3]{\gap{\mbox{$#2$}} \hspace*{0.1in} % 101 | \Rule{#1}{#3} % 102 | \hspace*{0.1in}\mbox{$#2$}} 103 | \newcommand{\AxiomSide}[2]{\gap{\mbox{$#1$}} \hspace*{0.1in} % 104 | \Axiom{#2} % 105 | \hspace*{0.1in}\mbox{$#1$}} 106 | \newcommand{\RULE}[1]{\textbf{#1}} 107 | \newcommand{\hg}{\hspace{0.2in}} 108 | 109 | 110 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 111 | % %%% Emphasis %%% 112 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 113 | % 114 | % %%% It's good to separate definitional emphasis... 115 | % 116 | % \newcommand{\demph}{\textbf} 117 | % 118 | % %%% ...from rhetorical emphasis. 119 | % 120 | % \newcommand{\remph}{\textit} 121 | % 122 | % 123 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 124 | % %%% Identifier Fonts %%% 125 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 126 | % 127 | % \newcommand{\CN}{\textsf} 128 | % 129 | % 130 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 131 | % %%% Expressions %%% 132 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 133 | % 134 | % \usepackage{upgreek} 135 | % 136 | % \newcommand{\LAMBINDER}{\red{\uplambda}} 137 | % \newcommand{\Bhab}[2]{#1\!:\!#2} 138 | % \newcommand{\To}{\mathop{\blue{\rightarrow}}} 139 | % \newcommand{\mto}{\mathop{\red{\mapsto}}} 140 | % 141 | % \newcommand{\PI}[2]{\blue{(}\Bhab{#1}{#2}\blue{)}\To} 142 | % \newcommand{\PIS}[1]{\blue{(}#1\blue{)}\To} 143 | % \newcommand{\LLAM}[2]{\LAMBINDER\Bhab{#1}{#2}\mto} 144 | % \newcommand{\LAM}[1]{\LAMBINDER #1 \mto} 145 | % 146 | % \newcommand{\TYPE}{\blue{\star}} 147 | % \newcommand{\EQ}{\mathrel{\D{=}}} 148 | % 149 | % \newcommand{\FROG}{\mbox{\(\backslash\!\!\!\mid\!\!\!\slash\)}} 150 | % 151 | % \newcommand{\ang}[1]{\red{\left<\black{#1}\right>}} 152 | % \newcommand{\sqr}[1]{\red{\left[\black{#1}\right]}} 153 | % \newcommand{\rbar}{\red{|}} 154 | % 155 | % \DeclareMathAlphabet{\mathkw}{OT1}{cmss}{bx}{n} 156 | % \newcommand{\K}[1]{\mathkw{#1}} 157 | % \newcommand{\M}[1]{\mathit{#1}} 158 | % \newcommand{\V}[1]{\purple{\mathit{#1}}} 159 | % \newcommand{\p}{\purple{'}} 160 | % \newcommand{\D}[1]{\blue{\CN{#1}}} 161 | % \newcommand{\tag}[1]{\red{\CN{#1}}} 162 | % \newcommand{\C}[2]{\ang{\tag{#1}#2}} 163 | % \newcommand{\F}[1]{\green{\CN{#1}}} 164 | % \newcommand{\U}[1]{\orange{\CN{#1}}} 165 | % 166 | % \newcommand{\Enum}{\D{Enum}} 167 | % \newcommand{\enum}[1]{\D{enum}\:#1} 168 | % 169 | % \newcommand{\Sig}{\D{Sig}} 170 | % \newcommand{\dom}[1]{\F{dom}\:#1} 171 | % \newcommand{\sig}[1]{\D{sig}\:#1} 172 | % \newcommand{\Refl}{\overline} 173 | % 174 | % \newcommand{\Rhab}{:} 175 | % 176 | % \newcommand{\Ret}[1]{\:\FATR\:#1} 177 | % \newcommand{\By}[1]{\:\FATL\:#1} 178 | % \newcommand{\Refute}[1]{\:\FROG\:#1} 179 | % \newcommand{\With}[1]{\:\&\:#1} 180 | % 181 | % \newcommand{\refl}[1]{\purple{\overline{\black{#1}}}} 182 | % \newcommand{\coe}[4]{#4\:\green{\left[\black{#3:#1\EQ#2}\right>}} 183 | % \newcommand{\coh}[4]% 184 | % {#4\:\green{\left[\!\left[\black{#3:#1\EQ#2}\right|\!\right>}} 185 | % 186 | % \newcommand{\qcoe}[3]{#1\:\green{\F{coe}(\black{#2\EQ #3})}} 187 | % \newcommand{\qcoh}[3]{#1\:\green{\F{coh}(\black{#2\EQ #3})}} 188 | % \newcommand{\qcong}[3]{#1\:\green{\F{cong}(\black{#2:#3})}} 189 | % 190 | % 
\newcommand{\naughtE}[1]{#1\:\green{\FROG}} 191 | % \newcommand{\caseE}[1]{#1\:\F{case}} 192 | % \newcommand{\ExpandE}[1]{#1\:\F{Expand}} 193 | % \newcommand{\DecodeE}[1]{#1\:\F{Decode}} 194 | % \newcommand{\KitE}[2]{#1\:\F{Kit(}#2\F{)}} 195 | % \newcommand{\hd}{\green{\bullet}} 196 | % \newcommand{\tl}{\green{-}} 197 | % \newcommand{\rZ}{\red{\mathsf{0}}} 198 | % \newcommand{\rS}{\red{\mathsf{S}}} 199 | % 200 | % 201 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 202 | % %%% Grammars %%% 203 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 204 | % 205 | % \newcommand{\SC}{\textsc} %% syntactic category 206 | % \newcommand{\Grammar}[2]{ 207 | % \begin{array}[t]{rrll} 208 | % #1 & ::= & #2 209 | % \end{array} 210 | % } 211 | % \newcommand{\Gor}{\\ & | &} %% separator making new row of array 212 | % \newcommand{\GNew}[1]{\medskip \\ #1 & ::= &} 213 | % \newcommand{\Gbr}[2]{\!\left\lgroup #2 \right\rgroup^{\!\!#1}} 214 | % 215 | % \newcommand{\wbox}{\square} 216 | % \newcommand{\bbox}{\blacksquare} 217 | % 218 | % 219 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 220 | % %%% Judgments %%% 221 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 222 | % 223 | % \newcommand{\EC}{\mathcal{E}} 224 | % \newcommand{\Tn}{\vdash} 225 | % \newcommand{\Eq}{\equiv} 226 | % \newcommand{\x}{\V{x}} 227 | % \newcommand{\xS}{\Bhab{\x}{S}} 228 | % 229 | % 230 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 231 | % %%% Computation Relations %%% 232 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 233 | % 234 | % \usepackage{amssymb} 235 | % \newcommand{\step}[1]{\leadsto_{#1}} 236 | % 237 | % 238 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 239 | % %%% Meta-Computation Relations %%% 240 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 241 | % 242 | % \newcommand{\mq}{\mathrel{\Longrightarrow}} 243 | % 244 | % 245 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 246 | % %%% Nonexamples %%% 247 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 248 | % 249 | % %%% Non-examples in display math must be explicitly marked as such, in order 250 | % %%% to avoid dangerous wrong learnings. 
251 | % 252 | % \newcommand{\BAD}{\mbox{\(\red{(\times)}\)}} 253 | % \newcommand{\NONEXAMPLE}[1]{ 254 | % \[ 255 | % \BAD\qquad 256 | % #1 257 | % \]% 258 | % } 259 | % 260 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 261 | % %%% Source %%% 262 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 263 | % 264 | % \newenvironment{source}{\[\begin{array}[t]{ll}}{\end{array}\]} 265 | % 266 | % \newcommand{\letH}[2]{\K{let} & #1 \quad #2} 267 | % \newcommand{\letV}[2]{\K{let} & #1 \\ #2} 268 | % 269 | % \newcommand{\sgap}{\medskip \\} 270 | % \newcommand{\X}{l@{\:}} 271 | % \newcommand{\I}{@{\!\_}l@{}} 272 | % \newcommand{\prog}[2]{\begin{array}[t]{@{}#1}#2\end{array}} 273 | % \newcommand{\dent}[2]{\multicolumn{#1}{@{\;\;}l}{#2}} 274 | % \newcommand{\UPI}[1]{\PIBINDER{#1}\FATR} 275 | % \newcommand{\ULAM}[1]{\LAMBINDER{#1}\FATR} 276 | % 277 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 278 | % %%% Vertical Arrangement %%% 279 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 280 | % 281 | % \newcommand{\STL}[1]{\begin{array}[t]{@{}l@{}}#1\end{array}} 282 | % \newcommand{\STC}[1]{\begin{array}[t]{@{}c@{}}#1\end{array}} 283 | % \newcommand{\CASES}[1]{\begin{array}[t]{@{}l@{\;\mapsto\;}l@{}}#1\end{array}} 284 | % 285 | % 286 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 287 | % %%% Ecce %%% 288 | % %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 289 | % 290 | % \newcommand{\ecce}[1]{\begin{array}{@{}l@{\;}l}#1\end{array}} 291 | % 292 | % \newcommand{\ecDEFN}[4]{ 293 | % \F{#1}&[\\ 294 | % \multicolumn{2}{@{}l}{\quad\ecce{#2}}\\ 295 | % \;]&\FATR\;#3\;:\:#4 \\ 296 | % } 297 | % \newcommand{\ecDefn}[4]{ 298 | % \F{#1} & [#2]\;\FATR\;#3\;:\:#4 \\ 299 | % } 300 | % \newcommand{\ecdefn}[3]{ 301 | % \F{#1} & \FATR\;#2\;:\:#3 \\ 302 | % } 303 | % \newcommand{\ecHOLE}[3]{ 304 | % \U{#1} & [\\ 305 | % \multicolumn{2}{@{}l}{\quad\ecce{#2}}\\ 306 | % \;]&\FATR\;?\;:\:#3\\ 307 | % } 308 | % \newcommand{\ecHole}[3]{ 309 | % \U{#1}&[#2]\;\FATR\;?\;:\:#3 \\ 310 | % } 311 | % \newcommand{\echole}[2]{ 312 | % \U{#1} & \FATR\;?\;:\:#2 \\ 313 | % } 314 | % \newcommand{\ecpara}[2]{ 315 | % \V{#1} & :\;#2 \\ 316 | % } 317 | % 318 | -------------------------------------------------------------------------------- /Layout.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE PatternGuards, GADTs, GeneralizedNewtypeDeriving #-} 2 | 3 | module Layout where 4 | 5 | -- This file is intended as a not especially ZEUG specific manager for the 6 | -- lexical and layout structure of text, with an associated parser 7 | -- combinator library for the lexical structure thus emerging. 8 | -- 9 | -- Crucially, the analysis rejects no inputs and preserves the information 10 | -- content of the text (apart from turning any tabs into spaces, which, 11 | -- frankly is more than they deserve). 12 | 13 | import Data.Maybe 14 | import Control.Applicative 15 | import Control.Monad 16 | 17 | 18 | ------------------------------------------------------------------------------ 19 | -- LEXICAL STRUCTURE 20 | ------------------------------------------------------------------------------ 21 | 22 | -- Where you might be used to thinking of tokens as being other than 23 | -- whitespace, we are keen to preserve it. Also, we are not averse to 24 | -- tokens having interesting internal structure, e.g. 
when they 25 | -- represent valid groupings of some sort. 26 | 27 | data Token 28 | = Spc Int -- horizontal space of positive length 29 | | EoL -- line break 30 | | Sym String -- one contiguous nonspace symbol 31 | | Grp String [Token] String -- a valid bracketing of more tokens 32 | | Sub [Token] -- a substructure found by parsing 33 | deriving (Eq, Ord) -- anything <= EoL is whitespace 34 | 35 | -- Invariants: 36 | -- we never have two consecutive Spc tokens 37 | 38 | -- We should say what tokens look like by saying how to print them. 39 | 40 | nekot :: Token -> String -> String -- a difference-String 41 | nekot (Sym x) s = x ++ s 42 | nekot EoL s = '\n' : s 43 | nekot (Spc i) s = replicate i ' ' ++ s 44 | nekot (Grp x ts y) s = x ++ foldr nekot (y ++ s) ts 45 | nekot (Sub ts) s = foldr nekot s ts 46 | 47 | -- Now we can say how to look for them. Lexical structure is very 48 | -- simple. Lexing is done by this thing: 49 | 50 | tokens :: String -> [Token] 51 | 52 | -- A few characters form tokens by themselves, namely ()[]{},; 53 | -- (proposal 1: add .) (proposal 2: don't add . and remove ,;) 54 | -- Apart from that, no attempt is made to split contiguous nonwhitespace, 55 | -- so you just get to shove more spaces in. That'll be an Agda thing. 56 | 57 | tokens [] = [] 58 | tokens (c : s) 59 | | elem c solo = Sym [c] : tokens s 60 | | elem c spc = let (i, b) = space 1 s in Spc i : tokens b 61 | | c == '\n' = EoL : tokens s 62 | | otherwise = let (a, b) = symb s in Sym (c : a) : tokens b 63 | where 64 | spc = " \t" 65 | solo = "()[]{},;`" 66 | symb = break (`elem` ('\n' : spc ++ solo)) 67 | space i (c : s) | elem c spc = space (i + 1) s 68 | space i s = (i, s) 69 | 70 | -- You will note that we deliver only Spc, EoL and Sym tokens at this 71 | -- stage. More structure comes later. 72 | 73 | -- For diagnostic purposes, let us give a show instance for Token. 74 | instance Show Token where 75 | show (Spc i) 76 | | i < 4 = replicate i ' ' 77 | | otherwise = "[" ++ show i ++ ">" 78 | show EoL = "\n" 79 | show (Sym x) = x 80 | show (Grp x ts y) = x ++ show ts ++ y 81 | show (Sub ts) = "-" 82 | 83 | 84 | ------------------------------------------------------------------------------ 85 | -- LAYOUT STRUCTURE 86 | ------------------------------------------------------------------------------ 87 | 88 | -- A document is a sequence of blocks. A block consists of a headline 89 | -- (being a list of tokens), and the possibility of a subordinated 90 | -- document, introduced by the one and only layout herald 91 | -- -: 92 | -- which looks like a horizontal thing followed by some vertical things. 93 | 94 | type Document = [Block] 95 | data Block 96 | = [Token] :-: -- the headline 97 | Maybe Document -- the subordinated document, if present 98 | deriving Show 99 | 100 | headline :: Document -> [Token] 101 | headline ((ts :-: _) : _) = ts 102 | headline _ = [] 103 | 104 | -- The main interface to the layout machinery is via these operations 105 | -- which are total. Moreover, for tab-free input 106 | -- tuoyal . 
layout = id 107 | 108 | layout :: String -> Document 109 | tuoyal :: Document -> String 110 | 111 | -- The purpose of layout is to discover useful structure, given the 112 | -- constraints of the forgetful operation, tuoyal, implemented thus: 113 | 114 | tuoyal ls = foldr blockOut [] ls where 115 | blockOut (ts :-: mls) s = foldr nekot (subDocOut mls s) ts 116 | subDocOut Nothing s = s 117 | subDocOut (Just ls) s = "-:" ++ foldr blockOut s ls 118 | 119 | -- The key operation grabs the subdocument appropriate to the current 120 | -- indentation level. It should be called only at start of line. 121 | 122 | tokDoc :: Int -- seek a document indented this much... 123 | -> [Token] -- ...from this token stream; 124 | -> ( Document -- return the document... 125 | , [Token] -- ...and the unconsumed tokens 126 | ) 127 | 128 | -- We may then give the implementation of layout. 129 | 130 | layout = fst -- just the document, please! no tokens unused... 131 | . tokDoc 0 -- ...making a document indented by 0, from... 132 | . tokens -- ... the tokens you get from the input 133 | 134 | -- Now, to implement tokDoc, we need to detect indentation level, 135 | -- just by a little lookahead. PROVIDED WE'RE AT BEGINNING OF LINE. 136 | 137 | dent :: [Token] -> Int 138 | dent [] = maxBound -- end of file, maximally indented 139 | dent (EoL : _) = maxBound -- blank line, maximally indented 140 | dent (Spc i : EoL : _) = maxBound -- trailing spaces only, same again 141 | dent (Spc i : _) = i -- can't be followed by more space 142 | dent _ = 0 -- isn't space 143 | 144 | tokDoc i ts | j < i = ([], ts) -- stop if we're outdented 145 | | otherwise = moreDoc i ts where 146 | j = dent ts -- makes sense only at start of line 147 | -- for moreDoc, we know that what we see belongs to us 148 | moreDoc i [] = ([], []) -- end of file 149 | moreDoc i ts = ((groupify us :-: m) : ls, ts2) where 150 | -- ^^^^^^^^^^^ find bracket structure 151 | ((us :-: m), ts1) = tokBlock ts -- get the first block 152 | (ls, ts2) = tokDoc i ts1 -- then the rest 153 | -- tokBlock gets us to the end of the current block, i.e., the 154 | -- start of the line after the current block 155 | tokBlock [] = ([] :-: Nothing, []) 156 | -- end of file is end of block 157 | tokBlock (EoL : ts) | dent ts < j = ([EoL] :-: Nothing, ts) 158 | -- stop after EoL when what follows is an outdent 159 | tokBlock (Sym "-:" : ts) = ([] :-: Just ls, ts') where 160 | (ls, ts') = moreDoc (j + 1) ts 161 | -- we've seen the layout herald; anything right of it belongs 162 | -- to the subdoc, and anything below it indented strictly more 163 | -- than this line 164 | tokBlock (t : ts) = ((t : ts1) :-: mls, ts2) where 165 | (ts1 :-: mls, ts2) = tokBlock ts 166 | -- otherwise keep grabbing more tokens 167 | 168 | -- We're looking for opportunities to package token sequences in 169 | -- group tokens. When you find an open, try to grab tokens until 170 | -- the corresponding close. If you don't find it, don't panic... 171 | -- ...but don't make a group. 172 | 173 | groupify :: [Token] -> [Token] 174 | groupify = fst . chomp (const False) where 175 | groupers = [("(", ")"), ("[", "]"), ("{", "}"), ("`", "`")] 176 | fcons t (ts, z) = (t : ts, z) 177 | -- chomp keeps making groups but stops at its caller's closer 178 | chomp :: (Token -> Bool) -- is this my caller's closer? 
179 | -> [Token] -- input stream 180 | -> ( [Token] -- grouped inputs before the closer 181 | , Maybe [Token] -- ungrouped inputs after the closer 182 | ) -- or Nothing, if we didn't find it 183 | chomp p [] = ([], Nothing) -- didn't find closer 184 | chomp p (t : ts) | p t = ([], Just ts) -- have found closer 185 | chomp p (t@(Sym x) : ts) -- is this the start of a subgroup? 186 | | Just y <- lookup x groupers -- if so, y closes x 187 | = case chomp (Sym y ==) ts of 188 | (ss, Just ts) -> fcons (Grp x ss y) (chomp p ts) 189 | -- we found the closer, so we can make a group 190 | (ss, Nothing) -> (t : ss, Nothing) 191 | -- we didn't find the closer, so we stay flat 192 | chomp p (t : ts) = fcons t (chomp p ts) -- if not, chomp on 193 | 194 | 195 | ------------------------------------------------------------------------------ 196 | -- PARSING TOKENS (LIST OF SUCCESSES STYLE) 197 | ------------------------------------------------------------------------------ 198 | 199 | newtype ParseTokens a = ParseTokens 200 | {parseTokens :: [Token] -- inputs 201 | -> [( [Token] -> [Token] -- difference-list of consumed input 202 | , a -- thing constructed 203 | , [Token] -- unused inputs 204 | )]} 205 | deriving Monoid -- why keep a dog and bark yourself? 206 | 207 | -- Let us have a datatype for substructures carrying the token sequence 208 | -- from which they were parsed. The purpose of Sub tokens is to mark 209 | -- the corresponding discovered structure in the token sequence. As a 210 | -- result, the consumed input may contain Sub tokens where the unconsumed 211 | -- input may not. 212 | 213 | data Sub x = [Token] := x deriving Show 214 | 215 | subproj :: Sub x -> x 216 | subproj (_ := x) = x 217 | 218 | 219 | sub :: ParseTokens a -> ParseTokens (Sub a) 220 | sub ap = ParseTokens $ \ ts -> do 221 | (ad, a, ts) <- parseTokens ap ts -- parse the substructure 222 | let ats = ad [] -- reify the difference-list... 223 | return ((Sub ats :), ats := a, ts) -- ...and share it! 224 | 225 | -- We can collect the parses which eat the input and reify their 226 | -- difference lists. Note that ad [] will be ts with added structure. 227 | parses :: ParseTokens a -> [Token] -> [([Token], a)] 228 | parses ap ts = [(ad [], a) | (ad, a, []) <- parseTokens ap ts] 229 | 230 | -- The Monad instance is standard. 231 | instance Monad ParseTokens where 232 | return x = ParseTokens $ \ ts -> return (id, x, ts) 233 | pa >>= k = ParseTokens $ \ ts -> do -- thread by shadowing 234 | (ad, a, ts) <- parseTokens pa ts 235 | (bd, b, ts) <- parseTokens (k a) ts 236 | return (ad . bd, b, ts) 237 | 238 | -- boilerplate 239 | instance Applicative ParseTokens where 240 | pure = return 241 | pf <*> pa = pf >>= \ f -> pa >>= \ a -> return (f a) 242 | -- boilerplate 243 | instance Functor ParseTokens where 244 | fmap = (<*>) . 
pure 245 | -- boilerplate 246 | instance Alternative ParseTokens where 247 | empty = mempty 248 | (<|>) = mappend 249 | 250 | -- get the next token 251 | tok :: ParseTokens Token 252 | tok = ParseTokens $ \ ts -> case ts of 253 | t : ts -> [((t :), t, ts)] 254 | _ -> [] 255 | 256 | -- get the next symbol 257 | sym :: ParseTokens String 258 | sym = tok >>= \x -> case x of 259 | Sym x -> return x 260 | _ -> empty 261 | 262 | -- demand a particular next symbol 263 | eat :: String -> ParseTokens () 264 | eat x = do s <- sym; guard (x == s) 265 | 266 | -- discard whitespace 267 | gap :: ParseTokens () 268 | gap = ParseTokens $ \ ts -> 269 | let (ss, us) = span (<= EoL) ts in [((ss ++), (), us)] 270 | 271 | -- grab a possibly empty sequence, allowing whitespace 272 | gapMany :: ParseTokens a -> ParseTokens [a] 273 | gapMany ap = gap *> many (ap <* gap) 274 | 275 | -- parse a group with specific delimiters 276 | grp :: String -> ParseTokens a -> String -> ParseTokens a 277 | grp x ap y = ParseTokens $ \ ts -> case ts of 278 | t@(Grp x' gts y') : ts | x == x' && y == y' -> 279 | [((Grp x ats y :), a, ts) | (ats, a) <- parses ap gts] 280 | _ -> [] 281 | -- (proposal: insist on the group delivering exactly one parse) 282 | 283 | -- how to grow left-recursive stuff 284 | grow :: Int -- minimum number of growings 285 | -> ParseTokens a -- what to grow 286 | -> (Sub a -> ParseTokens a) -- how to grow it 287 | -> ParseTokens a -- the fully grown thing 288 | grow i ap kp = ParseTokens $ \ ts -> extend i (parseTokens ap ts) where 289 | extend 0 triples = (triples >>= more 0) ++ triples 290 | extend i triples = triples >>= more (i - 1) 291 | more i (ad, a, ts) = extend i 292 | [ ((Sub ats :) . bd, ab, ts) 293 | | (bd, ab, ts) <- parseTokens (kp (ats := a)) ts 294 | ] where ats = ad [] 295 | 296 | refine :: (a -> Maybe b) -> ParseTokens a -> ParseTokens b 297 | refine f p = p >>= \a -> case f a of 298 | Nothing -> empty 299 | Just b -> return b 300 | 301 | ------------------------------------------------------------------------------ 302 | -- PARSING DOCUMENTS 303 | ------------------------------------------------------------------------------ 304 | 305 | document :: Format x -> Document -> [[x]] -- list of possible parses 306 | document = traverse . formatBlock 307 | 308 | data Format x where -- this is a bit too uniform 309 | Format :: (([Token], a) -> [b] -> x) -- semantics 310 | -> ParseTokens a -- parser for headlines 311 | -> Format b -- format for subdocument blocks 312 | -> Format x -- format for document blocks 313 | 314 | formatBlock :: Format x -> Block -> [x] 315 | formatBlock (Format f h l) (ts :-: mtss) = 316 | f <$> parses h ts <*> document l (fromMaybe [] mtss) 317 | -------------------------------------------------------------------------------- /MetaZEUG/Layout.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE PatternGuards, GADTs, GeneralizedNewtypeDeriving #-} 2 | 3 | module Layout where 4 | 5 | -- This file is intended as a not especially ZEUG specific manager for the 6 | -- lexical and layout structure of text, with an associated parser 7 | -- combinator library for the lexical structure thus emerging. 8 | -- 9 | -- Crucially, the analysis rejects no inputs and preserves the information 10 | -- content of the text (apart from turning any tabs into spaces, which, 11 | -- frankly is more than they deserve). 
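-- A quick worked example of the intention (a sketch, assuming I've traced
-- the code below correctly):
--   layout "foo -: bar"
--     == [ [Sym "foo", Spc 1] :-: Just [[Spc 1, Sym "bar"] :-: Nothing] ]
-- Note that the herald itself vanishes into the Block structure, the space
-- before it stays in the headline, and the space after it heads the
-- subdocument, so tuoyal gives back exactly "foo -: bar".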
12 | 13 | import Data.Maybe 14 | import Control.Applicative 15 | import Control.Monad 16 | 17 | 18 | ------------------------------------------------------------------------------ 19 | -- LEXICAL STRUCTURE 20 | ------------------------------------------------------------------------------ 21 | 22 | -- Where you might be used to thinking of tokens as being other than 23 | -- whitespace, we are keen to preserve it. Also, we are not averse to 24 | -- tokens having interesting internal structure, e.g. when they 25 | -- represent valid groupings of some sort. 26 | 27 | data Token 28 | = Spc Int -- horizontal space of positive length 29 | | EoL -- line break 30 | | Sym String -- one contiguous nonspace symbol 31 | | Grp String [Token] String -- a valid bracketing of more tokens 32 | | Sub [Token] -- a substructure found by parsing 33 | deriving (Eq, Ord) -- anything <= EoL is whitespace 34 | 35 | -- Invariants: 36 | -- we never have two consecutive Spc tokens 37 | 38 | -- We should say what tokens look like by saying how to print them. 39 | 40 | nekot :: Token -> String -> String -- a difference-String 41 | nekot (Sym x) s = x ++ s 42 | nekot EoL s = '\n' : s 43 | nekot (Spc i) s = replicate i ' ' ++ s 44 | nekot (Grp x ts y) s = x ++ foldr nekot (y ++ s) ts 45 | nekot (Sub ts) s = foldr nekot s ts 46 | 47 | -- Now we can say how to look for them. Lexical structure is very 48 | -- simple. Lexing is done by this thing: 49 | 50 | tokens :: String -> [Token] 51 | 52 | -- A few characters form tokens by themselves, namely ()[]{},; 53 | -- (proposal 1: add .) (proposal 2: don't add . and remove ,;) 54 | -- Apart from that, no attempt is made to split contiguous nonwhitespace, 55 | -- so you just get to shove more spaces in. That'll be an Agda thing. 56 | 57 | tokens [] = [] 58 | tokens (c : s) 59 | | elem c solo = Sym [c] : tokens s 60 | | elem c spc = let (i, b) = space 1 s in Spc i : tokens b 61 | | c == '\n' = EoL : tokens s 62 | | otherwise = let (a, b) = symb s in Sym (c : a) : tokens b 63 | where 64 | spc = " \t" 65 | solo = "()[]{},;" 66 | symb = break (`elem` ('\n' : spc ++ solo)) 67 | space i (c : s) | elem c spc = space (i + 1) s 68 | space i s = (i, s) 69 | 70 | -- You will note that we deliver only Spc, EoL and Sym tokens at this 71 | -- stage. More structure comes later. 72 | 73 | -- For diagnostic purposes, let us give a show instance for Token. 74 | instance Show Token where 75 | show (Spc i) 76 | | i < 4 = replicate i ' ' 77 | | otherwise = "[" ++ show i ++ ">" 78 | show EoL = "\n" 79 | show (Sym x) = show x 80 | show (Grp x ts y) = x ++ show ts ++ y 81 | show (Sub ts) = "-" 82 | 83 | 84 | ------------------------------------------------------------------------------ 85 | -- LAYOUT STRUCTURE 86 | ------------------------------------------------------------------------------ 87 | 88 | -- A document is a sequence of blocks. A block consists of a headline 89 | -- (being a list of tokens), and the possibility of a subordinated 90 | -- document, introduced by the one and only layout herald 91 | -- -: 92 | -- which looks like a horizontal thing followed by some vertical things. 93 | 94 | type Document = [Block] 95 | data Block 96 | = [Token] :-: -- the headline 97 | Maybe Document -- the subordinated document, if present 98 | deriving Show 99 | 100 | -- The main interface to the layout machinery is via these operations 101 | -- which are total. Moreover, for tab-free input 102 | -- tuoyal . 
layout = id 103 | 104 | layout :: String -> Document 105 | tuoyal :: Document -> String 106 | 107 | -- The purpose of layout is to discover useful structure, given the 108 | -- constraints of the forgetful operation, tuoyal, implemented thus: 109 | 110 | tuoyal ls = foldr blockOut [] ls where 111 | blockOut (ts :-: mls) s = foldr nekot (subDocOut mls s) ts 112 | subDocOut Nothing s = s 113 | subDocOut (Just ls) s = "-:" ++ foldr blockOut s ls 114 | 115 | -- The key operation grabs the subdocument appropriate to the current 116 | -- indentation level. It should be called only at start of line. 117 | 118 | tokDoc :: Int -- seek a document indented this much... 119 | -> [Token] -- ...from this token stream; 120 | -> ( Document -- return the document... 121 | , [Token] -- ...and the unconsumed tokens 122 | ) 123 | 124 | -- We may then give the implementation of layout. 125 | 126 | layout = fst -- just the document, please! no tokens unused... 127 | . tokDoc 0 -- ...making a document indented by 0, from... 128 | . tokens -- ... the tokens you get from the input 129 | 130 | -- Now, to implement tokDoc, we need to detect indentation level, 131 | -- just by a little lookahead. PROVIDED WE'RE AT BEGINNING OF LINE. 132 | 133 | dent :: [Token] -> Int 134 | dent [] = maxBound -- end of file, maximally indented 135 | dent (EoL : _) = maxBound -- blank line, maximally indented 136 | dent (Spc i : EoL : _) = maxBound -- trailing spaces only, same again 137 | dent (Spc i : _) = i -- can't be followed by more space 138 | dent _ = 0 -- isn't space 139 | 140 | tokDoc i ts | j < i = ([], ts) -- stop if we're outdented 141 | | otherwise = moreDoc i ts where 142 | j = dent ts -- makes sense only at start of line 143 | -- for moreDoc, we know that what we see belongs to us 144 | moreDoc i [] = ([], []) -- end of file 145 | moreDoc i ts = ((groupify us :-: m) : ls, ts2) where 146 | -- ^^^^^^^^^^^ find bracket structure 147 | ((us :-: m), ts1) = tokBlock ts -- get the first block 148 | (ls, ts2) = tokDoc i ts1 -- then the rest 149 | -- tokBlock gets us to the end of the current block, i.e., the 150 | -- start of the line after the current block 151 | tokBlock [] = ([] :-: Nothing, []) 152 | -- end of file is end of block 153 | tokBlock (EoL : ts) | dent ts < j = ([EoL] :-: Nothing, ts) 154 | -- stop after EoL when what follows is an outdent 155 | tokBlock (Sym "-:" : ts) = ([] :-: Just ls, ts') where 156 | (ls, ts') = moreDoc (j + 1) ts 157 | -- we've seen the layout herald; anything right of it belongs 158 | -- to the subdoc, and anything below it indented strictly more 159 | -- than this line 160 | tokBlock (t : ts) = ((t : ts1) :-: mls, ts2) where 161 | (ts1 :-: mls, ts2) = tokBlock ts 162 | -- otherwise keep grabbing more tokens 163 | 164 | -- We're looking for opportunities to package token sequences in 165 | -- group tokens. When you find an open, try to grab tokens until 166 | -- the corresponding close. If you don't find it, don't panic... 167 | -- ...but don't make a group. 168 | 169 | groupify :: [Token] -> [Token] 170 | groupify = fst . chomp (const False) where 171 | groupers = [("(", ")"), ("[", "]"), ("{", "}")] 172 | fcons t (ts, z) = (t : ts, z) 173 | -- chomp keeps making groups but stops at its caller's closer 174 | chomp :: (Token -> Bool) -- is this my caller's closer? 
175 | -> [Token] -- input stream 176 | -> ( [Token] -- grouped inputs before the closer 177 | , Maybe [Token] -- ungrouped inputs after the closer 178 | ) -- or Nothing, if we didn't find it 179 | chomp p [] = ([], Nothing) -- didn't find closer 180 | chomp p (t : ts) | p t = ([], Just ts) -- have found closer 181 | chomp p (t@(Sym x) : ts) -- is this the start of a subgroup? 182 | | Just y <- lookup x groupers -- if so, y closes x 183 | = case chomp (Sym y ==) ts of 184 | (ss, Just ts) -> fcons (Grp x ss y) (chomp p ts) 185 | -- we found the closer, so we can make a group 186 | (ss, Nothing) -> (t : ss, Nothing) 187 | -- we didn't find the closer, so we stay flat 188 | chomp p (t : ts) = fcons t (chomp p ts) -- if not, chomp on 189 | 190 | 191 | ------------------------------------------------------------------------------ 192 | -- PARSING TOKENS (LIST OF SUCCESSES STYLE) 193 | ------------------------------------------------------------------------------ 194 | 195 | newtype ParseTokens a = ParseTokens 196 | {parseTokens :: [Token] -- inputs 197 | -> [( [Token] -> [Token] -- difference-list of consumed input 198 | , a -- thing constructed 199 | , [Token] -- unused inputs 200 | )]} 201 | deriving Monoid -- why keep a dog and bark yourself? 202 | 203 | -- Let us have a datatype for substructures carrying the token sequence 204 | -- from which they were parsed. The purpose of Sub tokens is to mark 205 | -- the corresponding discovered structure in the token sequence. As a 206 | -- result, the consumed input may contain Sub tokens where the unconsumed 207 | -- input may not. 208 | 209 | data Sub x = [Token] := x deriving Show 210 | 211 | sub :: ParseTokens a -> ParseTokens (Sub a) 212 | sub ap = ParseTokens $ \ ts -> do 213 | (ad, a, ts) <- parseTokens ap ts -- parse the substructure 214 | let ats = ad [] -- reify the difference-list... 215 | return ((Sub ats :), ats := a, ts) -- ...and share it! 216 | 217 | -- We can collect the parses which eat the input and reify their 218 | -- difference lists. Note that ad [] will be ts with added structure. 219 | parses :: ParseTokens a -> [Token] -> [([Token], a)] 220 | parses ap ts = [(ad [], a) | (ad, a, []) <- parseTokens ap ts] 221 | 222 | -- The Monad instance is standard. 223 | instance Monad ParseTokens where 224 | return x = ParseTokens $ \ ts -> return (id, x, ts) 225 | pa >>= k = ParseTokens $ \ ts -> do -- thread by shadowing 226 | (ad, a, ts) <- parseTokens pa ts 227 | (bd, b, ts) <- parseTokens (k a) ts 228 | return (ad . bd, b, ts) 229 | 230 | -- boilerplate 231 | instance Applicative ParseTokens where 232 | pure = return 233 | pf <*> pa = pf >>= \ f -> pa >>= \ a -> return (f a) 234 | -- boilerplate 235 | instance Functor ParseTokens where 236 | fmap = (<*>) . 
pure 237 | -- boilerplate 238 | instance Alternative ParseTokens where 239 | empty = mempty 240 | (<|>) = mappend 241 | 242 | -- get the next symbol 243 | sym :: ParseTokens String 244 | sym = ParseTokens $ \ ts -> case ts of 245 | t@(Sym x) : ts -> [((t :), x, ts)] 246 | _ -> [] 247 | 248 | -- demand a particular next symbol 249 | eat :: String -> ParseTokens () 250 | eat x = do s <- sym; guard (x == s) 251 | 252 | -- discard whitespace 253 | gap :: ParseTokens () 254 | gap = ParseTokens $ \ ts -> 255 | let (ss, us) = span (<= EoL) ts in [((ss ++), (), us)] 256 | 257 | -- grab a possibly empty sequence, allowing whitespace 258 | gapMany :: ParseTokens a -> ParseTokens [a] 259 | gapMany ap = gap *> many (ap <* gap) 260 | 261 | -- parse a group with specific delimiters 262 | grp :: String -> ParseTokens a -> String -> ParseTokens a 263 | grp x ap y = ParseTokens $ \ ts -> case ts of 264 | t@(Grp x' gts y') : ts | x == x' && y == y' -> 265 | [((Grp x ats y :), a, ts) | (ats, a) <- parses ap gts] 266 | _ -> [] 267 | -- (proposal: insist on the group delivering exactly one parse) 268 | 269 | -- how to grow left-recursive stuff 270 | grow :: Int -- minimum number of growings 271 | -> ParseTokens a -- what to grow 272 | -> (Sub a -> ParseTokens a) -- how to grow it 273 | -> ParseTokens a -- the fully grown thing 274 | grow i ap kp = ParseTokens $ \ ts -> extend i (parseTokens ap ts) where 275 | extend 0 triples = (triples >>= more 0) ++ triples 276 | extend i triples = triples >>= more (i - 1) 277 | more i (ad, a, ts) = extend i 278 | [ ((Sub ats :) . bd, ab, ts) 279 | | (bd, ab, ts) <- parseTokens (kp (ats := a)) ts 280 | ] where ats = ad [] 281 | 282 | 283 | ------------------------------------------------------------------------------ 284 | -- PARSING DOCUMENTS 285 | ------------------------------------------------------------------------------ 286 | 287 | document :: Format x -> Document -> [[x]] -- list of possible parses 288 | document = traverse . formatBlock 289 | 290 | data Format x where -- this is a bit too uniform 291 | Format :: (([Token], a) -> [b] -> x) -- semantics 292 | -> ParseTokens a -- parser for headlines 293 | -> Format b -- format for subdocument blocks 294 | -> Format x -- format for document blocks 295 | 296 | formatBlock :: Format x -> Block -> [x] 297 | formatBlock (Format f h l) (ts :-: mtss) = 298 | f <$> parses h ts <*> document l (fromMaybe [] mtss) 299 | -------------------------------------------------------------------------------- /MetaZEUG/MetaZOIG.tex: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage{upgreek} 3 | \usepackage{stmaryrd} 4 | 5 | \begin{document} 6 | \title{\textsf{ZEUG} Metalogic} 7 | \author{Conor, Fred, Guillaume, James} 8 | \maketitle 9 | 10 | \section{Syntax and Evaluation} 11 | 12 | \newcommand{\neu}{\underline} 13 | \newcommand{\lam}[1]{\uplambda #1.\,} 14 | \newcommand{\quo}{\textrm{'}} 15 | 16 | We have mutually defined syntactic categories for `positive' things 17 | representing canonical forms and `negative' things which have the potential 18 | to compute. This is not exactly the `polarity' that focusing people talk about, 19 | but it allows us to use some symbols to refer to a useful distinction. 
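For instance, in the grammar below, pairs and abstractions live on the positive side, while applications and cuts live on the negative side; $\neu e$ embeds a negative thing into a positive position.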
20 | 
21 | Terms($+$) and eliminations($-$):
22 | \[
23 | \begin{array}[t]{rrl@{\quad}l}
24 | S,T,s,t & ::= & \neu e & \mbox{elimination}\\
25 |  & | & \quo c & \mbox{constant}\\
26 |  & | & [s, t] & \mbox{pair}\\
27 |  & | & \lam {} t & \mbox{abstraction}\\
28 | \end{array}
29 | \qquad
30 | \begin{array}[t]{rrl@{\quad}l}
31 | E,F,e,f & ::= & x & \mbox{name}\\
32 |  & | & f\:s & \mbox{action}\\
33 |  & | & i & \mbox{index}\\
34 |  & | & t : T & \mbox{cut}\\
35 | \end{array}
36 | \]
37 | 
38 | Informally, let us write $\lam x t$ for the correspondingly de Bruijn indexed version. Constants are tokens or a blank (`nil').
39 | We may use LISP-like conveniences.
40 | Tidy
41 | $[\vec{s},[\vec{t}]]$ as $[\vec{s}\: \vec{t}]$.
42 | Tidy $[\vec{s},\quo]$ as $[\vec{s}]$.
43 | Tidy $\quo$ as $[]$.
44 | 
45 | Values($+$) and neutrals($-$):
46 | \[
47 | \begin{array}[t]{rrl@{\quad}l}
48 | U,V,u,v & ::= & \neu n & \mbox{neutral}\\
49 |  & | & \quo c & \mbox{constant}\\
50 |  & | & [u, v] & \mbox{pair}\\
51 |  & | & \lam {[\gamma]} t & \mbox{abstraction}\\
52 | \end{array}
53 | \qquad
54 | \begin{array}[t]{rrl@{\quad}l}
55 | M,N,m,n & ::= & x & \mbox{name}\\
56 |  & | & n\:u & \mbox{action}\\
57 | \end{array}
58 | \]
59 | 
60 | \newcommand{\ev}[1]{\llbracket #1 \rrbracket}
61 | Evaluation:
62 | \[
63 | \begin{array}[t]{rcl}
64 | \ev {\neu e} \gamma & = & \ev e \gamma \\
65 | \ev {\quo c} \gamma & = & \quo c \\
66 | \ev {[s, t]} \gamma & = & [\ev s \gamma, \ev t \gamma] \\
67 | \ev {\lam {} t} \gamma & = & \lam{[\gamma]} t \\
68 | \end{array}
69 | \qquad
70 | \begin{array}[t]{rcl}
71 | \ev x \gamma & = & x \\
72 | \ev {f\:s} \gamma & = & \ev f \gamma \cdot \ev s \gamma \\
73 | \ev i \gamma & = & \gamma.i \\
74 | \ev {t : T} \gamma & = & \ev t \gamma\\
75 | \end{array}
76 | \]
77 | 
78 | Action:
79 | \[\begin{array}{r@{\cdot}lcl}
80 | \neu n & u & = & \neu {n\:u}
81 | \end{array}\]
82 | 
83 | Action is a thing we need to extend.
84 | 
85 | 
86 | \section{Judgments and Properties}
87 | 
88 | \newcommand{\herald}{-\!\!:}
89 | \newcommand{\lay}[2]{\begin{array}[t]{@{}l@{}}#1\;\herald\\ %
90 | \quad\begin{array}[t]{@{}l@{}}#2\end{array}\end{array}}
91 | 
92 | \newcommand{\type}{\textsc{type}\;}
93 | \newcommand{\judgment}{\textsc{judgment}\;}
94 | \newcommand{\pve}{\mbox{$+$}}
95 | \newcommand{\nve}{\mbox{$-$}}
96 | 
97 | Judgments are statements about things. A judgment is a formula with some places for the things. When we declare a judgment form, we give its template, marking the places clearly with parentheses. Inside the parentheses, we indicate the \emph{mode} of the place: $?$ for input, $!$ for output; we indicate the \emph{phase} of the place, by writing $\$$ if we are expecting a syntactic object, and nothing if we expect a value; we indicate whether the place is for positive or negative things;
98 | we name the thing in the place. For example, we may propose that a positive syntactic object is a type.
99 | \[
100 | \lay{\judgment \type (?\$\pve\:S)}{}
101 | \]
102 | (Yes, I know it looks like swearing.)
103 | Indented below (as signalled by the `layout herald' $\herald$, depicting a horizontal thing followed by some vertical things) are the preconditions about
104 | the inputs and the postconditions about the outputs.
105 | For type formation, there are no such conditions. We refer to syntactic inputs
106 | as \emph{subjects} of the judgment: the judgment's purpose is to determine their validity. The value inputs should be validated in order to propose the judgment in the first place.
For example, the type checking judgment must have a valid type.
107 | \[
108 | \lay{\judgment (?\pve\: S) \ni (?\$\pve\:s)}
109 |     {\type S}
110 | \]
111 | How do we check such a declaration for sanity? Every variable which stands for a value
112 | input must be used as the subject of a premise. Reading down the premises, we regard
113 | $S$ as syntactic until it has been the subject of a judgment, at which point we become entitled to its value.
114 | 
115 | The type synthesis judgment delivers an output. We explain what must be true of it.
116 | \[
117 | \lay{\judgment (?\$\nve\:s) \in (!\pve\:S)}
118 |     {\type S}
119 | \]
120 | 
121 | 
122 | Contexts ascribe properties to variables.
123 | A property is something like $:S$ or $=s$, expressing some sort of relationship to
124 | a value. A context entry is a fresh variable to which some properties have been postfixed. We will need to explain what conditions are necessary for the formulation of a property and the ascription of it to a variable. For example, we might say
125 | \newcommand{\property}{\textsc{property}\;}
126 | \newcommand{\ensures}{\;\textsc{ensures}\;}
127 | \[
128 | \lay{\property x : (S)\ensures x\in S}
129 |     {\type S}
130 | \]
131 | In effect, we're describing `context validity'. When we declare
132 | a property, we must give its embedding into judgments, where the components of the property must be used in output mode (for we shall have looked them up in the context). We acquire automatically the rules which explain when judgments hold of variables, and we are otherwise forbidden to write rules which examine the context.
133 | In doing so, we obtain stability under substitution, effectively the lifting of substitution from syntax to derivations.
134 | 
135 | 
136 | \section{Rules}
137 | 
138 | A rule explains how to deduce one \emph{conclusion} judgment from zero or more \emph{premise} judgments. Let us consider how to check whether rules make sense.
139 | 
140 | We presume that the value inputs of the conclusion are valid as specified by the judgment form. The syntactic inputs may be matched against patterns appropriate to the syntactic category: every schematic variable used in such patterns must appear in the subject of at least (exactly?) one premise: once such a variable has been a subject, we may use it in value positions. We must ensure that the value inputs of the premises are valid; we may assume that the value outputs of premises are valid.
141 | We must ensure that the value outputs of the conclusion are valid, and that information flows causally. For example,
142 | \[
143 | \lay{\type [\quo\Uppi\:S\:\lam x T]}
144 |     {\type S \\ x:S\vdash \type T}
145 | \]
146 | Here, we explain when the syntactic form of a $\Uppi$-type can be considered a type. Once $S$ has been validated as a type, we can form the property $:S$ and thus extend the context with a fresh variable which instantiates the de Bruijn index in $T$.
147 | We are \emph{not} saying that the codomain of a $\Uppi$-type can be any old first-class function: quite the reverse, we are saying that this is a syntactic form which binds a variable. We may, however, say which uses of binding can be given $\Uppi$-types.
148 | \[
149 | \lay{[\quo\Uppi\:S\:\lam x T]\ni \lam x t}
150 |     {x:S\vdash T\ni t}
151 | \]
152 | Here we make essential use of the assumption that the input is valid. Inverting,
153 | we recover exactly what we need to ensure that the context extension is valid and the input of the premise is valid in that extended context.
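(A tiny worked instance, with one assumption flagged: given $\type S$, to check
$[\quo\Uppi\:S\:\lam x S]\ni\lam x{\neu x}$ the rule above leaves us the premise
$x:S\vdash S\ni\neu x$; the variable rule below gives $x\in S$ from the property
$x:S$, and closing the gap needs whichever rule we adopt for embedding an
elimination whose synthesized type matches the type demanded.)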
154 | For applications, we then have
155 | \[
156 | \lay{f\:s\in \{x = s:S\}T}
157 |     {f\in [\quo\Uppi\:S\:\lam x T]\\ S\ni s}
158 | \]
159 | What's going on with that substitution? You can read the rules in a `small-step' way as being all about terms, and close judgments over appropriately directed computations: you can precompute inputs and postcompute outputs. Or you can read them in a `big-step' way, regarding the substitution as hereditary, acting on values.
160 | But why do we know that the resulting type is well formed? We need two things.
161 | \[
162 | \lay{x\in S}{x:S} \qquad\qquad
163 | \lay{s:S\in S}{\type S\\ S\ni s}
164 | \]
165 | The first of these we get for free when we declare the property $:S$. The second
166 | we may add. Substitution is justified when the assumptions made about the variable hold of the thing substituted for it.
167 | 
168 | 
169 | \section{Actions from $\beta$-Rules}
170 | 
171 | How do we specify computation?
172 | 
173 | \newcommand{\betar}{\textsc{beta}\;}
174 | We explain what happens when a \emph{typed} term is applied to another term.
175 | The usual $\beta$-rule becomes
176 | \[
177 | \betar (\lam x t : [\quo\Uppi\:S\:\lam x T])\:s =
178 |   \{x = s:S\}(t : T)
179 | \]
180 | How do we sanity-check such a thing? For a start, we have to check that the type information is used only to deliver other type information. That means we can extract the corresponding untyped value action.
181 | \[
182 | (\lam{[\gamma]}t)\cdot v = \ev t (\gamma,v)
183 | \]
184 | 
185 | It should be acceptable to allow the same untyped action from multiple typed actions. We might introduce some other function type, but we'd have to give it the same $\beta$-rule.
186 | 
187 | 
188 | \section{Quotation and $\eta$-Rules}
189 | 
190 | The rules give rise to a default quotation procedure. If I have a judgment which holds, I should be able to replace its subjects by their values, then reconstruct a judgment which also holds. Whichever rule typechecks abstractions must tell us how to generate the fresh variable which we can use to extend the environment in a functional value.
191 | 
192 | \newcommand{\etar}{\textsc{eta}\;}
193 | However, we sometimes want to override that default. We write rules which
194 | put the equations defining quotation in the places for the subjects.
195 | For example
196 | \[
197 | \lay{\etar [\quo\Uppi\:S\:\lam x T] \ni (f = \lam x t)}
198 |     {x:S \vdash T \ni (f\:x = t)}
199 | \]
200 | How do we sanity-check that?
201 | 
202 | 
203 | \section{Example: $\Upsigma$-types}
204 | 
205 | Let's give the theory of $\Upsigma$-types in this style. Formation and introduction:
206 | \[
207 | \lay{\type[\quo\Upsigma\:S\:\lam x T]}
208 |     {\type S \\ x:S\vdash \type T}
209 | \qquad
210 | \lay{[\quo\Upsigma\:S\:\lam x T] \ni [s, t]}
211 |     {S\ni s\\ \{x=s:S\}T \ni t}
212 | \]
213 | Two specific applications are permitted.
214 | \newcommand{\fst}{\quo\mathsf{fst}} 215 | \newcommand{\snd}{\quo\mathsf{snd}} 216 | \[ 217 | \lay{n\:\fst \in S} 218 | {n\in [\quo\Upsigma\:S\:\lam x T]} 219 | \qquad 220 | \lay{n\:\snd \in \{x=n\:\fst\}T} 221 | {n\in [\quo\Upsigma\:S\:\lam x T]} 222 | \] 223 | The associated actions are delivered by 224 | \[ 225 | \betar ([s, t]:[\quo\Upsigma\:S\:\lam x T])\:\fst = s:S 226 | \qquad 227 | \betar ([s, t]:[\quo\Upsigma\:S\:\lam x T])\:\snd = t:\{x=s:S\}T 228 | \] 229 | The quotation behaviour is 230 | \[ 231 | \lay{\etar [\quo\Upsigma\:S\:\lam x T] \ni (p = [s, t])} 232 | {S\ni (p\:\fst = s)\\ \{x=p\:\fst\}T\ni (p\:\snd = t)} 233 | \] 234 | 235 | \end{document} 236 | -------------------------------------------------------------------------------- /MetaZEUG/Raw.hs: -------------------------------------------------------------------------------- 1 | module Raw where 2 | 3 | import Control.Applicative 4 | 5 | import Layout 6 | 7 | data RawTm 8 | = RawTag String 9 | | RawList [Sub RawTm] (Maybe (Sub RawTm)) 10 | | RawLam String (Sub RawTm) 11 | | RawEn RawEn 12 | deriving Show 13 | 14 | data RawEn 15 | = RawVar String 16 | | RawApp (Sub RawEn) (Sub RawTm) 17 | | RawTy (Sub RawTm) (Sub RawTm) 18 | deriving Show 19 | 20 | tag :: ParseTokens String 21 | tag = sym >>= \ x -> case x of 22 | '\'' : s -> return s 23 | _ -> empty 24 | 25 | var :: ParseTokens String 26 | var = sym >>= \ x -> case x of 27 | c : s | elem c "'\\" -> empty 28 | _ | elem ':' x -> empty 29 | _ -> return x 30 | 31 | smallTm :: ParseTokens RawTm -- definitely small 32 | smallTm 33 | = RawTag <$> tag 34 | <|> grp "[" (RawList <$> listTm <*> 35 | (Just <$ eat "," <* gap <*> sub bigTm <* gap 36 | <|> pure Nothing)) "]" 37 | <|> RawEn <$> (RawVar <$> var) 38 | <|> grp "(" (gap *> bigTm <* gap) ")" 39 | 40 | lamTm :: ParseTokens RawTm 41 | lamTm = RawLam <$ eat "\\" <* gap <*> var <* gap <* eat "." 42 | <* gap <*> sub midTm 43 | 44 | listTm :: ParseTokens [Sub RawTm] 45 | listTm = gap *> (ne <|> pure []) where 46 | ne = (: []) <$> sub lamTm <* gap 47 | <|> (:) <$> sub smallTm <*> listTm 48 | 49 | midTm :: ParseTokens RawTm -- small or middle-sized 50 | midTm = smallTm <|> lamTm <|> RawEn <$> midEn 51 | 52 | bigTm :: ParseTokens RawTm -- any term 53 | bigTm 54 | = midTm 55 | <|> RawEn <$> bigEn 56 | 57 | bigEn :: ParseTokens RawEn -- definitely big 58 | bigEn 59 | = RawTy <$> sub midTm <* gap <* eat ":" <* gap <*> sub bigTm 60 | 61 | midEn :: ParseTokens RawEn -- definitely middle-sized 62 | midEn = grow 1 smallEn $ \ f -> RawApp f <$ gap <*> sub smallTm 63 | 64 | smallEn :: ParseTokens RawEn -- definitely small 65 | smallEn 66 | = RawVar <$> var 67 | <|> grp "(" (gap *> bigEn <* gap) ")" 68 | 69 | 70 | data RawTree = ([Token], RawTm) :& [RawTree] deriving Show 71 | 72 | rawTreeFormat :: Format RawTree 73 | rawTreeFormat = Format (:&) (gap *> bigTm <* gap) rawTreeFormat 74 | 75 | rawTest :: String -> [[RawTree]] 76 | rawTest = document rawTreeFormat . 
layout 77 | -------------------------------------------------------------------------------- /PHOAS.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE RankNTypes, DataKinds, KindSignatures, GADTs, 2 | MultiParamTypeClasses, FunctionalDependencies, 3 | TypeFamilies, PolyKinds, UndecidableInstances, 4 | FlexibleInstances, FlexibleContexts, ScopedTypeVariables, StandaloneDeriving, 5 | PatternSynonyms, TypeOperators, ConstraintKinds, TupleSections #-} 6 | module PHOAS( 7 | var, 8 | var', 9 | lam, 10 | pi, 11 | sg, 12 | letin 13 | ) where 14 | 15 | import Prelude hiding (pi) 16 | import Data.Proxy 17 | import Utils 18 | import Syntax 19 | 20 | newtype Included (m :: Nat) (n :: Nat) = Included { rename :: Fin m -> Fin n } 21 | 22 | class CIncluded (m :: Nat) (n :: Nat) (b :: Bool) where 23 | included :: Proxy b -> Included m n 24 | 25 | instance CIncluded m m b where 26 | included _ = Included id 27 | 28 | instance CIncluded m n (NatLT m n) => CIncluded m (Suc n) True where 29 | included _ = Included $ FSuc . rename (included (Proxy :: Proxy (NatLT m n))) 30 | 31 | newtype FreshVar m w = FreshVar { var :: forall n. CIncluded (Suc m) n (NatLT (Suc m) n) => En (Syn n) w } 32 | 33 | withFreshVar :: forall m w. (FreshVar m w -> Tm (Syn (Suc m)) w) -> Tm (Syn (Suc m)) w 34 | withFreshVar f = f (FreshVar freshVar) where 35 | 36 | freshVar :: forall n. CIncluded (Suc m) n (NatLT (Suc m) n) => En (Syn n) w 37 | freshVar = V $ rename (included (Proxy :: Proxy (NatLT (Suc m) n))) (FZero :: Fin (Suc m)) 38 | 39 | var' :: forall m n w. CIncluded (Suc m) n (NatLT (Suc m) n) => FreshVar m w -> Tm (Syn n) w 40 | var' = En . var 41 | 42 | lam :: forall m w. (FreshVar m w -> Tm (Syn (Suc m)) w) -> Tm (Syn m) w 43 | lam f = Lam $ withFreshVar f 44 | 45 | pi :: forall m w. Tm (Syn m) w -> (FreshVar m w -> Tm (Syn (Suc m)) w) -> Tm (Syn m) w 46 | pi s t = Pi s $ withFreshVar t 47 | 48 | sg :: forall m w. Tm (Syn m) w -> (FreshVar m w -> Tm (Syn (Suc m)) w) -> Tm (Syn m) w 49 | sg s t = Sg s $ withFreshVar t 50 | 51 | letin :: forall m w. 
En (Syn m) w -> (FreshVar m w -> Tm (Syn (Suc m)) w) -> Tm (Syn m) w 52 | letin e t = Let e (withFreshVar t) 53 | 54 | -------------------------------------------------------------------------------- /PathEnds.txt: -------------------------------------------------------------------------------- 1 | syntax term Q, R, S, T, q, r, s, t -: 2 | [e] 3 | (x : S) -> T 4 | \ x -> t 5 | (x : S) * T 6 | s , t 7 | S = T 8 | a {Q} b 9 | t 10 | [S - T | f - g | q] 11 | 12 | syntax elimination E, F, e, f -: 13 | x 14 | f s 15 | E p 16 | f fst 17 | f snd 18 | s:S 19 | [|S - T | f - g | q|] p p' 20 | 21 | syntax point p -: 22 | i 23 | 0 24 | 1 25 | i[p0-p1] 26 | 27 | syntax end a, b -: 28 | @ 29 | t 30 | 31 | judgment TYPE T 32 | judgment E TYPE 33 | judgment T :> t 34 | judgment e <: S 35 | judgment T END a 36 | judgment POINT p 37 | judgment S <= T 38 | judgment S == T 39 | judgment T :> s == t 40 | judgment e == f <: S 41 | judgment T :> t REFINES a 42 | 43 | T :> [e] -: 44 | e <: S 45 | S <= T 46 | 47 | s:S <: S -: 48 | TYPE S 49 | S :> s 50 | 51 | x <: S -: 52 | x : S in context 53 | 54 | TYPE (x : S) -> T -: 55 | TYPE S 56 | x : S |- TYPE T 57 | 58 | (x : S) -> T :> \ x -> t -: 59 | x : S |- T :> t 60 | 61 | f s <: T[s:S/x] -: 62 | f <: (x : S) -> T 63 | S :> s 64 | 65 | TYPE (x : S) * T -: 66 | TYPE S 67 | x : S |- TYPE T 68 | 69 | (x : S) * T :> s , t -: 70 | S :> s 71 | T[s:S/x] :> t 72 | 73 | e fst <: S -: 74 | e <: (x : S) * T 75 | 76 | e snd <: T[e fst:S/x] -: 77 | e <: (x : S) * T 78 | 79 | TYPE S = T -: 80 | TYPE S 81 | TYPE T 82 | 83 | R = T :> S -: 84 | i |- TYPE T 85 | S[0/i] == R 86 | S[1/i] == T 87 | 88 | TYPE [E] -: 89 | E TYPE 90 | 91 | E p TYPE -: 92 | E <: S = T 93 | POINT p 94 | 95 | S =@ <= @=@ -: 96 | @= T <= @=@ -: 97 | S = T <= S =@ -: 98 | S = T <= @= T -: 99 | 100 | POINT 0 -: 101 | POINT 1 -: 102 | POINT i -: 103 | i in context 104 | POINT i[p0-p1] -: 105 | i in context 106 | POINT p0 107 | POINT p1 108 | 109 | TYPE a {Q} b -: 110 | [Q 0] END a 111 | [Q 1] END b 112 | 113 | T END @ -: 114 | T END t -: 115 | T :> t 116 | 117 | a {Q} b :> t -: 118 | i |- [Q i] :> t 119 | [Q 0] :> t[0/i] REFINES a 120 | [Q 1] :> t[1/i] REFINES b 121 | 122 | T :> t REFINES @ -: 123 | 124 | T :> t REFINES s -: 125 | T :> s == t 126 | 127 | q p <: Q p -: 128 | q <: a {Q} b 129 | POINT p 130 | 131 | defining p[p0-p1] -: 132 | 0[p0-p1] == p0 133 | 1[p0-p1] == p1 134 | i[p0-p1] == i[p0-p1] 135 | i[p0'-p1'][p0-p1] == i[p0'[p0-p1]-p1'[p0-p1]] 136 | i[p-p] == p 137 | i[0-1] == i 138 | 139 | Q . Q' <: R = T -: 140 | Q <: R = S 141 | Q' <: S' = T 142 | S == S' 143 | 144 | q . q' <: a {Q . Q'} d -: 145 | q <: a {Q} b 146 | q' <: c {Q'} d 147 | [Q 1] == [Q' 0] 148 | [Q 1] :> [q 1] == [q 0] 149 | 150 | [S - T | f - g | q] <: S = T -: 151 | TYPE S 152 | TYPE T 153 | S -> T :> f 154 | T -> S :> g 155 | (s : S)(t : T) -> (f s {<> T} t) = (s {<> S} g t) -- really? 156 | 157 | defining [S - T | f - g | q] p -: 158 | [S - T | f - g | q] 0 == S 159 | [S - T | f - g | q] 1 == T 160 | [S - T | f - g | q] p == S -: 161 | S == T 162 | f == g 163 | q == \ s t -> <>(s {<> S} t) 164 | 165 | [|S - T | f - g | q|] p p' <: [S - T | f - g | q] p -> [S - T | f - g | q] p' -: 166 | TYPE S 167 | TYPE T 168 | S -> T :> f 169 | T -> S :> g 170 | (s : S)(t : T) -> (f s {<> T} t) = (s {<> S} g t) -- really? 
171 | POINT p 172 | POINT p' 173 | 174 | s -@ Q <: s {Q}@ -: 175 | Q <: S = T 176 | S :> s 177 | 178 | Q @- t <: @{Q} t -: 179 | Q <: S = T 180 | T :> t 181 | 182 | defining ~Q -: 183 | ~Q = Q i[1-0] 184 | 185 | defining Q @- t -: 186 | Q @- t = ~(t -@ ~Q) 187 | 188 | defining S(p-p') -: 189 | S(p-p') = S(j[p-p']) 190 | 191 | defining t -@ Q -: 192 | f -@ ( (x : S(i)) -> T(i,x)) i == 193 | \ x -> 194 | let eS = S(0-i) @- x 195 | eT = f (eS 0) -@ ( T(j[0-i],eS j)) 196 | in eT 1 197 | e -@ ( (x : S(i)) * T(i,x)) i == 198 | let eS = e fst -@ S(0-i) 199 | eT = e snd -@ ( T(j[0-i],eS j)) 200 | in eS 1 , eT 1 201 | e -@ (Q . Q') i == let e' = e -@ Q in e' . (e' 1 -@ Q') i 202 | E -@ ( A(i) = B(i)) i == 203 | A(i-0) . E . B(0-i) 204 | e -@ ( a(i) {Q(i)} b(i)) i == 205 | a(i-0) . e . b(0-i) 206 | e -@ ( [S - T | f - g | q] p(i)) i == 207 | f e if p(0) = 0 and p(i) = 1 208 | g e if p(0) = 1 and p(i) = 0 209 | e if p(0) = p(i) 210 | e if S == T, f == g, q == \ s t -> <>(s {<> S} t) 211 | 212 | 213 | 214 | -------------------------------------------------------------------------------- /ProofState.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE KindSignatures, PolyKinds, GADTs, DataKinds, TypeOperators, 2 | ScopedTypeVariables #-} 3 | 4 | module ProofState where 5 | 6 | import Debug.Trace 7 | 8 | import Prelude hiding ((/)) 9 | import Utils 10 | import Layout 11 | import Raw 12 | import Syntax 13 | import TypeCheck 14 | 15 | type Supply = (Bwd Int, Int) 16 | 17 | supply0 :: Supply 18 | supply0 = (B0, 0) 19 | supplySuc :: Supply -> Supply 20 | supplySuc (is, i) = (is,i+1) 21 | supplySic :: Supply -> Supply 22 | supplySic (is, i) = (is :< i,0) 23 | supply :: Supply -> LongName 24 | supply (is, i) = is <>> [i] 25 | 26 | 27 | data ProofState (b :: Bool)(u :: World) where 28 | (:!-:) :: Worldly w => (PZ b u w, Supply) -> PTip w -> ProofState b u 29 | 30 | ugly :: Int -> ProofState b u -> String 31 | ugly i ((ps,_) :!-: tip) = uglies ps ++ "\n" ++ replicate i ' ' ++ uglytip tip 32 | where 33 | uglies :: PZ b u w -> String 34 | uglies L0 = "" 35 | uglies (ps :<: step) = uglies ps ++ "\n" ++ replicate i ' ' ++ uglyStep step 36 | 37 | uglyStep :: PZStep b v w -> String 38 | uglyStep (Param n e ty) = concat ["(",n," : ",show ty,")"] 39 | uglyStep (Module n mg ps) = concat ["{",n,"\n",ugly (i+2) ps,"}"] 40 | uglyStep (Middle n ln ps) = concat ["after ",n,"\n",ugly i ps] 41 | 42 | uglytip :: PTip w -> String 43 | uglytip P0 = "" 44 | uglytip (PDef mt ty) = maybe "?" show mt ++ " : " ++ show ty 45 | uglytip (PRaw rm) = "Raw..."
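-- The machinery below drives elaboration: a ProofState pairs a zipper (PZ) of
-- Param entries, nested Modules and Middle back-pointers with a fresh-name
-- Supply and a PTip (P0 for nothing, PDef for a possibly unsolved definition,
-- PRaw for unprocessed source).  ambulando walks a PRaw tip left to right,
-- pushing parameters and submodules onto the zipper and checking definitions,
-- while bake/boil translate raw terms and heads into kernel syntax and
-- resolve looks up long names against the zipper.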
46 | 47 | type PROOFSTATE = ProofState True W0 48 | 49 | ambulando :: PROOFSTATE -> PROOFSTATE 50 | ambulando ((ps,sup) :!-: PRaw (_ := RawTip (_ := RawBlank))) = 51 | ambulando ((ps,sup) :!-: P0) 52 | ambulando 53 | ((ps,sup) :!-: (PRaw (_ := RawTip (_ := RawDefn t (_:=ty))) :: PTip w)) = 54 | case help of 55 | No -> ambulando ((ps,sup) :!-: P0) 56 | Yes (TC t :&: ty) -> ambulando ((ps,sup) :!-: PDef t ty) 57 | where 58 | help :: TC (TC TERM :* TERM) w 59 | help = bake ps VNil ty >>>= \ ty -> (Kind >:>= ty) >>>= \ vty -> case t of 60 | Left _ -> Yes (No :&: ty) 61 | Right (_ := t) -> Yes ((bake ps VNil t >>>= \ t -> vty >:>= t >>>= \ _ -> 62 | Yes t) :&: ty) 63 | 64 | ambulando ((ps,sup) :!-: PRaw (_ := RawParam (x,_ := rs) m)) = 65 | case bake ps VNil rs >>>= 66 | \ bs -> (Kind >:>= bs) >>>= \ vs -> Yes (bs :&: vs) of 67 | Yes (bs :&: vs) -> 68 | ambulando ((ps :<: Param x (extend (Decl,vs)) bs,sup) :!-: PRaw m) 69 | No -> ambulando ((ps,sup) :!-: P0) 70 | ambulando ((ps,sup) :!-: PRaw (_ := RawSubMod (x,m) m')) = 71 | ambulando ((ps :<: Middle x (supply sup) ((L0,supplySuc sup) :!-: PRaw m') 72 | , supplySic sup) :!-: PRaw m) 73 | ambulando ((ps,sup) :!-: PRaw (_ := RawModComm mrs m)) = 74 | ambulando ((ps,sup) :!-: PRaw m) -- bad: binning the comment 75 | ambulando prfst@((ps,supi) :!-: tipi) = case help ps of 76 | Wit (L0 :&: _) -> prfst 77 | Wit ((pso :<: Middle x ln ((psu,supu) :!-: tipu)) :&: Flip psi) -> 78 | case lifter ps of 79 | Lifting del rho -> 80 | ambulando ( 81 | ((pso :<: Module x (globber ln prfst) ((psi,supi) :!-: tipi)) 82 | >>> lmap annoying psu,supu) 83 | :!-: tipu) 84 | -- should cache a thing rather just sticking in a Nothing 85 | where 86 | globber :: LongName -> PROOFSTATE -> Maybe (Ex Global) 87 | globber ln ((ps,_) :!-: PDef mt ty) = case lifter ps of 88 | Lifting del rho -> 89 | Just (Wit (Glob ln (del :=> varOp rho ty) (fmap (varOp rho) mt))) 90 | globber ln _ = Nothing 91 | 92 | annoying :: PZStep False u w -> PZStep True u w 93 | annoying (Param n e ty) = Param n e ty 94 | annoying (Module n g p) = Module n g p 95 | 96 | help :: PZ True u w -> RC (PZ True) (PZ False) u w 97 | help L0 = Wit (L0 :&: Flip L0) 98 | help ps@(_ :<: Middle _ _ _) = Wit (ps :&: Flip L0) 99 | help (ps :<: Param n e ty) = case help ps of 100 | Wit (ps :&: Flip ps') -> Wit (ps :&: Flip (ps' :<: Param n e ty)) 101 | help (ps :<: Module n mg x) = case help ps of 102 | Wit (ps :&: Flip ps') -> Wit (ps :&: Flip (ps' :<: Module n mg x)) 103 | 104 | -- b : Bool signifies if Middle is allowed 105 | type PZ b = LStar (PZStep b) 106 | 107 | data PZStep (b :: Bool)(v :: World) (w :: World) where 108 | Param :: Naming -> Extended v w -> TERM v -> PZStep b v w 109 | Module :: Naming -> Maybe (Ex Global) -> ProofState False w -> 110 | PZStep b w w 111 | -- middle is a back pointer 112 | Middle :: Naming -> LongName -> ProofState False w -> PZStep True w w 113 | 114 | data PTip (w :: World) where 115 | P0 :: PTip w 116 | PDef :: Maybe (TERM w) -> TERM w -> PTip w 117 | PRaw :: Sub RawModule -> PTip w 118 | 119 | data Resolution (w :: World) where 120 | RParam :: Ref w -> Resolution w 121 | RGlob :: Global n -> Bwd (TERM w) -> Resolution w 122 | 123 | instance Weakenable Resolution 124 | 125 | stripParams :: PZ True v w -> Bwd (TERM w) 126 | stripParams = stripParams' id where 127 | stripParams' :: (TERM u -> TERM w) -> PZ True v u -> Bwd (TERM w) 128 | stripParams' w L0 = B0 129 | stripParams' w (pz :<: Param y e _) = 130 | stripParams' (w . 
extwk e) pz :< w (En (P (extrRef e))) 131 | stripParams' w (pz :<: Module _ _ _) = stripParams' w pz 132 | stripParams' w (pz :<: Middle _ _ _) = stripParams' w pz 133 | 134 | data Lifting (w :: World) where 135 | Lifting :: LStar KStep Zero n -> VarOp Zero n w W0 -> Lifting w 136 | 137 | lifter :: PZ True W0 w -> Lifting w 138 | lifter L0 = Lifting L0 IdVO 139 | lifter (ps :<: Param x e ty) = case lifter ps of 140 | Lifting del rho -> Lifting (del :<: KS (varOp rho ty)) (Abst rho e) 141 | lifter (ps :<: Module _ _ _) = lifter ps 142 | lifter (ps :<: Middle _ _ _) = lifter ps 143 | 144 | eqNameStep :: Naming -> NameStep -> Either NameStep () 145 | eqNameStep y xi@(x,i) = case (x == y, i) of 146 | (True, 0) -> Right () 147 | (True, i) -> Left (x,i-1) 148 | (False, _) -> Left xi 149 | 150 | bake :: Worldly w => PZ True v w -> Vec Naming n -> RawTm -> TC (Tm (Syn n)) w 151 | bake ps ns (RawAtom x) = Yes (Atom x) 152 | bake ps ns (RawList [] Nothing) = Yes (Atom "") 153 | bake ps ns (RawList [] (Just (_ := t))) = bake ps ns t 154 | bake ps ns (RawList ((_ := t) : ts) mt) = 155 | bake ps ns t >>>= \ t -> 156 | bake ps ns (RawList ts mt) >>>= \ ts -> 157 | Yes (t :& ts) 158 | bake ps ns (RawLam x (_ := t)) = 159 | bake ps (VCons x ns) t >>>= \ t -> Yes (Lam t) 160 | bake ps ns (RawEn (_ := hd) tl) = 161 | map (bake ps ns . subproj) tl >>>== boil ps ns hd 162 | bake ps ns (RawComm (_ := t) _) = 163 | bake ps ns t -- should deal with the comments... 164 | 165 | boil :: Worldly w 166 | => PZ True v w 167 | -> Vec Naming n 168 | -> RawHd 169 | -> [Tm (Syn n) w] 170 | -> TC (Tm (Syn n)) w 171 | boil ps ns (RawTy (_ := t) (_ := ty)) ts = 172 | bake ps ns t >>>= \ t -> bake ps ns ty >>>= \ ty -> 173 | Yes (En ((t ::: ty) / ts)) 174 | boil ps ns (RawVar (x,xs)) ts = case blah x ns of 175 | Left x -> resolve (x,xs) ps >>>= \ res -> case res of 176 | RParam x -> Yes (En (P x / ts)) 177 | RGlob f tz -> case globArity f of 178 | (sz :=> _) -> case help sz (fmap vclosed tz <>> ts) of 179 | Nothing -> No 180 | Just (g,ts) -> Yes (En ((f :% g) / ts)) 181 | where 182 | help :: LStar KStep Zero m 183 | -> [Tm (Syn n) w] 184 | -> Maybe (Env (Tm (Syn n)) m w, [Tm (Syn n) w]) 185 | help L0 ts = return (E0, ts) 186 | help (sz :<: KS _) ts = do 187 | (g,t:ts) <- help sz ts 188 | return (ES g t,ts) 189 | Right i -> if null xs then Yes (En (V i / ts)) else No 190 | 191 | blah :: NameStep -> Vec Naming n -> Either NameStep (Fin n) 192 | blah x VNil = Left x 193 | blah x (VCons y ys) = case eqNameStep y x of 194 | Left x -> fmap FSuc $ blah x ys 195 | Right () -> Right FZero 196 | 197 | resolve :: RawLongName -> PZ True v w -> TC Resolution w 198 | resolve (xi,nsteps) = lookOut xi 199 | where 200 | lookOut :: NameStep -> PZ True v w -> TC Resolution w 201 | lookOut xi L0 = No 202 | lookOut xi (pz :<: Param y e _) = 203 | case eqNameStep y xi of 204 | -- found it! 
205 | Right _ | null nsteps -> Yes $ RParam (extrRef e) 206 | -- looking for module components inside a param 207 | | otherwise -> No 208 | -- carry on looking 209 | Left xi -> extwk e $ lookOut xi pz 210 | lookOut xi (pz :<: Module y mglob ((pz',_) :!-: _)) = 211 | case eqNameStep y xi of 212 | -- found the 'corner'; look inside 213 | Right _ -> case lookInside nsteps mglob pz' of 214 | (Just (Wit glob)) -> Yes (RGlob glob (stripParams pz)) 215 | _ -> No 216 | -- carry on looking 217 | Left xi -> lookOut xi pz 218 | lookOut i (pz :<: Middle _ _ _) = lookOut i pz 219 | 220 | lookInside :: [NameStep] -> Maybe (Ex Global) -> PZ False v w -> 221 | Maybe (Ex Global) 222 | lookInside [] mglob pz = mglob -- found it! 223 | lookInside (xi : xs) _ pz = lookInside' xi pz 224 | where 225 | lookInside' :: NameStep -> PZ False v w -> Maybe (Ex Global) 226 | lookInside' _ L0 = Nothing 227 | lookInside' xi (pz :<: Param _ _ _) = lookInside' xi pz 228 | -- parameters are not in scope 229 | lookInside' xi (pz :<: Module y mglob ((pz',_) :!-: _)) = 230 | case eqNameStep y xi of 231 | -- found the next step 232 | Right _ -> lookInside xs mglob pz' 233 | -- carry on looking 234 | Left xi -> lookInside' xi pz 235 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ZEUG 2 | being the beginnings of at least zero typecheckers 3 | 4 | ## Introduction 5 | At Strathclyde, we're always experimenting with the design of type theories. These experiments would be much more fun if they were cheap to implement, so this project is intended to produce a forkable pile of code that can readily be adapted to variations. How much we can do "in software" by reconfiguration and how much by tinkering with the codebase remains to be seen. In trying to support diversity, we've made some mildly deviant design choices. 6 | 7 | ### Bidirectionality 8 | We make a syntactic separation between "terms" (S,T,s,t) and "eliminations" (E,F,e,f). The latter embed in the former, silently in the concrete syntax, but we underline e for the term made by embedding e when we're working metatheoretically. Apart from embedded eliminations, terms are canonical forms. Eliminations are things which would deliver terms if only they could compute further: they always take the form of a "head" with a spine of terms waiting to apply, and the head is either a variable (in which case the elimination is stuck) or a "cut", t:T (in which case computation may continue). The _term_ t:T computes to t, meaning that the cut has achieved its computational potential and been eliminated. A normal form is cut-free. 9 | We expect to be able to synthesize the type of an elimination: its head is either typed in the context or typed explicitly, and every type we can arrive at by successive eliminations is in some way accounted for by that head type. However, we expect only to check the types of terms (with respect to types, which are themselves terms). Moreover, we do not expect that a given term will have at most one type. Our syntactic choices make it easy to be sure that there is always a type around when a term might need to be checked. This reduces the extent to which terms must contain type information (e.g., type annotations on lambdas). 10 | 11 | ### Homoiconicity (nearly) 12 | We don't need our syntax to be pretty. It's for kernel theories into which prettier languages might one day elaborate. 
We do, however, aim to fix a single syntax which is good for multiple theories. Bidirectionality frees us to adopt a sixty-year-old solution to this problem (see XKCD 297). We're pretty much just doing LISP. OK, not quite. Terms are built from atoms and pairing...and lambda and embedded eliminations: concrete things are distinguished from computations which yield them; variables are not atoms (they're de Bruijn indices); there is no code introspection as-of-right. However, we might certainly become interested in type theories where syntaxes replace datatypes as the underlying concrete stuff, at which point we should want cheap reification of computations as constructions (how do you look under a binder? substitute a _constructor_ for the bound variable). For the moment, though, the point is that we have a cheap flexible syntax for both types and terms. We will have rules which say which lumps of syntax happen to be types and which lumps of syntax those types accept. We make new type theories by choosing new judgment forms and new rules, not new syntax. 13 | 14 | ### The Collaborative Commenter Model 15 | The basic interaction mode is batch mode. You send in a file, you get back a new version of your file. The machine is like a critical collaborator in a shared project. Diagnostic responses and updates arising from requests are communicated in the output, not via error messages. The process is idempotent: if you feed the machine's output back to itself, it hasn't anything to add or take away. It's not hard to turn this into an interactive process if your editor is half way customizable. The fun part will be making it incremental, so that the machine responds quickly to small patches. This model requires that we preserve the layout of the input as faithfully as possible in the output, so we need front end kit which finds structure on the input without throwing the input away. 16 | -------------------------------------------------------------------------------- /Raw.hs: -------------------------------------------------------------------------------- 1 | module Raw where 2 | 3 | import Control.Applicative 4 | import Data.List.Split 5 | 6 | import Layout 7 | 8 | type Naming = String 9 | 10 | type NameStep = (Naming, Int) 11 | type RawLongName = (NameStep, [NameStep]) 12 | 13 | getRawLongName :: String -> Maybe RawLongName 14 | getRawLongName s = 15 | do 16 | x:xs <- traverse splitHat (splitOn "." 
s) 17 | return (x,xs) 18 | where 19 | splitHat :: String -> Maybe NameStep 20 | splitHat s = case splitOn "^" s of 21 | [s] -> Just (s,0) 22 | [s,i] -> case reads i of 23 | [(i,"")] -> Just (s,i) 24 | _ -> Nothing 25 | _ -> Nothing 26 | 27 | data RawModule 28 | = RawTip (Sub RawTip) 29 | | RawParam (String, Sub RawTm) (Sub RawModule) 30 | | RawSubMod (String, Sub RawModule) (Sub RawModule) 31 | | RawModComm [Sub (Maybe RawSplice)] (Sub RawModule) 32 | deriving Show 33 | 34 | data RawTip 35 | = RawBlank 36 | | RawDefn (Either (Sub RawHole) (Sub RawTm)) (Sub RawTm) 37 | deriving Show 38 | 39 | data RawHole 40 | = RawQuestionMark 41 | deriving Show 42 | 43 | data RawTm 44 | = RawAtom String 45 | | RawList [Sub RawTm] (Maybe (Sub RawTm)) 46 | | RawLam String (Sub RawTm) 47 | | RawEn (Sub RawHd) [Sub RawTm] 48 | | RawComm (Sub RawTm) [Sub (Maybe RawSplice)] 49 | deriving Show 50 | 51 | type RawSplice = RawTm 52 | 53 | data RawHd 54 | = RawVar RawLongName 55 | | RawTy (Sub RawTm) (Sub RawTm) 56 | deriving Show 57 | 58 | tag :: ParseTokens String 59 | tag = sym >>= \ x -> case x of 60 | '\'' : s -> return s 61 | _ -> empty 62 | 63 | var :: ParseTokens String 64 | var = sym >>= \ x -> case x of 65 | c : s | elem c "'\\-" -> empty 66 | _ | elem ':' x -> empty 67 | _ -> return x 68 | 69 | bigMod :: ParseTokens RawModule 70 | bigMod = id <$ gap <*> (RawTip <$> sub smallTip 71 | <|> RawParam <$> (grp "(" ((,) <$ gap <*> var <* gap <* 72 | eat ":" <* gap <*> sub bigTm <* gap ) ")") 73 | <*> sub bigMod 74 | <|> RawSubMod <$> (grp "{" ((,) <$ gap <*> var <* gap 75 | <*> sub bigMod <* gap ) "}") 76 | <*> sub bigMod 77 | <|> RawModComm <$> (grp "{" (id <$ eat "-" <*> nonsense <* eat "-") "}") 78 | <*> sub bigMod) 79 | 80 | holeOrDef :: ParseTokens (Either (Sub RawHole) (Sub RawTm)) 81 | holeOrDef = Left <$> sub hole <|> Right <$> sub bigTm 82 | 83 | hole :: ParseTokens RawHole 84 | hole = RawQuestionMark <$ eat "?" 85 | 86 | smallTip :: ParseTokens RawTip 87 | smallTip = id <$ gap <*> (pure RawBlank 88 | <|> RawDefn <$ eat "=" <* gap <*> holeOrDef <* gap <* 89 | eat ":" <* gap <*> sub bigTm <* gap) 90 | 91 | smallTm :: ParseTokens RawTm -- definitely small 92 | smallTm 93 | = RawAtom <$> tag 94 | <|> grp "[" (RawList <$> listTm <*> 95 | (Just <$ eat "|" <* gap <*> sub bigTm <* gap 96 | <|> pure Nothing)) "]" 97 | <|> RawEn <$> sub smallHd <*> pure [] 98 | <|> grp "(" (gap *> bigTm <* gap) ")" 99 | 100 | lamTm :: ParseTokens RawTm 101 | lamTm = RawLam <$ eat "\\" <* gap <*> var <* gap <* eat "." 
102 | <* gap <*> sub bigTm 103 | 104 | listTm :: ParseTokens [Sub RawTm] 105 | listTm = gap *> (ne <|> pure []) where 106 | ne = (: []) <$> sub lamTm <* gap 107 | <|> (:) <$> sub smallTm <*> listTm 108 | 109 | midTm :: ParseTokens RawTm -- small or middle-sized 110 | midTm = smallTm <|> lamTm 111 | 112 | individual :: ParseTokens () 113 | individual = tok >>= \ t -> case t of 114 | Grp _ _ _ -> empty 115 | _ -> return () 116 | 117 | splice :: ParseTokens RawSplice 118 | splice = bigTm 119 | 120 | nonsense :: ParseTokens [Sub (Maybe RawSplice)] 121 | nonsense = postnonsense <$> prenonsense 122 | where 123 | prenonsense :: ParseTokens [[Sub (Maybe RawSplice)]] 124 | prenonsense = 125 | many ((:[]) <$> sub (Just <$> grp "`" splice "`" <|> Nothing <$ individual) 126 | <|> regrp "(" ")" <|> regrp "[" "]" <|> regrp "{" "}") 127 | 128 | -- Glom together consecutive nothings and flatten 129 | postnonsense :: [[Sub (Maybe RawSplice)]] -> [Sub (Maybe RawSplice)] 130 | postnonsense = foldr 131 | (\x xs -> case (x,xs) of 132 | (ts := Nothing,ts' := Nothing : xs) -> (ts ++ ts') := Nothing : xs 133 | _ -> x : xs) 134 | [] . concat 135 | 136 | regrp :: String -> String -> ParseTokens [Sub (Maybe RawSplice)] 137 | regrp op cl = help <$> grp op nonsense cl 138 | where 139 | help :: [Sub (Maybe RawSplice)] -> [Sub (Maybe RawSplice)] 140 | help xs = ([ Sym op ] := Nothing : xs) ++ [[ Sym cl ] := Nothing] 141 | 142 | bigTm :: ParseTokens RawTm -- any term 143 | bigTm = stuff <|> RawComm <$> sub stuff <* gap <* eat "--" <*> nonsense 144 | where 145 | stuff = midTm 146 | <|> RawEn <$> sub smallHd <* gap 147 | <*> ((++) <$> many (sub smallTm <* gap) 148 | <*> ((:[]) <$> sub midTm)) 149 | 150 | smallHd :: ParseTokens RawHd -- definitely small 151 | smallHd 152 | = RawVar <$> refine getRawLongName var 153 | <|> grp "(" (RawTy <$ gap <*> sub bigTm <* gap <* 154 | eat ":" 155 | <* gap <*> sub bigTm <* gap) ")" 156 | 157 | data RawTree = ([Token], RawTm) :&&: [RawTree] deriving Show 158 | 159 | rawTreeFormat :: Format RawTree 160 | rawTreeFormat = Format (:&&:) (gap *> bigTm <* gap) rawTreeFormat 161 | 162 | rawTest :: String -> [[RawTree]] 163 | rawTest = document rawTreeFormat . 
layout 164 | -------------------------------------------------------------------------------- /Test.hs: -------------------------------------------------------------------------------- 1 | --{-# OPTIONS -Wall -fwarn-incomplete-patterns #-} 2 | {-# LANGUAGE KindSignatures, DataKinds, ScopedTypeVariables, PolyKinds, 3 | UndecidableInstances, MultiParamTypeClasses, 4 | FunctionalDependencies, PatternSynonyms, 5 | FlexibleInstances, GADTs, DeriveFunctor, RankNTypes, EmptyCase, 6 | TypeFamilies, StandaloneDeriving #-} 7 | module Test where 8 | 9 | import Prelude hiding (pi) 10 | import Utils 11 | import Syntax 12 | import PHOAS 13 | import TypeCheck 14 | 15 | import Layout 16 | import Raw hiding (var) 17 | import ProofState 18 | 19 | 20 | data Test where 21 | PARSE :: String -> Test 22 | ISKIND :: TERM W0 -> Test 23 | CHECK :: TERM W0 -> TERM W0 -> Test 24 | NORM :: ELIM W0 -> TERM W0 -> Test 25 | FAILS :: Test -> Test 26 | 27 | deriving instance Show Test 28 | 29 | pattern INFER e t = CHECK t (En e) 30 | 31 | runTest :: Test -> TC Happy W0 32 | runTest (PARSE s) = 33 | if length (parses bigMod (headline (layout s))) == 1 then Yes Happy else No 34 | runTest (ISKIND ty) = Kind >:> ty 35 | runTest (CHECK k t) = (Kind >:>= k) >>>= \ vty -> vty >:> t 36 | runTest (NORM e nf) = 37 | infer e >>>= \ (v :::: vty) -> 38 | if etaquote (v :::: vty) == nf then Yes Happy else No 39 | runTest (FAILS test) = case runTest test of 40 | No -> Yes Happy 41 | Yes _ -> No 42 | 43 | testReport :: (String,Test) -> IO () 44 | testReport (name,test) = case runTest test of 45 | Yes _ -> putStrLn (name ++ ": PASSED") 46 | No -> putStrLn (name ++ ": FAILED:") >> print test 47 | 48 | passtests = 49 | [("test-1",ISKIND (El (Pi (Set Ze) (Set Ze)))) 50 | ,("test0",CHECK (El (Pi (Set Ze) (Set Ze))) (lam var')) 51 | ,("test1",INFER ((lam var' ::: El (Pi (Set (Su Ze)) (Set (Su Ze)))) :/ (Set Ze)) (El (Set (Su Ze)))) 52 | ,("test1.5",ISKIND (El (pi (Set Ze) $ \ a -> Pi (var' a) (var' a)))) 53 | ,("test2",CHECK (El (pi (Set Ze) $ \ a -> Pi (var' a) (var' a))) (Lam $ lam $ \ x -> var' x)) 54 | ,("test3",INFER (Lam (Lam (En (V FZero))) ::: El (Pi (Set (Su Ze)) (Pi (En (V FZero)) (En (V (FSuc (FZero)))))) :/ (Set Ze)) (El (Pi (Set Ze) (Set Ze)))) 55 | ,("test4",INFER ((Lam $ lam var') ::: El (pi (Set (Su (Su Ze))) $ \ a -> Pi (var' a) (var' a)) :/ (Set (Su Ze)) :/ (Set Ze)) (El (Set (Su Ze)))) 56 | ,("test5",CHECK (El (Sg (Set (Su Ze)) (Set (Su Ze)))) ((Set Ze) :& (Set Ze))) 57 | ,("test6",INFER ((((Set Ze) :& (Set Ze)) ::: El (Sg (Set (Su Ze)) (Set (Su Ze)))) :/ Fst) (El (Set (Su Ze)))) 58 | ,("test7",INFER ((((Set Ze) :& (Set Ze)) ::: El (Sg (Set (Su Ze)) (Set (Su Ze)))) :/ Snd) (El (Set (Su Ze)))) 59 | ,("test8",CHECK (El (Sg (Set (Su (Su Ze))) (En (V FZero)))) ((Set (Su Ze)) :& (Set Ze))) 60 | ,("test9",CHECK (El (Pi (Sg (Set Ze) (Set Ze)) (Set Ze))) (Lam (En ((V FZero) :/ Fst)))) 61 | ,("test0",NORM ((lam var') ::: El (Pi (Sg (Set Ze) (Set Ze)) (Sg (Set Ze) (Set Ze)))) 62 | (lam $ \ p -> (En ((:/) (var p) (Atom "Fst")) :& En ((:/) (var p) (Atom "Snd"))))) 63 | ,("testLet",CHECK (El (Set (Su Ze))) (letin ((Set (Su Ze)) ::: El (Set (Su (Su Ze)))) $ \ x -> (En ((Set Ze) ::: El (var' x))))) 64 | ] 65 | 66 | failtests = map (fmap FAILS) 67 | [("test0",NORM ((Lam (En (V FZero)) ::: El (Set Ze)) :/ (Set Ze)) (El (Set Ze))) 68 | ,("test3",INFER (Lam (Lam (En (V FZero))) ::: El (Pi (Set Ze) (Pi (En (V FZero)) (En (V (FSuc (FZero)))))) :/ (Set Ze)) (El (Pi (Set Ze) (Set Ze)))) 69 | ,("test4",INFER (Lam (Lam (En (V FZero))) ::: El (Pi (Set 
Ze) (Pi (En (V FZero)) (En (V (FSuc (FZero)))))) :/ (Set Ze) :/ (Set Ze)) (El (Set Ze))) 70 | ,("testLet",CHECK (Set Ze) (En ((Lam (En ((Set Ze) ::: El (En (V FZero)))) ::: El (Pi (Set Ze) (Set Ze))) :/ (Set Ze)))) 71 | ] 72 | 73 | rawTests = 74 | [("rawtest0",PARSE "") 75 | ,("rawtest1",PARSE "(x : S)") 76 | ,("rawtest2",PARSE "(x : S){x}") 77 | ,("rawtest3",PARSE "(x : S){x = hello : world}")] 78 | 79 | -- proof state tests 80 | testRig :: String -> String 81 | testRig s = ugly 0 (ambulando ((L0,supply0) 82 | :!-: PRaw (snd (head (parses (sub bigMod) (headline (layout s))))))) 83 | 84 | ftestRig :: String -> IO () 85 | ftestRig s = do 86 | x <- readFile s 87 | putStrLn (testRig x) 88 | 89 | main = do 90 | mapM_ testReport (rawTests ++ passtests ++ failtests) 91 | -- can't do much else until we have a pretty printer 92 | ftestRig "tests/tests.zeug" 93 | 94 | blerk :: TC Val W0 95 | blerk = Kind >:>= El (Set Ze) 96 | -------------------------------------------------------------------------------- /TypeCheck.hs: -------------------------------------------------------------------------------- 1 | --{-# OPTIONS -Wall -fwarn-incomplete-patterns #-} 2 | {-# LANGUAGE KindSignatures, DataKinds, ScopedTypeVariables, PolyKinds, 3 | UndecidableInstances, MultiParamTypeClasses, 4 | FunctionalDependencies, TypeOperators, 5 | FlexibleInstances, GADTs, DeriveFunctor, RankNTypes, 6 | EmptyCase, TypeFamilies, PatternSynonyms, 7 | GeneralizedNewtypeDeriving #-} 8 | module TypeCheck where 9 | 10 | import Prelude hiding ((/)) 11 | import Utils 12 | import Syntax 13 | 14 | -- our monad is on world-indexed sets 15 | newtype TC t w = TC (Maybe (t w)) deriving Show 16 | 17 | pattern Yes t = TC (Just t) 18 | pattern No = TC Nothing 19 | 20 | instance Weakenable t => Weakenable (TC t) 21 | 22 | (>>>=) :: TC s w -> (s w -> TC t w) -> TC t w 23 | Yes s >>>= f = f s 24 | No >>>= _ = No 25 | 26 | (>>>==) :: [TC s w] -> ([s w] -> TC t w) -> TC t w 27 | [] >>>== k = k [] 28 | (x:xs) >>>== k = 29 | x >>>= \ x -> 30 | xs >>>== \ xs -> 31 | k (x:xs) 32 | 33 | tcBool :: Bool -> TC Happy w 34 | tcBool True = Yes Happy 35 | tcBool False = No 36 | 37 | instance Dischargeable f g => Dischargeable (TC f) (TC g) where 38 | discharge x No = No 39 | discharge x (Yes f) = Yes (discharge x f) 40 | 41 | 42 | -- actionOk 43 | (/:>) :: Worldly w => Kind w -> TERM w -> TC Happy w 44 | El (Pi _S _T) /:> s = El _S >:> s 45 | El (Sg _S _T) /:> Fst = Yes Happy 46 | El (Sg _S _T) /:> Snd = Yes Happy 47 | El (Path _S sig _T) /:> At p = Point sig >:> p 48 | _ /:> _ = No 49 | 50 | -- evaluate action safely 51 | (/:>=) :: Worldly w => Kind w -> TERM w -> TC Val w 52 | k /:>= t = k /:> t >>>= \ _ -> Yes (val t) 53 | 54 | -- check a term in a trusted kind 55 | (>:>) :: Worldly w => Kind w -> TERM w -> TC Happy w 56 | Kind >:> Type = Yes Happy 57 | Type >:> Set l = Level >:> l 58 | Type >:> Pi dom cod = 59 | (Type >:>= dom) >>>= \ dom -> 60 | (Decl,El dom) !- \ x -> Type >:> (cod // P x) 61 | Type >:> Sg dom cod = 62 | (Type >:>= dom) >>>= \ dom -> 63 | (Decl,El dom) !- \ x -> Type >:> (cod // P x) 64 | Kind >:> El t = Type >:> t 65 | El (Set l') >:> Set l = 66 | Level >:>= l >>>= \l -> 67 | l' `levelGT` l 68 | El (Set l) >:> Pi dom cod = 69 | (El (Set l) >:>= dom) >>>= \ dom -> 70 | (Decl,El dom) !- \ x -> wk (El (Set l)) >:> (cod // P x) 71 | El (Set l) >:> Sg dom cod = 72 | (El (Set l) >:>= dom) >>>= \ dom -> 73 | (Decl,El dom) !- \ x -> wk (El (Set l)) >:> (cod // P x) 74 | El (Pi dom cod) >:> Lam t = 75 | (Decl,El dom) !- \ x -> El (wk cod / x) 
>:> (t // P x) 76 | El (Sg dom cod) >:> (t :& u) = 77 | (El dom >:>= t) >>>= \ t -> 78 | El (cod / (t :::: El dom)) >:> u 79 | Kind >:> Level = Yes Happy 80 | Level >:> Ze = Yes Happy 81 | Level >:> Su n = Level >:> n 82 | 83 | Kind >:> Seg = Yes Happy 84 | Seg >:> Dash = Yes Happy 85 | Seg >:> Weld sig _T tau = 86 | Seg >:> sig >>>= \ _ -> 87 | Type >:> _T >>>= \ _ -> 88 | Seg >:> tau 89 | Kind >:> Point sig = Seg >:> sig 90 | Point sig >:> Point Ze = Yes Happy 91 | Point sig >:> Point One = Yes Happy 92 | Point (Weld sig _T tau) >:> Lft p = Point sig >:> p 93 | Point (Weld sig _T tau) >:> Rht p = Point tau >:> p 94 | Type >:> Path _S sig _T = 95 | Type >:> _S >>>= \ _ -> 96 | Seg >:> sig >>>= \ _ -> 97 | Type >:> _T 98 | El (Path _S sig _T) >:> Lam _M = 99 | (Decl,Point sig) !- \ i -> 100 | Type >:> (_M // P i) >>>= \ _ -> 101 | tcBool (kEq Type _S (Scope E0 _M / (Ze :::: Point sig))) >>>= \ _ -> 102 | tcBool (kEq Type _T (Scope E0 _M / (One :::: Point sig))) 103 | {-El (Path _S Dash _U) >:> Link _Q _M _Q' = 104 | Type >:>= _M >>>= \ _M -> 105 | El (Path _S Dash _M) >:> _Q >>>= \ _ -> 106 | El (Path _M Dash _U) >:> _Q' -} 107 | 108 | want >:> En e = 109 | infer e >>>= \ got -> 110 | kindOf got `subKind` want 111 | k >:> Let e t = 112 | infer e >>>= \ (v :::: j) -> 113 | (Local v,j) !- \ x -> wk k >:> (t // P x) 114 | _ >:> _ = No 115 | 116 | levelGT :: Worldly w => Val w -> Val w -> TC Happy w 117 | Ze `levelGT` _ = No 118 | Su l' `levelGT` Ze = Yes Happy 119 | Su l' `levelGT` Su l = l' `levelGT` l 120 | 121 | -- evaluate a term safely 122 | (>:>=) :: Worldly w => Kind w -> TERM w -> TC Val w 123 | k >:>= t = k >:> t >>>= \ _ -> Yes (val t) 124 | 125 | -- infer 126 | -- safely evaluate an elim and return a thing (evaluated elim + its kind) 127 | infer :: Worldly w => ELIM w -> TC THING w 128 | infer (P x) = Yes (refThing x) 129 | infer (e :/ s) = 130 | infer e >>>= \ e@(v :::: k) -> 131 | (k /:>= s) >>>= \ s -> 132 | Yes (e / s) 133 | infer (x :% g) = case globArity x of 134 | ks :=> cod -> 135 | goodInstance ks g >>>= \ vs -> 136 | Yes $ let k = eval (wk cod) vs in 137 | case globDefn x of 138 | Nothing -> En (x :% emap valOf vs) :::: k 139 | Just t -> eval (wk t) vs :::: k 140 | infer (t ::: k) = 141 | (Kind >:>= k) >>>= \ k -> 142 | (k >:>= t) >>>= \ t -> 143 | Yes (t :::: k) 144 | 145 | goodInstance :: Worldly w 146 | => LStar KStep Zero n 147 | -> Env TERM n w 148 | -> TC (Env THING n) w 149 | goodInstance L0 E0 = Yes E0 150 | goodInstance (ks :<: KS ty) (ES g t) = 151 | goodInstance ks g >>>= \ vs -> 152 | (eval (wk ty) vs >:>= t) >>>= \ v -> 153 | Yes (ES vs (v :::: eval (wk ty) vs)) 154 | 155 | subKind :: Worldly w => Val w -> Val w -> TC Happy w 156 | Type `subKind` Type = Yes Happy 157 | El (Pi dom0 cod0) `subKind` El (Pi dom1 cod1) = 158 | El dom1 `subKind` El dom0 >>>= \ _ -> 159 | (Decl,El dom1) !- \ x -> El (wk cod0 / x) `subKind` El (wk cod1 / x) 160 | El (Sg dom0 cod0) `subKind` El (Sg dom1 cod1) = 161 | El dom0 `subKind` El dom1 >>>= \ _ -> 162 | (Decl,El dom0) !- \ x -> El (wk cod0 / x) `subKind` El (wk cod1 / x) 163 | El (Set _) `subKind` Type = Yes Happy 164 | El (Set l) `subKind` El (Set l') = levelLEQ l l' 165 | Level `subKind` Level = Yes Happy 166 | 167 | El (Path _S _ _T) `subKind` El (Path _S' Dash _T') = 168 | tcBool (kEq Type _S _S') >>>= \ _ -> 169 | tcBool (kEq Type _T _T') 170 | El (Path _S (Weld sig _M tau) _T) `subKind` El (Path _S' (Weld sig' _M' tau') _T') = 171 | El (Path _S sig _M) `subKind` El (Path _S' sig' _M') >>>= \ _ -> 172 | El (Path _M tau _T) 
`subKind` El (Path _M' tau' _T') 173 | El this `subKind` El that = 174 | if kEq Type this that then Yes Happy else No 175 | En e0 `subKind` En e1 = 176 | if e0 == e1 then Yes Happy else No 177 | _ `subKind` _ = No 178 | 179 | levelLEQ :: Worldly w => Val w -> Val w -> TC Happy w 180 | Ze `levelLEQ` _ = Yes Happy 181 | Su l' `levelLEQ` Ze = No 182 | Su l' `levelLEQ` Su l = l' `levelLEQ` l 183 | -------------------------------------------------------------------------------- /Utils.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE KindSignatures, DataKinds, EmptyCase, GADTs, 2 | DeriveFunctor, StandaloneDeriving, PolyKinds, 3 | TypeOperators, ScopedTypeVariables, RankNTypes, 4 | TypeFamilies, UndecidableInstances #-} 5 | module Utils where 6 | 7 | type family EQ x y where 8 | EQ x x = True 9 | EQ x y = False 10 | 11 | type family OR x y where 12 | OR True y = True 13 | OR x True = True 14 | OR False y = y 15 | OR x False = x 16 | 17 | data Nat = Zero | Suc Nat deriving Show 18 | 19 | type One = Suc Zero 20 | 21 | type family NatLT (m :: Nat) (n :: Nat) where 22 | NatLT m (Suc n) = NatLE m n 23 | NatLT m n = False -- wildcards not supported in ghc<8 24 | 25 | type family NatLE (m :: Nat) (n :: Nat) where 26 | NatLE m n = OR (EQ m n) (NatLT m n) 27 | 28 | data Fin (n :: Nat) where 29 | FZero :: Fin (Suc n) 30 | FSuc :: Fin n -> Fin (Suc n) 31 | 32 | deriving instance Eq (Fin n) 33 | deriving instance Show (Fin n) 34 | 35 | absurd :: Fin Zero -> a 36 | absurd k = case k of {} 37 | 38 | data Vec x (n :: Nat) where 39 | VNil :: Vec x Zero 40 | VCons :: x -> Vec x n -> Vec x (Suc n) 41 | 42 | vlookup :: Fin n -> Vec x n -> x 43 | vlookup FZero (VCons x _ ) = x 44 | vlookup (FSuc i) (VCons _ xs) = vlookup i xs 45 | 46 | -- find the first x in the vector and return its index 47 | velemIndex :: Eq x => x -> Vec x n -> Maybe (Fin n) 48 | velemIndex x VNil = Nothing 49 | velemIndex x (VCons x' xs) = 50 | if x == x' then 51 | Just FZero 52 | else 53 | fmap FSuc (velemIndex x xs) 54 | 55 | -- find the nth x in the vector and return its index 56 | velemIndex' :: Eq x => x -> Nat -> Vec x n -> Maybe (Fin n) 57 | velemIndex' x n VNil = Nothing 58 | velemIndex' x n (VCons x' xs) = 59 | if x == x' then 60 | case n of 61 | Zero -> Just FZero 62 | Suc n -> fmap FSuc (velemIndex' x n xs) 63 | else 64 | fmap FSuc (velemIndex' x n xs) 65 | 66 | -- utilities 67 | data Bwd x = B0 | Bwd x :< x deriving Functor 68 | 69 | bmap :: (a -> b) -> Bwd a -> Bwd b 70 | bmap f B0 = B0 71 | bmap f (xs :< x) = bmap f xs :< f x 72 | 73 | (+<+) :: Bwd x -> Bwd x -> Bwd x 74 | xs +<+ B0 = xs 75 | xs +<+ (ys :< y) = (xs +<+ ys) :< y 76 | 77 | (<><) :: Bwd x -> [x] -> Bwd x 78 | xs <>< (y : ys) = (xs :< y) <>< ys 79 | xs <>< [] = xs 80 | 81 | (<>>) :: Bwd x -> [x] -> [x] 82 | B0 <>> ys = ys 83 | (xs :< x) <>> ys = xs <>> (x : ys) 84 | 85 | -- indexed unit type 86 | data Happy :: k -> * where 87 | Happy :: Happy k 88 | deriving Show 89 | data (:*) (s :: k -> *) (t :: k -> *) (i :: k) = s i :&: t i 90 | 91 | -- reflexive transitive closures 92 | 93 | data LStar r a b where 94 | L0 :: LStar r a a 95 | (:<:) :: LStar r a b -> r b c -> LStar r a c 96 | 97 | lextend :: (forall a b . r a b -> LStar s a b) -> LStar r a b -> LStar s a b 98 | lextend f L0 = L0 99 | lextend f (xs :<: x) = lextend f xs >>> f x 100 | 101 | lmap :: (forall a b . 
r a b -> s a b) -> LStar r a b -> LStar s a b 102 | lmap f xs = lextend (\ x -> L0 :<: f x) xs 103 | 104 | data RStar r a b where 105 | R0 :: RStar r a a 106 | (:>:) :: r a b -> RStar r b c -> RStar r a c 107 | 108 | class Category (hom :: obj -> obj -> *) where 109 | idCat :: hom x x 110 | (<<<) :: hom y z -> hom x y -> hom x z 111 | f <<< g = g >>> f 112 | (>>>) :: hom x y -> hom y z -> hom x z 113 | f >>> g = g <<< f 114 | 115 | instance Category (->) where 116 | idCat = id 117 | (<<<) = (.) 118 | 119 | instance Category (LStar r) where 120 | idCat = L0 121 | xs >>> L0 = xs 122 | xs >>> (ys :<: y) = (xs >>> ys) :<: y 123 | 124 | instance Category (RStar r) where 125 | idCat = R0 126 | R0 >>> ys = ys 127 | (x :>: xs) >>> ys = x :>: (xs >>> ys) 128 | 129 | -- existential 130 | 131 | data Ex (f :: k -> *) where 132 | Wit :: f i -> Ex f 133 | 134 | data Ex2 (f :: k -> l -> *)(j :: l) where 135 | Wit2 :: f i j -> Ex2 f j 136 | 137 | type Dot f g = Ex (f :* g) 138 | 139 | newtype Flip {- pin'eck -} f x y = Flip {pilf :: f y x} 140 | 141 | type RC r s x y = Dot (r x) (Flip s y) 142 | -------------------------------------------------------------------------------- /tests/tests.zeug: -------------------------------------------------------------------------------- 1 | { set_id (X : ['El ['Set ['zero]]]) = X : ['El ['Set ['zero]]] } 2 | { poly_id (X : ['El ['Set ['zero]]])(x : ['El X]) = x : X } 3 | { Nat 4 | { Ty = ['Pi ['Set ['zero]] \ X . ['Pi ['Pi X \ x . X] \ f . ['Pi X \ x . X]]] : ['Type] } 5 | { zero = \ f . \ x . x : ['El Ty] } 6 | { suc = \ n . \ f . \ x . f (n f x) : ['El ['Pi Ty \ x . Ty]] } } 7 | { two = Nat.suc (Nat.suc Nat.zero) : ['El Nat.Ty] } 8 | { List 9 | (X : ['Type]) 10 | { Ty = ? : ['Type] } 11 | { nil = ? : ['El Ty] } 12 | { cons = ? : ['El ['Pi X \ x . ['Pi Ty \ xs . Ty]]] } } 13 | { nums = List.cons Nat.Ty two 14 | (List.cons Nat.Ty Nat.zero (List.nil Nat.Ty)) 15 | : ['El (List.Ty Nat.Ty)] } 16 | { twos = [ two | two ] : ['El ['Sg Nat.Ty \ x . Nat.Ty ]] } 17 | { fst = twos 'Fst : ['El Nat.Ty] } 18 | { snd = twos 'Snd : ['El Nat.Ty] } 19 | --------------------------------------------------------------------------------
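For reference, Test.hs above already pushes this file through the proof-state machinery (ftestRig "tests/tests.zeug" at the end of main). A minimal sketch of doing the same by hand, assuming the modules in this tree load under a recent GHC:

    $ ghci Test.hs
    ghci> ftestRig "tests/tests.zeug"   -- parse with bigMod, elaborate with ambulando, print with ugly
    ghci> main                          -- also runs the PARSE / ISKIND / CHECK / NORM suites

Definitions left as ? (such as List.Ty above) come back as unsolved tips, which ugly prints as ?.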