├── .gitignore ├── .vscode └── settings.json ├── Common.lagda ├── Makefile ├── README.md ├── acmart-preload-hook.tex ├── agda.sty ├── bib.bib ├── common-preamble.tex ├── core.lyx ├── diagrams ├── coeq-flex-flex.json ├── coeq-flex1.json ├── coeq-pointwise.json ├── pushout-coeq-simpl.json ├── pushout-coeq.json ├── pushout-in.json ├── pushout-stepwise.json └── sizeM-equaliser.json ├── draft.lyx ├── draft.pdf ├── ebutf8.sty ├── fullshort.sty ├── jfp.cls ├── jfp.layout ├── jfplike.bst ├── latex-agda ├── Common.tex ├── lc.tex ├── lcsig.tex ├── lib.tex └── main.tex ├── lc.lagda ├── lcsig.lagda ├── lib.lagda ├── lics24-response.md ├── lics24-reviews.md ├── lncs-long.lyx ├── lncs-long.pdf ├── lncs.lyx ├── main.lagda ├── main.lyx ├── main.ml ├── main.pdf ├── nominal.lyx ├── nominal.pdf ├── occurcheckind.agda ├── pbkB-present.json ├── pbkBsimp.json ├── pbkBsimp2.json ├── popl24-response.md ├── popl24-reviews.md ├── popl25-response.txt ├── popl25-reviews.md ├── quiver.sty ├── refstyle.cfg ├── refstyle.sty ├── short.lyx ├── short.pdf ├── slides.lyx ├── slides.pdf ├── splncs04.bst ├── systemF.agda ├── unification.agda-lib └── unifier-v12.json /.gitignore: -------------------------------------------------------------------------------- 1 | *.lyx# 2 | *.lyx~ 3 | *.agdai 4 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.exclude": { 3 | "**/*.vo": true, 4 | "**/*.vok": true, 5 | "**/*.vos": true, 6 | "**/*.aux": true, 7 | "**/*.glob": true, 8 | "**/.git": true, 9 | "**/.svn": true, 10 | "**/.hg": true, 11 | "**/.DS_Store": true, 12 | "**/Thumbs.db": true, 13 | "**/CVS": true 14 | }, 15 | "files.trimTrailingWhitespace": false, 16 | "agdaMode.highlighting.getHighlightWithThemeColors": false 17 | } -------------------------------------------------------------------------------- /Common.lagda: -------------------------------------------------------------------------------- 1 | \begin{code} 2 | open import Data.List as List hiding ([_]) 3 | open import Data.List.Membership.Propositional 4 | open import Data.List.Relation.Unary.Any renaming (_─_ to _⑊_ ) 5 | open import lib 6 | open import Data.Maybe.Base using (Maybe) renaming (nothing to ⊥ ; just to ⌊_⌋) 7 | open import Data.Product using (_,_; Σ; _×_) 8 | open import Agda.Primitive 9 | 10 | 11 | -- k' is typically instantiated as i ⊔ j ⊔ k 12 | module Common {i}{j}{k}(A : Set i) 13 | (hom : A → A → Set j) 14 | (id : ∀{a} → hom a a) 15 | (Tm-parameter : Maybe (List A) → A → Set (i ⊔ j ⊔ k)) 16 | where 17 | 18 | 19 | private 20 | k' = i ⊔ j ⊔ k 21 | MetaContext· = List A 22 | MetaContext = Maybe MetaContext· 23 | -- we don't use directly Tm-parameter because the generated latex 24 | -- font is ugly for a module parameter 25 | Tm = Tm-parameter 26 | Tm· = λ Γ a → Tm ⌊ Γ ⌋ a 27 | 28 | module SubstitutionDef where 29 | infix 3 _·⟶_ 30 | infix 3 _·⟶·_ 31 | infix 3 _⟶_ 32 | data _⟶_ : MetaContext → MetaContext → Set k' 33 | _·⟶_ : MetaContext· → MetaContext → Set k' 34 | _·⟶·_ : MetaContext· → MetaContext· → Set k' 35 | 36 | \end{code} 37 | %<*dotted-substitution> 38 | \begin{code} 39 | -- Proper substitutions 40 | Γ ·⟶ Δ = ⌊ Γ ⌋ ⟶ Δ 41 | \end{code} 42 | % 43 | %<*successful-substitution> 44 | \begin{code} 45 | -- Successful substitutions 46 | Γ ·⟶· Δ = ⌊ Γ ⌋ ⟶ ⌊ Δ ⌋ 47 | \end{code} 48 | % 49 | %<*substitution-def> 50 | \begin{code} 51 | data _⟶_ where 52 | [] : ∀ {Δ} → ([] ·⟶ Δ ) 53 | _,_ : ∀ {Γ Δ m} → Tm Δ m → 
(Γ ·⟶ Δ) → (m ∷ Γ ·⟶ Δ) 54 | 1⊥ : ⊥ ⟶ ⊥ 55 | \end{code} 56 | % 57 | \begin{code} 58 | 59 | nth : ∀ {Γ Δ m} → (Γ ·⟶ Δ) → m ∈ Γ → Tm Δ m 60 | nth (t , δ) Ο = t 61 | nth (t , δ) (1+ M) = nth δ M 62 | 63 | open SubstitutionDef 64 | 65 | module wkₛ 66 | (wkₜ : ∀ {Γ a m} → Tm· Γ a → Tm· (m ∷ Γ) a) 67 | where 68 | 69 | wkₛ : ∀{Γ Δ m} → (Γ ·⟶· Δ) → (Γ ·⟶· m ∷ Δ) 70 | wkₛ [] = [] 71 | wkₛ (t , σ) = wkₜ t , wkₛ σ 72 | 73 | 74 | module !ₛ (! : ∀ {a} → Tm ⊥ a) where 75 | !ₛ : ∀ {Γ} → Γ ⟶ ⊥ 76 | !ₛ {⊥} = 1⊥ 77 | !ₛ {⌊ [] ⌋} = [] 78 | !ₛ {⌊ m ∷ Γ ⌋} = ! , !ₛ 79 | 80 | module -[-]s 81 | (_[_]t-parameter : ∀ {Γ a} → Tm Γ a → ∀ {Δ} → (Γ ⟶ Δ) → Tm Δ a) where 82 | 83 | private 84 | -- we don't use directly Tm-parameter because the generated latex 85 | -- font is ugly for a module parameter 86 | _[_]t = _[_]t-parameter 87 | \end{code} 88 | %<*compose-substitution-proto> 89 | \begin{code} 90 | _[_]s : ∀ {Γ Δ E} → (Γ ⟶ Δ) → (Δ ⟶ E) → (Γ ⟶ E) 91 | \end{code} 92 | % 93 | %<*compose-substitution-def> 94 | \begin{code} 95 | [] [ σ ]s = [] 96 | (t , δ) [ σ ]s = t [ σ ]t , δ [ σ ]s 97 | 1⊥ [ 1⊥ ]s = 1⊥ 98 | \end{code} 99 | % 100 | \begin{code} 101 | 102 | module 1ₛ 103 | (wkₜ : ∀ {Γ a m} → Tm· Γ a → Tm· (m ∷ Γ) a) 104 | (_﹙_﹚ : ∀ {Γ a m} → m ∈ Γ → hom m a → Tm ⌊ Γ ⌋ a) 105 | where 106 | open wkₛ wkₜ 107 | \end{code} 108 | %<*id-subst> 109 | \begin{code} 110 | 1ₛ : ∀ {Γ} → Γ ⟶ Γ 111 | 1ₛ {⊥} = 1⊥ 112 | 1ₛ {⌊ [] ⌋} = [] 113 | 1ₛ {⌊ m ∷ Γ ⌋} = Ο ﹙ id ﹚ , wkₛ 1ₛ 114 | \end{code} 115 | % 116 | \begin{code} 117 | module Substitution 118 | (wkₜ : ∀ {Γ a m} → Tm· Γ a → Tm· (m ∷ Γ) a) 119 | (_﹙_﹚ : ∀ {Γ a m} → m ∈ Γ → hom m a → Tm ⌊ Γ ⌋ a) 120 | where 121 | open wkₛ wkₜ 122 | open 1ₛ wkₜ _﹙_﹚ 123 | 124 | _↦_,_ : ∀ {Γ Δ m} → (M : m ∈ Γ) → Tm Δ m 125 | → (Γ ⑊ M ·⟶ Δ) → (Γ ·⟶ Δ) 126 | Ο ↦ t , σ = t , σ 127 | 1+ M ↦ t , (u , σ) = u , M ↦ t , σ 128 | 129 | _↦-﹙_﹚ : ∀ {Γ m p} → (M : m ∈ Γ) → hom p m 130 | → Γ ·⟶· Γ [ M ∶ p ] 131 | Ο ↦-﹙ x ﹚ = Ο ﹙ x ﹚ , wkₛ 1ₛ 132 | 1+ M ↦-﹙ x ﹚ = Ο ﹙ id ﹚ , wkₛ (M ↦-﹙ x ﹚) 133 | 134 | -- precedence below _∷_, which is 4 135 | \end{code} 136 | %<*substitution-def> 137 | \begin{code} 138 | \end{code} 139 | % 140 | \begin{code} 141 | 142 | module occur-cases where 143 | {- ---------------------- 144 | Occur check 145 | -------------------------- -} 146 | data occur-cases {Γ m} (M : m ∈ Γ) a : Set k' where 147 | Same-MVar : hom m a → occur-cases M a 148 | Cycle : occur-cases M a 149 | No-Cycle : Tm· (Γ ⑊ M) a → occur-cases M a 150 | 151 | module PruneUnifyTypes where 152 | \end{code} 153 | %<*prune-type> 154 | \begin{code} 155 | record [_]∪_⟶? m Γ : Set k' where 156 | constructor _◄_﹔_ 157 | field 158 | Δ : MetaContext 159 | u : Tm Δ m 160 | σ : Γ ⟶ Δ 161 | \end{code} 162 | % 163 | %<*substfrom> 164 | \begin{code} 165 | record _⟶? Γ : Set k' where 166 | constructor _◄_ 167 | field 168 | Δ : MetaContext 169 | σ : Γ ⟶ Δ 170 | \end{code} 171 | % 172 | \begin{code} 173 | infix 19 _◄_﹔_ 174 | -- infix 19 _·◄_﹔_ 175 | -- pattern _·◄_﹔_ Δ σ δ = ⌊ Δ ⌋ ◄ σ ﹔ δ 176 | infix 19 _◄_ 177 | 178 | 179 | _·⟶? : MetaContext· → Set k' 180 | Γ ·⟶? = ⌊ Γ ⌋ ⟶? 
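-- Illustration (editor's sketch, not type-checked against this development;
-- the names σ, t₁, t₂, m₁, m₂ and Δ below are hypothetical):
-- a substitution Γ ·⟶ Δ lists one term of Tm Δ m per metavariable m of Γ,
-- so for Γ = m₁ ∷ m₂ ∷ [] one would build
--   σ : (m₁ ∷ m₂ ∷ []) ·⟶ Δ
--   σ = t₁ , (t₂ , [])
-- from terms t₁ : Tm Δ m₁ and t₂ : Tm Δ m₂; nth σ then returns t₁ on the
-- membership proof Ο and t₂ on 1+ Ο.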
181 | 182 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | agda_latex_dir = latex-agda 2 | agda_files = Common.lagda lib.lagda lc.lagda main.lagda systemF.agda lcsig.lagda 3 | agda_latex_files= $(agda_files:%.lagda=$(agda_latex_dir)/%.tex) 4 | long_paper = paper-with-appendices 5 | popl_paper = popl-paper 6 | 7 | 8 | .PHONY: all agdatex popl 9 | 10 | $(agda_latex_dir)/%.tex: %.lagda 11 | agda --latex --latex-dir=$(agda_latex_dir) $< 12 | 13 | all: draft.pdf 14 | agdatex: $(agda_latex_files) 15 | 16 | draft.pdf: draft.lyx core.lyx jfp.layout common-preamble.tex ebutf8.sty $(agda_latex_files) 17 | lyx --export pdf2 draft.lyx 18 | 19 | draft.tex: draft.lyx core.lyx common-preamble.tex ebutf8.sty $(agda_latex_files) 20 | lyx --export pdflatex draft.lyx -f all 21 | 22 | $(long_paper).pdf: draft.pdf 23 | cp $< $@ 24 | # 25 | $(popl_paper).pdf: draft.pdf 26 | pdftk $< cat 1-26 output $@ 27 | 28 | index.html: README.md 29 | pandoc -f markdown $< > $@ 30 | # 31 | supplemental-material.zip: $(agda_files) index.html 32 | # supplemental-material.zip: $(agda_files) README.md 33 | rm -f $@ 34 | zip $@ $^ 35 | 36 | 37 | popl: $(popl_paper).pdf supplemental-material.zip 38 | 39 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Generic pattern unification implemented in Agda (tested with v2.7.0.1, with standard library v2.1.1). 2 | 3 | Contents: 4 | - lib.lagda: some general purpose definitions and lemmas 5 | - lc.lagda (~450 LoC): pattern unification for λ-calculus 6 | - main.lagda (~300 LoC): generic pattern unification 7 | - common.lagda (~175 LoC): Agda code shared verbatim between lc.lagda and main.lagda 8 | - lcsig.lagda: signature for λ-calculus 9 | - systemF.agda (~800 LoC): signature for system F -------------------------------------------------------------------------------- /acmart-preload-hook.tex: -------------------------------------------------------------------------------- 1 | \let\LoadClassOrig\LoadClass 2 | \renewcommand\LoadClass[2][]{% 3 | \RequirePackage{refstyle} 4 | \LoadClassOrig[#1]{#2}% 5 | } -------------------------------------------------------------------------------- /common-preamble.tex: -------------------------------------------------------------------------------- 1 | % \usepackage{subcaption} 2 | % for subfigures/subtables 3 | %\usepackage[caption=false,font=footnotesize]{subfig} 4 | \usepackage{tikz-cd} 5 | \usepackage{adjustbox} 6 | \usepackage{quiver} 7 | \newcommand{\Ker}{\mathrm{Ker}} 8 | \usepackage{mathpartir} 9 | \usepackage{fullshort} 10 | \usepackage{color} 11 | \usepackage{caption} 12 | 13 | \newtheorem{theorem}{Theorem} 14 | \newtheorem{example}[theorem]{Example} 15 | \newtheorem{lemma}[theorem]{Lemma} 16 | \newtheorem{definition}[theorem]{Definition} 17 | \newtheorem{corollary}[theorem]{Corollary} 18 | \newtheorem{proposition}[theorem]{Proposition} 19 | 20 | % to comment for the short version 21 | \setboolean{fullpaper}{true} 22 | 23 | \newcommand{\pullbackcorner}[1][dl]{\save*!/#1-1pc/#1:(-1,1)@^{|-}\restore} 24 | \newcommand{\commentaire}[1]{} 25 | \usepackage{cmll} 26 | 27 | \makeatother 28 | \newcommand{\coeqr}[2]{\ar@<+.5ex>[r]^-{#1}\ar@<-.5ex>[r]_-{#2}} 29 | \makeatletter 30 | \newcommand{\hautr}[1]{\ar[r]^-{#1}} 31 | \newcommand{\hautl}[1]{\ar[l]_-{#1}} 32 | 33 | 34 | 
\newcommand{\vecif}[2]{\ifthenelse{\equal{#1}{}}{#2}{\vec{#2}}} 35 | 36 | %\newref{rem}{refcmd={Remark~\ref{#1}}} 37 | \newrefformat{rem}{Remark~\ref{#1}} 38 | %\newref{def}{refcmd={Definition~\ref{#1}}} 39 | \newrefformat{def}{Definition~\ref{#1}} 40 | %\newref{assu}{refcmd={Property~\ref{#1}}} 41 | \newrefformat{assu}{Property~\ref{#1}} 42 | %\newref{cor}{refcmd={Corollary~\ref{#1}}} 43 | %\newref{lem}{refcmd={Lemma~\ref{#1}}} 44 | %\newref{prop}{refcmd={Proposition~\ref{#1}}} 45 | %\newref{nota}{refcmd={Notation~\ref{#1}}} 46 | %\newref{tab}{refcmd={Table~\ref{#1}}} 47 | %\newref{subsec}{refcmd={Section~\S\ref{#1}}} 48 | 49 | %%% 50 | %% from the paper quantum lambda 51 | \usepackage{proof} 52 | 53 | \newcommand{\ass}{:=} 54 | \newcommand{\abs}[1]{|#1|} 55 | \newcommand{\homof}[2]{#1(#2)} 56 | \newcommand{\seq}{\subseteq} 57 | \newcommand{\such}{\,\,|\,\,} 58 | \newcommand{\pc}{\mathbin{;}} 59 | \newcommand{\id}{{\textrm{\rm id}}} 60 | \newcommand{\N}{\mathbb{N}} 61 | \newcommand{\categ}[1]{\textrm{\bf #1}} 62 | \newcommand{\FV}{{\rm FV}} 63 | \newcommand{\bang}{{!}} 64 | \newcommand{\tensor}{\otimes} 65 | \newcommand{\bor}{\ \ \rule[-.75ex]{.01in}{2.75ex}\ \ } 66 | %\newcommand{\C}{\mathbb{C}} 67 | \newcommand{\Rp}{{\mathbb{R}^+}} 68 | \newcommand{\sqleq}{\sqsubseteq} 69 | \newcommand{\sqgeq}{\sqsupseteq} 70 | \newcommand{\bit}{{\bf bit}} 71 | \newcommand{\qubit}{{\bf qubit}} 72 | \newcommand{\xmatrix}[4]{{\renewcommand{\arraystretch}{#1}% 73 | \arraycolsep=#2ex\left(\begin{array}{#3}#4\end{array}\right)}} 74 | \newcommand{\zmatrix}{\xmatrix{0.8}{0.8}} 75 | \newcommand{\zzmatrix}[2]{{\textrm{\scriptsize$\zmatrix{#1}{#2}$}}} 76 | \newcommand{\CPM}{\textrm{\bf CPM}} 77 | \newcommand{\CPMs}{\textrm{\bf CPM}_s} 78 | \newcommand{\inv}{^{-1}} 79 | \newcommand{\freecat}[1][C]{\ensuremath{\categ{#1}^{\oplus}}} 80 | \newcommand{\cpms}{\categ{CPMs}} 81 | \newcommand{\ccpms}{\ensuremath{\overline{\categ{CPMs}}}} 82 | \newcommand{\M}{{\cal M}} 83 | \newcommand{\supp}[1]{\vert #1\vert} 84 | \newcommand{\eq}[2][]{\mathop{eq^{#2}_{#1}}\nolimits} 85 | \newcommand*{\mystackrel}[2]{% 86 | \stackrel{\raise-2pt\hbox{$\scriptstyle\!#1$}}{#2}} 87 | \newcommand*{\redto}[1][]{\mystackrel{#1}{\rightarrow}} 88 | \newcommand*{\xredto}[1][]{% 89 | \xrightarrow{\raisebox{-2pt}{$\scriptstyle\!{#1}$}}} 90 | \newcommand{\qfin}[1]{\mathfrak{#1}} 91 | \newcommand{\web}[1]{\vert{#1}\vert} 92 | \newcommand{\swap}[1][]{\sigma_{#1}} 93 | \newcommand{\proj}[1]{\pi^{#1}} 94 | \newcommand{\inj}[1]{\iota^{#1}} 95 | \newcommand{\unit}{\mathbf{1}} 96 | \newcommand{\Cl}{\mathrm{Cl}} 97 | \newcommand{\Val}{\mathrm{Val}} 98 | \newcommand{\apprv}{\mathrel{\triangleleft}} 99 | \newcommand{\contr}{\mathtt{c}} 100 | \newcommand{\der}{\mathtt{d}} 101 | \newcommand{\weak}{\mathtt{w}} 102 | \newcommand{\dig}{\mathtt{dig}} 103 | \newcommand{\bierman}{\mathtt{m}} 104 | \newcommand{\cmatrix}[1]{\Lambda(#1)} 105 | \newcommand{\injl}[2][]{{\mathtt{in}_\ell^{#1}}~{#2}} 106 | \newcommand{\injr}[2][]{{\mathtt{in}}_r^{#1}~{#2}} 107 | \newcommand{\fprod}[2]{\langle #1\rangle_{#2}} 108 | \newcommand{\fcoprod}[2]{[#1]_{#2}} 109 | \newcommand{\pdistr}{{\mathtt{distr}}} 110 | \newcommand{\symgroup}{S} 111 | \newcommand{\Lowner}{L\"owner} 112 | \newcommand{\tr}{\mathop{\mathrm{\rm tr}}\nolimits} 113 | \newcommand{\cs}{c} 114 | \newcommand{\down}{{\downarrow}} 115 | \newcommand{\define}[1]{{\em #1}} 116 | \newcommand{\punit}{\mathtt{skip}} 117 | \newcommand{\tensterm}[2]{{{#1}\tensor{#2}}} 118 | \newcommand{\lettensterm}[4]{{\mathtt{let}~{#1}% 119 | 
\tensor{#2}~=~{#3}~\mathtt{in}~{#4}}} 120 | \newcommand{\letunitterm}[2]{{#1}\mathtt{;}{#2}} 121 | \newcommand{\ttrue}{\mathtt{tt}} 122 | \newcommand{\ffalse}{\mathtt{ff}} 123 | \newcommand{\iftermx}[3]{{{\mathtt{if}}~{#1}~\mathtt{then}~{#2}~% 124 | \mathtt{else}~{#3}}} 125 | \newcommand{\match}[5]{{{\mathtt{match}}~{#1}~{\mathtt{with}}% 126 | ~({#2:#3}\mid{#4:#5})}} 127 | \newcommand{\letrec}[4]{{{\mathtt{letrec}}~{#1}\,{#2}={#3}~{\mathtt{in}}~{#4}}} 128 | \newcommand{\letrecn}[5]{{{\mathtt{letrec}}^{#1}~{#2}% 129 | \,{#3}={#4}~{\mathtt{in}}~{#5}}} 130 | \newcommand{\nil}{{\mathtt{nil}}} 131 | \newcommand{\cons}[3][]{{{#2}\,{\mathtt :}{\mathtt :}^{#1}\,{#3}}} 132 | \newcommand{\splitlist}[1][]{{\mathtt{split}^{#1}}} 133 | \newcommand{\meas}{{\mathtt{meas}}} 134 | \newcommand{\new}{{\mathtt{new}}} 135 | \newcommand{\loli}{\multimap} 136 | \newcommand{\tlist}[1]{{{#1}^\ell}} 137 | \newcommand{\sumtype}{\oplus} 138 | \newcommand{\tunit}{{1}} 139 | \newcommand{\arity}[1]{d^{#1}} 140 | \newcommand{\symm}[1]{G^{#1}} 141 | \newcommand{\entail}{\vdash} 142 | \newcommand{\am}[1]{{[}{#1}{]}} 143 | \newcommand{\qarray}{q} 144 | \newcommand{\qlist}{\ell} 145 | \newcommand{\ket}[1]{{|{#1}\rangle}} 146 | \newcommand{\dual}[1]{{#1}^{\perp}} 147 | \newcommand{\denot}[1]{{\llbracket #1 \rrbracket}} 148 | 149 | \newcommand{\inferruletwo}[3]{\inferrule{#1 \\ #2}{#3}} 150 | 151 | \newref{rem}{refcmd={Remark~\ref{#1}}} 152 | \newref{def}{refcmd={Definition~\ref{#1}}} 153 | \newref{assu}{refcmd={Property~\ref{#1}}} 154 | \newref{cor}{refcmd={Corollary~\ref{#1}}} 155 | \newref{lem}{refcmd={Lemma~\ref{#1}}} 156 | \newref{prop}{refcmd={Proposition~\ref{#1}}} 157 | \newref{nota}{refcmd={Notation~\ref{#1}}} 158 | % \newref{tab}{refcmd={Table~\ref{#1}}} 159 | \newref{subsec}{refcmd={Section~\S\ref{#1}}} 160 | \newref{app}{refcmd={Appendix~\S\ref{#1}}} 161 | \newref{ex}{refcmd={Example~\ref{#1}}} 162 | \newref{not}{refcmd={Notation~\ref{#1}}} 163 | 164 | \newcommand\mydots{\makebox[1em][c]{.\hfil.\hfil.}} 165 | 166 | 167 | 168 | %%%%%% this was for popl 169 | %%%%%%% 170 | 171 | %% acmart 172 | %% Journal information 173 | %% Supplied to authors by publisher for camera-ready submission; 174 | %% use defaults for review submission. 175 | 176 | % \acmJournal{PACMPL} 177 | % \acmVolume{1} 178 | % \acmNumber{POPL} % CONF = POPL or ICFP or OOPSLA 179 | % \acmArticle{1} 180 | % \acmYear{2025} 181 | % \acmMonth{1} 182 | % \acmDOI{} % \acmDOI{10.1145/nnnnnnn.nnnnnnn} 183 | % \startPage{1} 184 | 185 | % \setcopyright{none} 186 | 187 | %%%%%%% 188 | %%%%%%%%%% 189 | 190 | 191 | % \setcopyright{acmcopyright} 192 | % \copyrightyear{2024} 193 | 194 | % %\citestyle{acmauthoryear} 195 | % \acmDOI{XXXXXXX.XXXXXXX} 196 | 197 | %% These commands are for a PROCEEDINGS abstract or paper. 198 | % \acmConference[Submitted to LICS '24]{Submitted to LICS 199 | % '24}{2024}{Tallinn} 200 | %% 201 | %% Uncomment \acmBooktitle if the title of the proceedings is different 202 | %% from ``Proceedings of ...''! 
203 | %% 204 | %%\acmBooktitle{Woodstock '18: ACM Symposium on Neural Gaze Detection, 205 | %% June 03--05, 2018, Woodstock, NY} 206 | %\acmPrice{gratos} 207 | % \acmISBN{978-1-4503-XXXX-X/18/06} 208 | 209 | 210 | \newcommand{\cal}[1]{\mathcal{#1}} 211 | \usepackage{agda} 212 | %% vskip breaks the latex when occuring in subfigure 213 | %% with the belwo redefinition, it works by prefexing by AgdaNoSpaceAroundCode 214 | % \renewcommand{\Agda@NewlineWithVerticalSpace}[1]{\parskip=0pt\parindent=0pt\par% 215 | % \ifthenelse{\equal{#1}{0pt}}{}{\vskip #1}% 216 | % \noindent} 217 | \usepackage{catchfilebetweentags} 218 | \usepackage[ensuremath]{ebutf8} 219 | \DeclareUnicodeMathCharacter{FE54}{;} 220 | 221 | % \usepackage{newunicodechar} 222 | % \newunicodechar{λ}{\ensuremath{\mathnormal\lambda}} 223 | % \newunicodechar{←}{\ensuremath{\mathnormal\from}} 224 | % \newunicodechar{→}{\ensuremath{\mathnormal\to}} 225 | 226 | \newcommand{\agdalatexdir}{latex-agda} 227 | \newcommand{\agdacode}[1]{% 228 | \ExecuteMetaData[\agdalatexdir/lc.tex]{#1}% 229 | \ExecuteMetaData[\agdalatexdir/lib.tex]{#1}% 230 | \ExecuteMetaData[\agdalatexdir/Common.tex]{#1}% 231 | \ExecuteMetaData[\agdalatexdir/lcsig.tex]{#1}% 232 | \ExecuteMetaData[\agdalatexdir/main.tex]{#1}} 233 | 234 | % \newcommand{\agdacodenovspace}[1]{% 235 | % \AgdaNoSpaceAroundCode{} 236 | % \agdacode{#1} 237 | % \AgdaSpaceAroundCode{}} 238 | 239 | 240 | \newcommand{\texHighlighted}[1]{\colorbox{blue!10}{\ensuremath{#1}}} 241 | \newcommand{\texHighlightedFunction}[1]{\texHighlighted{\textrm{\textcolor{black}{#1}}}} 242 | 243 | \DeclareRobustCommand{\AgdaFormat}[2]{% 244 | \ifthenelse{ 245 | \equal{#1}{commonPositions} \OR 246 | \equal{#1}{commonValues} \OR 247 | \equal{#1}{equaliser} \OR 248 | \equal{#1}{pullback} 249 | }{\texHighlightedFunction{#1}}{% 250 | #2}} 251 | % {\ifthenelse{\equal{#1}{Tm}}{\{#1}}% 252 | % {#2}% 253 | % }}} 254 | % \begin{proof}[Alternative text] does not work with jfp 255 | \newenvironment{proofWithAlternative}[1][Proof]{\par\addvspace{6pt}\noindent\textbf{#1}\hskip5.5pt}{\hfill$\blacksquare$\par\addvspace{6pt}} 256 | -------------------------------------------------------------------------------- /diagrams/coeq-flex-flex.json: -------------------------------------------------------------------------------- 1 | 
{"graph":{"edges":[{"from":0,"id":6,"label":{"label":"Kf","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":1},{"from":1,"id":7,"label":{"label":"\\eta","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":2,"id":8,"label":{"label":"Tin_M","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":3},{"from":0,"id":9,"label":{"label":"Kg","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":4},{"from":4,"id":10,"label":{"label":"\\eta","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":5},{"from":5,"id":11,"label":{"label":"Tin_M","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":3}],"latexPreamble":"","nodes":[{"id":0,"label":{"isMath":true,"label":"KA","pos":[300,300]}},{"id":1,"label":{"isMath":true,"label":"KB","pos":[500,100]}},{"id":2,"label":{"isMath":true,"label":"TKB","pos":[700,100]}},{"id":3,"label":{"isMath":true,"label":"T(KB+\\dots)","pos":[900,300]}},{"id":4,"label":{"isMath":true,"label":"KB","pos":[500,500]}},{"id":5,"label":{"isMath":true,"label":"TKB","pos":[700,500]}}],"sizeGrid":200},"version":5} -------------------------------------------------------------------------------- /diagrams/coeq-flex1.json: -------------------------------------------------------------------------------- 1 | {"graph":{"edges":[{"from":0,"id":4,"label":{"label":"Kf","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":1},{"from":1,"id":5,"label":{"label":"in_1","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":2,"id":6,"label":{"label":"\\eta","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":3},{"from":0,"id":7,"label":{"label":"u","style":{"alignment":"right","bend":0.4,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":3}],"latexPreamble":"","nodes":[{"id":0,"label":{"isMath":true,"label":"KA","pos":[300,300]}},{"id":1,"label":{"isMath":true,"label":"KB","pos":[500,300]}},{"id":2,"label":{"isMath":true,"label":"KB+\\Gamma","pos":[700,300]}},{"id":3,"label":{"isMath":true,"label":"T(KB+C')","pos":[900,300]}}],"sizeGrid":200},"version":5} -------------------------------------------------------------------------------- /diagrams/coeq-pointwise.json: -------------------------------------------------------------------------------- 1 | 
{"graph":{"edges":[{"from":0,"id":11,"label":{"label":"t_1","style":{"alignment":"left","bend":-0.1,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":1},{"from":0,"id":12,"label":{"label":"u_1","style":{"alignment":"right","bend":0.1,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":1},{"from":1,"id":13,"label":{"label":"\\sigma_1","style":{"alignment":"left","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":3,"id":14,"label":{"label":"t_2","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":4},{"from":4,"id":15,"label":{"label":"\\sigma_1","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":5},{"from":3,"id":16,"label":{"label":"u_2","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":6},{"from":6,"id":17,"label":{"label":"\\sigma_1","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":5},{"from":5,"id":18,"label":{"label":"\\sigma_2","style":{"alignment":"left","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":7},{"from":8,"id":19,"label":{"label":"t_1,t_2","style":{"alignment":"left","bend":-0.1,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":9},{"from":8,"id":20,"label":{"label":"u_1,u_2","style":{"alignment":"right","bend":0.1,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":9},{"from":9,"id":21,"label":{"label":"\\sigma_2\\circ \\sigma_1","style":{"alignment":"left","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":10}],"latexPreamble":"","nodes":[{"id":0,"label":{"isMath":true,"label":"A_1","pos":[300,300]}},{"id":1,"label":{"isMath":true,"label":"\\Gamma","pos":[500,300]}},{"id":2,"label":{"isMath":true,"label":"\\Delta_1","pos":[700,300]}},{"id":3,"label":{"isMath":true,"label":"A_2","pos":[900,300]}},{"id":4,"label":{"isMath":true,"label":"\\Gamma","pos":[1100,100]}},{"id":5,"label":{"isMath":true,"label":"\\Delta_1","pos":[1300,300]}},{"id":6,"label":{"isMath":true,"label":"\\Gamma","pos":[1100,500]}},{"id":7,"label":{"isMath":true,"label":"\\Delta_2","pos":[1500,300]}},{"id":8,"label":{"isMath":true,"label":"A_1+A_2","pos":[300,700]}},{"id":9,"label":{"isMath":true,"label":"\\Gamma","pos":[500,700]}},{"id":10,"label":{"isMath":true,"label":"\\Delta_2","pos":[700,700]}}],"sizeGrid":200},"version":5} -------------------------------------------------------------------------------- /diagrams/pushout-coeq-simpl.json: -------------------------------------------------------------------------------- 1 | 
{"graph":{"edges":[{"from":0,"id":9,"label":{"isPullshout":false,"label":"","style":{"alignment":"right","bend":0.2,"color":"black","dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"},"zindex":0},"to":1},{"from":0,"id":10,"label":{"isPullshout":false,"label":"","style":{"alignment":"left","bend":-0.2,"color":"black","dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"},"zindex":0},"to":2},{"from":2,"id":11,"label":{"isPullshout":false,"label":"in_1","style":{"alignment":"left","bend":-0.2,"color":"black","dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"},"zindex":0},"to":3},{"from":1,"id":12,"label":{"isPullshout":false,"label":"in_2","style":{"alignment":"right","bend":0.2,"color":"black","dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"},"zindex":0},"to":3},{"from":4,"id":13,"label":{"isPullshout":false,"label":"","style":{"alignment":"left","bend":0,"color":"black","dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"},"zindex":0},"to":5},{"from":5,"id":14,"label":{"isPullshout":false,"label":"f","style":{"alignment":"left","bend":0,"color":"black","dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"},"zindex":0},"to":6},{"from":4,"id":15,"label":{"isPullshout":false,"label":"","style":{"alignment":"left","bend":0,"color":"black","dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"},"zindex":0},"to":7},{"from":7,"id":16,"label":{"isPullshout":false,"label":"g","style":{"alignment":"right","bend":0,"color":"black","dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"},"zindex":0},"to":6},{"from":3,"id":17,"label":{"isPullshout":false,"label":"f,g","style":{"alignment":"left","bend":0,"color":"black","dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"},"zindex":0},"to":8}],"latexPreamble":"","nodes":[{"id":0,"label":{"isMath":true,"label":"A","pos":[266,70]}},{"id":1,"label":{"isMath":true,"label":"C","pos":[378,98]}},{"id":2,"label":{"isMath":true,"label":"B","pos":[378,42]}},{"id":3,"label":{"isMath":true,"label":"B+C","pos":[490,70]}},{"id":4,"label":{"isMath":true,"label":"A","pos":[70,42]}},{"id":5,"label":{"isMath":true,"label":"B","pos":[154,42]}},{"id":6,"label":{"isMath":true,"label":"D","pos":[154,126]}},{"id":7,"label":{"isMath":true,"label":"C","pos":[70,126]}},{"id":8,"label":{"isMath":true,"label":"D","pos":[602,70]}}],"sizeGrid":28},"version":9} -------------------------------------------------------------------------------- /diagrams/pushout-coeq.json: -------------------------------------------------------------------------------- 1 | 
{"graph":{"edges":[{"from":0,"id":4,"label":{"isPullshout":false,"label":"\\delta","style":{"alignment":"right","bend":0.2,"color":"black","dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"},"zindex":0},"to":1},{"from":0,"id":5,"label":{"isPullshout":false,"label":"\\overline{x}","style":{"alignment":"left","bend":-0.2,"color":"black","dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"},"zindex":0},"to":2},{"from":2,"id":6,"label":{"isPullshout":false,"label":"in_1","style":{"alignment":"left","bend":-0.2,"color":"black","dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"},"zindex":0},"to":3},{"from":1,"id":7,"label":{"isPullshout":false,"label":"in_2","style":{"alignment":"right","bend":0.2,"color":"black","dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"},"zindex":0},"to":3}],"latexPreamble":"","nodes":[{"id":0,"label":{"isMath":true,"label":"\\boldsymbol{\\Gamma'_1}","pos":[42,70]}},{"id":1,"label":{"isMath":true,"label":"\\Gamma","pos":[154,98]}},{"id":2,"label":{"isMath":true,"label":"\\boldsymbol{\\Gamma_2'}","pos":[154,42]}},{"id":3,"label":{"isMath":true,"label":"\\boldsymbol{\\Gamma'_2},\\Gamma","pos":[266,70]}}],"sizeGrid":28},"version":9} -------------------------------------------------------------------------------- /diagrams/pushout-in.json: -------------------------------------------------------------------------------- 1 | {"graph":{"edges":[{"from":0,"id":10,"label":{"label":"g","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":1},{"from":0,"id":11,"label":{"label":"f","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":1,"id":12,"label":{"label":"\\sigma","style":{"alignment":"right","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":3},{"from":2,"id":13,"label":{"label":"u","style":{"alignment":"left","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":3},{"from":4,"id":14,"label":{"label":"g","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":5},{"from":4,"id":15,"label":{"label":"f","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":6},{"from":5,"id":16,"label":{"label":"in_1","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":8},{"from":8,"id":17,"label":{"label":"\\sigma+Y","style":{"alignment":"right","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":7},{"from":6,"id":18,"label":{"label":"u","style":{"alignment":"left","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":9},{"from":9,"id":19,"label":{"label":"in_1","style":{"alignment":"left","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":7}],"latexPreamble":"","nodes":[{"id":0,"label":{"isMath":true,"label":"A","pos":[900,100]}},{"id":1,"label":{"isMath":true,"label":"X","pos":[900,300]}},{"id":2,"label":{"isMath":true,"label":"B","pos":[1100,100]}},{"id":3,"label":{"isMath":true,"label":"Z","pos":[1100,300]}},{"id":4,"label":{"isMath":true,"label":"A","pos":[1300,100]}},{"id":5,"label":{"isMath":true,"label":"X","pos":[1300,300]}},{"id":6,"label":{"isMath":true,"label
":"B","pos":[1500,100]}},{"id":7,"label":{"isMath":true,"label":"Z+Y","pos":[1500,500]}},{"id":8,"label":{"isMath":true,"label":"X+Y","pos":[1300,500]}},{"id":9,"label":{"isMath":true,"label":"Z","pos":[1500,300]}}],"sizeGrid":200},"version":5} -------------------------------------------------------------------------------- /diagrams/pushout-stepwise.json: -------------------------------------------------------------------------------- 1 | {"graph":{"edges":[{"from":0,"id":13,"label":{"label":"t_1,t_2","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":1},{"from":0,"id":14,"label":{"label":"f_1+f_2","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":3,"id":15,"label":{"label":"t_1","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":4},{"from":3,"id":16,"label":{"label":"f_1","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":5},{"from":4,"id":17,"label":{"label":"\\sigma_1","style":{"alignment":"right","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":6},{"from":5,"id":18,"label":{"label":"u_1","style":{"alignment":"left","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":6},{"from":7,"id":19,"label":{"label":"t_2","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":8},{"from":7,"id":20,"label":{"label":"f_2","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":9},{"from":8,"id":21,"label":{"label":"\\sigma_1","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":10},{"from":9,"id":22,"label":{"label":"u_2","style":{"alignment":"left","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":11},{"from":10,"id":23,"label":{"label":"\\sigma_2","style":{"alignment":"right","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":11},{"from":2,"id":24,"label":{"label":"\\sigma_2\\circ u_1, u_2","style":{"alignment":"left","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":12},{"from":1,"id":25,"label":{"label":"\\sigma_2\\circ 
\\sigma_1","style":{"alignment":"right","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":12}],"latexPreamble":"","nodes":[{"id":0,"label":{"isMath":true,"label":"A+B","pos":[1100,100]}},{"id":1,"label":{"isMath":true,"label":"\\Gamma","pos":[1100,300]}},{"id":2,"label":{"isMath":true,"label":"A'+B'","pos":[1500,100]}},{"id":3,"label":{"isMath":true,"label":"A","pos":[300,100]}},{"id":4,"label":{"isMath":true,"label":"\\Gamma","pos":[300,300]}},{"id":5,"label":{"isMath":true,"label":"A'","pos":[500,100]}},{"id":6,"label":{"isMath":true,"label":"\\Delta_1","pos":[500,300]}},{"id":7,"label":{"isMath":true,"label":"B","pos":[700,100]}},{"id":8,"label":{"isMath":true,"label":"\\Gamma","pos":[700,300]}},{"id":9,"label":{"isMath":true,"label":"B'","pos":[900,100]}},{"id":10,"label":{"isMath":true,"label":"\\Delta_1","pos":[700,500]}},{"id":11,"label":{"isMath":true,"label":"\\Delta_2","pos":[900,500]}},{"id":12,"label":{"isMath":true,"label":"\\Delta_2","pos":[1500,300]}}],"sizeGrid":200},"version":5} -------------------------------------------------------------------------------- /diagrams/sizeM-equaliser.json: -------------------------------------------------------------------------------- 1 | {"graph":{"edges":[{"from":0,"id":4,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"hook"}},"to":1},{"from":1,"id":5,"label":{"label":"","style":{"alignment":"left","bend":-0.1,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":2,"id":6,"label":{"label":"0","style":{"alignment":"left","bend":-0.1,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":3},{"from":1,"id":7,"label":{"label":"|-|_M","style":{"alignment":"right","bend":0.2,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":3}],"latexPreamble":"","nodes":[{"id":0,"label":{"isMath":true,"label":"T\\Gamma","pos":[300,300]}},{"id":1,"label":{"isMath":true,"label":"T(\\Gamma,M:b)","pos":[500,300]}},{"id":2,"label":{"isMath":true,"label":"1","pos":[700,100]}},{"id":3,"label":{"isMath":true,"label":"\\mathbb{N}","pos":[900,300]}}],"sizeGrid":200},"version":5} -------------------------------------------------------------------------------- /draft.lyx: -------------------------------------------------------------------------------- 1 | #LyX 2.3 created this file. 
For more info see http://www.lyx.org/ 2 | \lyxformat 544 3 | \begin_document 4 | \begin_header 5 | \save_transient_properties true 6 | \origin unavailable 7 | \textclass jfp 8 | \begin_preamble 9 | \input{common-preamble} 10 | %\newenvironment{remark}{\begin{rem}}{\end{rem}} 11 | %\newenvironment{lemma}{\begin{lem}}{\end{lem}} 12 | %\newenvironment{proposition}{\begin{prop}}{\end{prop}} 13 | %\newenvironment{corollary}{\begin{cor}}{\end{cor}} 14 | %\newenvironment{definition}{\begin{defn}}{\end{defn}} 15 | %\newenvironment{theorem}{\begin{thm}}{\end{thm}} 16 | %\newcommand{\theproposition}{\thethm} 17 | 18 | \setboolean{fullpaper}{false} 19 | 20 | \newcommand{\startappendix}{\appendix} 21 | %\newcommand{\startappendix}{\appendices} 22 | %\newcommand{\secappendix}[1]{\appendix[#1]} 23 | \newcommand{\secappendix}[1]{\section{#1}} 24 | 25 | 26 | % LICS 27 | % \newcommand{\keywords}[1]{\begin{IEEEkeywords}#1\end{IEEEkeywords}} 28 | \end_preamble 29 | \use_default_options false 30 | \begin_modules 31 | enumitem 32 | \end_modules 33 | \maintain_unincluded_children false 34 | \begin_local_layout 35 | #\DeclareLyXModule{Other} 36 | #DescriptionBegin 37 | #Defines Notation 38 | #DescriptionEnd 39 | #Category: theorems 40 | 41 | Format 66 42 | 43 | Requires amsmath 44 | 45 | # acmart document class includes amssymb 46 | Provides amssymb 1 47 | 48 | # The environments defined (regular and starred) are : 49 | # - Property 50 | 51 | Style Notation 52 | CopyStyle Theorem 53 | DependsOn Theorem 54 | LatexName notation 55 | LabelString "Notation \thethm." 56 | Preamble 57 | \AtEndPreamble{% 58 | \theoremstyle{acmdefinition} 59 | \newtheorem{notation}[theorem]{Notation}} 60 | EndPreamble 61 | End 62 | 63 | Style Remark 64 | CopyStyle Theorem 65 | DependsOn Theorem 66 | LatexName remark 67 | LabelString "Remark \thethm." 68 | Preamble 69 | \AtEndPreamble{% 70 | \theoremstyle{acmdefinition} 71 | \newtheorem{remark}[theorem]{Remark}} 72 | EndPreamble 73 | End 74 | 75 | Style Property 76 | CopyStyle Theorem 77 | DependsOn Theorem 78 | LatexName property 79 | LabelString "Property \thethm." 80 | Preamble 81 | \AtEndPreamble{% 82 | \theoremstyle{acmdefinition} 83 | \newtheorem{property}[theorem]{Property}} 84 | EndPreamble 85 | End 86 | 87 | 88 | Style "Personal Question" 89 | CopyStyle Theorem 90 | DependsOn Theorem 91 | LatexName personalquestion 92 | LabelString "Personal Question \thethm." 93 | Preamble 94 | %\newenvironment{personalquestion}{\shortfull{\comment}{\begin{question}\color{red}(personal)}}{\shortfull{\endcomment}{\end{question}}} 95 | \newenvironment{personalquestion}{\comment}{\endcomment} 96 | EndPreamble 97 | End 98 | 99 | 100 | Style "Long Proof" 101 | CopyStyle Proof 102 | DependsOn Proof 103 | LatexName longproof 104 | LabelString "Long proof." 
105 | Preamble 106 | \newenvironment{longproof}{\shortfull{\comment}{\begin{proof}}}{\shortfull{\endcomment}{\end{proof}}} 107 | EndPreamble 108 | End 109 | 110 | Style "Appendix" 111 | CopyStyle Section 112 | DependsOn Section 113 | LatexName secappendix 114 | LabelString "Appendix" 115 | Preamble 116 | EndPreamble 117 | End 118 | \end_local_layout 119 | \language british 120 | \language_package none 121 | \inputencoding utf8 122 | \fontencoding global 123 | \font_roman "default" "default" 124 | \font_sans "default" "default" 125 | \font_typewriter "default" "default" 126 | \font_math "auto" "auto" 127 | \font_default_family default 128 | \use_non_tex_fonts false 129 | \font_sc false 130 | \font_osf false 131 | \font_sf_scale 100 100 132 | \font_tt_scale 100 100 133 | \use_microtype false 134 | \use_dash_ligatures false 135 | \graphics default 136 | \default_output_format default 137 | \output_sync 0 138 | \bibtex_command bibtex 139 | \index_command default 140 | \float_placement tbh 141 | \paperfontsize default 142 | \spacing single 143 | \use_hyperref true 144 | \pdf_title "Your Title" 145 | \pdf_author "Your Name" 146 | \pdf_bookmarks true 147 | \pdf_bookmarksnumbered true 148 | \pdf_bookmarksopen true 149 | \pdf_bookmarksopenlevel 1 150 | \pdf_breaklinks false 151 | \pdf_pdfborder true 152 | \pdf_colorlinks false 153 | \pdf_backref false 154 | \pdf_pdfusetitle false 155 | \pdf_quoted_options "pdfpagelayout=OneColumn, pdfnewwindow=true, pdfstartview=XYZ, plainpages=false" 156 | \papersize default 157 | \use_geometry true 158 | \use_package amsmath 1 159 | \use_package amssymb 1 160 | \use_package cancel 1 161 | \use_package esint 1 162 | \use_package mathdots 1 163 | \use_package mathtools 1 164 | \use_package mhchem 1 165 | \use_package stackrel 1 166 | \use_package stmaryrd 1 167 | \use_package undertilde 1 168 | \cite_engine natbib 169 | \cite_engine_type authoryear 170 | \biblio_style ACM-Reference-Format 171 | \use_bibtopic false 172 | \use_indices false 173 | \paperorientation portrait 174 | \suppress_date false 175 | \justification true 176 | \use_refstyle 0 177 | \use_minted 0 178 | \index Index 179 | \shortcut idx 180 | \color #008000 181 | \end_index 182 | \secnumdepth 3 183 | \tocdepth 3 184 | \paragraph_separation indent 185 | \paragraph_indentation default 186 | \is_math_indent 0 187 | \math_numbering_side default 188 | \quotes_style british 189 | \dynamic_quotes 0 190 | \papercolumns 1 191 | \papersides 1 192 | \paperpagestyle default 193 | \tracking_changes false 194 | \output_changes false 195 | \html_math_output 0 196 | \html_css_as_file 0 197 | \html_be_strict false 198 | \end_header 199 | 200 | \begin_body 201 | 202 | \begin_layout Standard 203 | \begin_inset ERT 204 | status open 205 | 206 | \begin_layout Plain Layout 207 | 208 | \end_layout 209 | 210 | \begin_layout Plain Layout 211 | 212 | %%%%%%%%%%% 213 | \end_layout 214 | 215 | \begin_layout Plain Layout 216 | 217 | % JFP 218 | \end_layout 219 | 220 | \begin_layout Plain Layout 221 | 222 | %%%%%%%%%%%%% 223 | \end_layout 224 | 225 | \begin_layout Plain Layout 226 | 227 | \end_layout 228 | 229 | \begin_layout Plain Layout 230 | 231 | 232 | \backslash 233 | journaltitle{JFP} 234 | \end_layout 235 | 236 | \begin_layout Plain Layout 237 | 238 | 239 | \backslash 240 | cpr{Cambridge University Press} 241 | \end_layout 242 | 243 | \begin_layout Plain Layout 244 | 245 | 246 | \backslash 247 | doival{10.1017/xxxxx} 248 | \end_layout 249 | 250 | \begin_layout Plain Layout 251 | 252 | \end_layout 253 | 254 | \begin_layout 
Plain Layout 255 | 256 | 257 | \backslash 258 | lefttitle{Semantics of pattern unification} 259 | \end_layout 260 | 261 | \begin_layout Plain Layout 262 | 263 | 264 | \backslash 265 | righttitle{Journal of Functional Programming} 266 | \end_layout 267 | 268 | \begin_layout Plain Layout 269 | 270 | \end_layout 271 | 272 | \begin_layout Plain Layout 273 | 274 | 275 | \backslash 276 | totalpg{ 277 | \backslash 278 | pageref{lastpage01}} 279 | \end_layout 280 | 281 | \begin_layout Plain Layout 282 | 283 | 284 | \backslash 285 | jnlDoiYr{2022} 286 | \end_layout 287 | 288 | \begin_layout Plain Layout 289 | 290 | \end_layout 291 | 292 | \begin_layout Plain Layout 293 | 294 | 295 | \backslash 296 | begin{authgrp} 297 | \end_layout 298 | 299 | \begin_layout Plain Layout 300 | 301 | 302 | \backslash 303 | author{Ambroise Lafont} 304 | \end_layout 305 | 306 | \begin_layout Plain Layout 307 | 308 | 309 | \backslash 310 | affiliation{Ecole Polytechnique, Palaiseau, France 311 | \end_layout 312 | 313 | \begin_layout Plain Layout 314 | 315 | ( 316 | \backslash 317 | email{ambroise.lafont@polytechnique.edu})} 318 | \end_layout 319 | 320 | \begin_layout Plain Layout 321 | 322 | 323 | \backslash 324 | author{Neel Krishnaswami} 325 | \end_layout 326 | 327 | \begin_layout Plain Layout 328 | 329 | 330 | \backslash 331 | affiliation{University of Cambridge, Cambridge, UK 332 | \end_layout 333 | 334 | \begin_layout Plain Layout 335 | 336 | ( 337 | \backslash 338 | email{nk480@cl.cam.ac.uk})} 339 | \end_layout 340 | 341 | \begin_layout Plain Layout 342 | 343 | 344 | \backslash 345 | end{authgrp} 346 | \end_layout 347 | 348 | \begin_layout Plain Layout 349 | 350 | \end_layout 351 | 352 | \begin_layout Plain Layout 353 | 354 | %%%%%%%%%%% 355 | \end_layout 356 | 357 | \begin_layout Plain Layout 358 | 359 | % End of JFP 360 | \end_layout 361 | 362 | \begin_layout Plain Layout 363 | 364 | %%%%%%%%%%%%% 365 | \end_layout 366 | 367 | \end_inset 368 | 369 | 370 | \end_layout 371 | 372 | \begin_layout Standard 373 | \begin_inset CommandInset include 374 | LatexCommand input 375 | filename "core.lyx" 376 | 377 | \end_inset 378 | 379 | 380 | \end_layout 381 | 382 | \end_body 383 | \end_document 384 | -------------------------------------------------------------------------------- /draft.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/amblafont/unification/7d658c077f2582d018806136dc956b35632454dd/draft.pdf -------------------------------------------------------------------------------- /fullshort.sty: -------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | %% %% 3 | %% Version control %% 4 | %% %% 5 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 6 | 7 | \RequirePackage{ifthen} 8 | \newboolean{fullpaper} 9 | \RequirePackage{moreverb} 10 | \newenvironment{full} 11 | {\ifthenelse{\boolean{fullpaper}}{}{\comment}} 12 | {\ifthenelse{\boolean{fullpaper}}{}{\endcomment}} 13 | \newenvironment{short} 14 | {\ifthenelse{\boolean{fullpaper}}{\comment}{}} 15 | {\ifthenelse{\boolean{fullpaper}}{\endcomment}{}} 16 | 17 | \newcommand{\ifshort}[1]{\ifthenelse{\boolean{fullpaper}}{}{#1}} 18 | \newcommand{\iffull}[1]{\ifthenelse{\boolean{fullpaper}}{#1}{}} 19 | \newcommand{\shortfull}[2]{\ifthenelse{\boolean{fullpaper}}{#2}{#1}} 20 | \newcommand{\fullshort}[2]{\ifthenelse{\boolean{fullpaper}}{#1}{#2}} 21 | 22 | %%% Local Variables: 23 | %%% mode: latex 24 | %%% TeX-master: t 25 | %%% 
End: 26 | -------------------------------------------------------------------------------- /jfp.layout: -------------------------------------------------------------------------------- 1 | #\DeclareLaTeXClass[jfp]{Journal of functional programming} 2 | 3 | Input acmart.layout 4 | 5 | Style Proof 6 | Category Reasoning 7 | Margin First_Dynamic 8 | LatexType Environment 9 | LatexName proofWithAlternative 10 | NextNoIndent 1 11 | ResetArgs 1 12 | Argument 1 13 | LabelString "Alternative Proof String" 14 | Tooltip "Alternative proof string" 15 | EndArgument 16 | LabelSep xx 17 | ParIndent MMM 18 | ParSkip 0.4 19 | ItemSep 0.2 20 | TopSep 0.7 21 | BottomSep 0.7 22 | ParSep 0.3 23 | Align Block 24 | AlignPossible Block, Left 25 | LabelType Static 26 | LabelString "Proof." 27 | EndLabelType Box 28 | Font 29 | Shape Up 30 | Size Normal 31 | EndFont 32 | LabelFont 33 | Shape Italic 34 | EndFont 35 | End -------------------------------------------------------------------------------- /latex-agda/lcsig.tex: -------------------------------------------------------------------------------- 1 | \begin{code}% 2 | \>[0]\AgdaKeyword{module}\AgdaSpace{}% 3 | \AgdaModule{lcsig}\AgdaSpace{}% 4 | \AgdaKeyword{where}\<% 5 | \\ 6 | % 7 | \\[\AgdaEmptyExtraSkip]% 8 | \>[0]\AgdaKeyword{open}\AgdaSpace{}% 9 | \AgdaKeyword{import}\AgdaSpace{}% 10 | \AgdaModule{lib}\<% 11 | \\ 12 | \>[0]\AgdaKeyword{open}\AgdaSpace{}% 13 | \AgdaKeyword{import}\AgdaSpace{}% 14 | \AgdaModule{Agda.Primitive}\<% 15 | \\ 16 | \>[0]\AgdaKeyword{open}\AgdaSpace{}% 17 | \AgdaKeyword{import}\AgdaSpace{}% 18 | \AgdaModule{main}\AgdaSpace{}% 19 | \AgdaKeyword{using}\AgdaSpace{}% 20 | \AgdaSymbol{(}\AgdaRecord{Signature}\AgdaSpace{}% 21 | \AgdaSymbol{;}\AgdaSpace{}% 22 | \AgdaRecord{isFriendly}\AgdaSymbol{)}\<% 23 | \\ 24 | \>[0]\AgdaKeyword{open}\AgdaSpace{}% 25 | \AgdaKeyword{import}\AgdaSpace{}% 26 | \AgdaModule{Data.Vec.Base}\AgdaSpace{}% 27 | \AgdaSymbol{as}\AgdaSpace{}% 28 | \AgdaModule{Vec}\AgdaSpace{}% 29 | \AgdaKeyword{using}\AgdaSpace{}% 30 | \AgdaSymbol{(}\AgdaDatatype{Vec}\AgdaSymbol{)}\<% 31 | \\ 32 | \>[0]\AgdaKeyword{open}\AgdaSpace{}% 33 | \AgdaKeyword{import}\AgdaSpace{}% 34 | \AgdaModule{Data.Nat}\AgdaSpace{}% 35 | \AgdaSymbol{as}\AgdaSpace{}% 36 | \AgdaModule{ℕ}\AgdaSpace{}% 37 | \AgdaKeyword{using}\AgdaSpace{}% 38 | \AgdaSymbol{(}\AgdaDatatype{ℕ}\AgdaSymbol{;}\AgdaSpace{}% 39 | \AgdaOperator{\AgdaPrimitive{\AgdaUnderscore{}+\AgdaUnderscore{}}}\AgdaSymbol{)}\<% 40 | \\ 41 | \>[0]\AgdaKeyword{open}\AgdaSpace{}% 42 | \AgdaKeyword{import}\AgdaSpace{}% 43 | \AgdaModule{Data.Fin}\AgdaSpace{}% 44 | \AgdaSymbol{as}\AgdaSpace{}% 45 | \AgdaModule{Fin}\AgdaSpace{}% 46 | \AgdaKeyword{using}\AgdaSpace{}% 47 | \AgdaSymbol{(}\AgdaDatatype{Fin}\AgdaSymbol{)}\<% 48 | \\ 49 | \>[0]\AgdaKeyword{open}\AgdaSpace{}% 50 | \AgdaKeyword{import}\AgdaSpace{}% 51 | \AgdaModule{Data.List}\AgdaSpace{}% 52 | \AgdaSymbol{as}\AgdaSpace{}% 53 | \AgdaModule{List}\AgdaSpace{}% 54 | \AgdaKeyword{hiding}\AgdaSpace{}% 55 | \AgdaSymbol{(}\AgdaFunction{map}\AgdaSpace{}% 56 | \AgdaSymbol{;}\AgdaSpace{}% 57 | \AgdaOperator{\AgdaFunction{[\AgdaUnderscore{}]}}\AgdaSpace{}% 58 | \AgdaSymbol{;}\AgdaSpace{}% 59 | \AgdaFunction{lookup}\AgdaSymbol{)}\<% 60 | \\ 61 | \>[0]\AgdaKeyword{open}\AgdaSpace{}% 62 | \AgdaKeyword{import}\AgdaSpace{}% 63 | \AgdaModule{Data.List.Relation.Binary.Pointwise}\AgdaSpace{}% 64 | \AgdaKeyword{using}\AgdaSpace{}% 65 | \AgdaSymbol{(}\AgdaDatatype{Pointwise}\AgdaSpace{}% 66 | \AgdaSymbol{;}\AgdaSpace{}% 67 | 
\AgdaInductiveConstructor{[]}\AgdaSpace{}% 68 | \AgdaSymbol{;}\AgdaSpace{}% 69 | \AgdaOperator{\AgdaInductiveConstructor{\AgdaUnderscore{}∷\AgdaUnderscore{}}}\AgdaSymbol{)}\<% 70 | \\ 71 | \>[0]\AgdaKeyword{import}\AgdaSpace{}% 72 | \AgdaModule{lc}\<% 73 | \\ 74 | \>[0]\AgdaKeyword{open}\AgdaSpace{}% 75 | \AgdaKeyword{import}\AgdaSpace{}% 76 | \AgdaModule{lc}\AgdaSpace{}% 77 | \AgdaKeyword{using}\AgdaSpace{}% 78 | \AgdaSymbol{(}\AgdaFunction{hom}\AgdaSpace{}% 79 | \AgdaSymbol{;}\AgdaSpace{}% 80 | \AgdaOperator{\AgdaFunction{\AgdaUnderscore{}↑}}\AgdaSymbol{)}\<% 81 | \end{code} 82 | %<*lc-sig> 83 | \begin{code}% 84 | \>[0]\AgdaKeyword{data}\AgdaSpace{}% 85 | \AgdaDatatype{O}\AgdaSpace{}% 86 | \AgdaBound{n}\AgdaSpace{}% 87 | \AgdaSymbol{:}\AgdaSpace{}% 88 | \AgdaPrimitive{Set}\AgdaSpace{}% 89 | \AgdaKeyword{where}\<% 90 | \\ 91 | \>[0][@{}l@{\AgdaIndent{0}}]% 92 | \>[3]\AgdaInductiveConstructor{Var}\AgdaSpace{}% 93 | \AgdaSymbol{:}\AgdaSpace{}% 94 | \AgdaDatatype{Fin}\AgdaSpace{}% 95 | \AgdaBound{n}\AgdaSpace{}% 96 | \AgdaSymbol{→}\AgdaSpace{}% 97 | \AgdaDatatype{O}\AgdaSpace{}% 98 | \AgdaBound{n}\<% 99 | \\ 100 | % 101 | \>[3]\AgdaInductiveConstructor{App}\AgdaSpace{}% 102 | \AgdaSymbol{:}\AgdaSpace{}% 103 | \AgdaDatatype{O}\AgdaSpace{}% 104 | \AgdaBound{n}\<% 105 | \\ 106 | % 107 | \>[3]\AgdaInductiveConstructor{Lam}\AgdaSpace{}% 108 | \AgdaSymbol{:}\AgdaSpace{}% 109 | \AgdaDatatype{O}\AgdaSpace{}% 110 | \AgdaBound{n}\<% 111 | \\ 112 | % 113 | \\[\AgdaEmptyExtraSkip]% 114 | \>[0]\AgdaFunction{α}\AgdaSpace{}% 115 | \AgdaSymbol{:}\AgdaSpace{}% 116 | \AgdaSymbol{\{}\AgdaBound{n}\AgdaSpace{}% 117 | \AgdaSymbol{:}\AgdaSpace{}% 118 | \AgdaDatatype{ℕ}\AgdaSymbol{\}}\AgdaSpace{}% 119 | \AgdaSymbol{→}\AgdaSpace{}% 120 | \AgdaDatatype{O}\AgdaSpace{}% 121 | \AgdaBound{n}\AgdaSpace{}% 122 | \AgdaSymbol{→}\AgdaSpace{}% 123 | \AgdaDatatype{List}\AgdaSpace{}% 124 | \AgdaDatatype{ℕ}\<% 125 | \\ 126 | \>[0]\AgdaFunction{α}\AgdaSpace{}% 127 | \AgdaSymbol{(}\AgdaInductiveConstructor{Var}\AgdaSpace{}% 128 | \AgdaBound{x}\AgdaSymbol{)}\AgdaSpace{}% 129 | \AgdaSymbol{=}\AgdaSpace{}% 130 | \AgdaInductiveConstructor{[]}\<% 131 | \\ 132 | \>[0]\AgdaFunction{α}\AgdaSpace{}% 133 | \AgdaSymbol{\{}\AgdaBound{n}\AgdaSymbol{\}}\AgdaSpace{}% 134 | \AgdaInductiveConstructor{App}\AgdaSpace{}% 135 | \AgdaSymbol{=}\AgdaSpace{}% 136 | \AgdaBound{n}\AgdaSpace{}% 137 | \AgdaOperator{\AgdaInductiveConstructor{∷}}\AgdaSpace{}% 138 | \AgdaBound{n}\AgdaSpace{}% 139 | \AgdaOperator{\AgdaInductiveConstructor{∷}}\AgdaSpace{}% 140 | \AgdaInductiveConstructor{[]}\<% 141 | \\ 142 | \>[0]\AgdaFunction{α}\AgdaSpace{}% 143 | \AgdaSymbol{\{}\AgdaBound{n}\AgdaSymbol{\}}\AgdaSpace{}% 144 | \AgdaInductiveConstructor{Lam}\AgdaSpace{}% 145 | \AgdaSymbol{=}\AgdaSpace{}% 146 | \AgdaNumber{1}\AgdaSpace{}% 147 | \AgdaOperator{\AgdaPrimitive{+}}\AgdaSpace{}% 148 | \AgdaBound{n}\AgdaSpace{}% 149 | \AgdaOperator{\AgdaInductiveConstructor{∷}}\AgdaSpace{}% 150 | \AgdaInductiveConstructor{[]}\<% 151 | \\ 152 | % 153 | \\[\AgdaEmptyExtraSkip]% 154 | \>[0]\AgdaOperator{\AgdaFunction{\AgdaUnderscore{}{\AgdaUnderscore{}}}}\AgdaSpace{}% 155 | \AgdaSymbol{:}\AgdaSpace{}% 156 | \AgdaSymbol{∀}\AgdaSpace{}% 157 | \AgdaSymbol{\{}\AgdaBound{a}\AgdaSpace{}% 158 | \AgdaBound{b}\AgdaSpace{}% 159 | \AgdaSymbol{:}\AgdaSpace{}% 160 | \AgdaDatatype{ℕ}\AgdaSymbol{\}}\AgdaSpace{}% 161 | \AgdaSymbol{→}\AgdaSpace{}% 162 | \AgdaDatatype{O}\AgdaSpace{}% 163 | \AgdaBound{a}\AgdaSpace{}% 164 | \AgdaSymbol{→}\AgdaSpace{}% 165 | \AgdaFunction{hom}\AgdaSpace{}% 166 | 
\AgdaBound{a}\AgdaSpace{}% 167 | \AgdaBound{b}\AgdaSpace{}% 168 | \AgdaSymbol{→}\AgdaSpace{}% 169 | \AgdaDatatype{O}\AgdaSpace{}% 170 | \AgdaBound{b}\<% 171 | \\ 172 | \>[0]\AgdaInductiveConstructor{Var}\AgdaSpace{}% 173 | \AgdaBound{x}\AgdaSpace{}% 174 | \AgdaOperator{\AgdaFunction{{}}\AgdaSpace{}% 175 | \AgdaBound{s}\AgdaSpace{}% 176 | \AgdaOperator{\AgdaFunction{}}}\AgdaSpace{}% 177 | \AgdaSymbol{=}\AgdaSpace{}% 178 | \AgdaInductiveConstructor{Var}\AgdaSpace{}% 179 | \AgdaSymbol{(}\AgdaFunction{Vec.lookup}\AgdaSpace{}% 180 | \AgdaBound{s}\AgdaSpace{}% 181 | \AgdaBound{x}\AgdaSymbol{)}\<% 182 | \\ 183 | \>[0]\AgdaInductiveConstructor{App}\AgdaSpace{}% 184 | \AgdaOperator{\AgdaFunction{{}}\AgdaSpace{}% 185 | \AgdaBound{s}\AgdaSpace{}% 186 | \AgdaOperator{\AgdaFunction{}}}\AgdaSpace{}% 187 | \AgdaSymbol{=}\AgdaSpace{}% 188 | \AgdaInductiveConstructor{App}\<% 189 | \\ 190 | \>[0]\AgdaInductiveConstructor{Lam}\AgdaSpace{}% 191 | \AgdaOperator{\AgdaFunction{{}}\AgdaSpace{}% 192 | \AgdaBound{s}\AgdaSpace{}% 193 | \AgdaOperator{\AgdaFunction{}}}\AgdaSpace{}% 194 | \AgdaSymbol{=}\AgdaSpace{}% 195 | \AgdaInductiveConstructor{Lam}\<% 196 | \\ 197 | % 198 | \\[\AgdaEmptyExtraSkip]% 199 | \>[0]\AgdaComment{--\ Pointwise\ hom\ [a₁,\ ⋯,\ aₙ]\ [b₁,\ ⋯,\ bₙ]\ is\ the\ type\ of\ the}\<% 200 | \\ 201 | \>[0]\AgdaComment{--\ lists\ of\ the\ shape\ [c₁,\ ⋯,\ cₙ]\ with\ c­ᵢ\ :\ hom\ aᵢ\ bᵢ}\<% 202 | \\ 203 | \>[0]\AgdaOperator{\AgdaFunction{\AgdaUnderscore{}\textasciicircum{}\AgdaUnderscore{}}}\AgdaSpace{}% 204 | \AgdaSymbol{:}\AgdaSpace{}% 205 | \AgdaSymbol{\{}\AgdaBound{a}\AgdaSpace{}% 206 | \AgdaBound{b}\AgdaSpace{}% 207 | \AgdaSymbol{:}\AgdaSpace{}% 208 | \AgdaDatatype{ℕ}\AgdaSymbol{\}}\AgdaSpace{}% 209 | \AgdaSymbol{(}\AgdaBound{x}\AgdaSpace{}% 210 | \AgdaSymbol{:}\AgdaSpace{}% 211 | \AgdaFunction{hom}\AgdaSpace{}% 212 | \AgdaBound{a}\AgdaSpace{}% 213 | \AgdaBound{b}\AgdaSymbol{)}\AgdaSpace{}% 214 | \AgdaSymbol{(}\AgdaBound{o}\AgdaSpace{}% 215 | \AgdaSymbol{:}\AgdaSpace{}% 216 | \AgdaDatatype{O}\AgdaSpace{}% 217 | \AgdaBound{a}\AgdaSymbol{)}\AgdaSpace{}% 218 | \AgdaSymbol{→}\AgdaSpace{}% 219 | \AgdaDatatype{Pointwise}\AgdaSpace{}% 220 | \AgdaFunction{hom}\AgdaSpace{}% 221 | \AgdaSymbol{(}\AgdaFunction{α}\AgdaSpace{}% 222 | \AgdaBound{o}\AgdaSymbol{)}\AgdaSpace{}% 223 | \AgdaSymbol{(}\AgdaFunction{α}\AgdaSpace{}% 224 | \AgdaSymbol{(}\AgdaBound{o}\AgdaSpace{}% 225 | \AgdaOperator{\AgdaFunction{{}}\AgdaSpace{}% 226 | \AgdaBound{x}\AgdaSpace{}% 227 | \AgdaOperator{\AgdaFunction{}}}\AgdaSymbol{))}\<% 228 | \\ 229 | \>[0]\AgdaBound{x}\AgdaSpace{}% 230 | \AgdaOperator{\AgdaFunction{\textasciicircum{}}}\AgdaSpace{}% 231 | \AgdaInductiveConstructor{Var}\AgdaSpace{}% 232 | \AgdaBound{y}\AgdaSpace{}% 233 | \AgdaSymbol{=}\AgdaSpace{}% 234 | \AgdaInductiveConstructor{[]}\<% 235 | \\ 236 | \>[0]\AgdaBound{x}\AgdaSpace{}% 237 | \AgdaOperator{\AgdaFunction{\textasciicircum{}}}\AgdaSpace{}% 238 | \AgdaInductiveConstructor{App}\AgdaSpace{}% 239 | \AgdaSymbol{=}\AgdaSpace{}% 240 | \AgdaBound{x}\AgdaSpace{}% 241 | \AgdaOperator{\AgdaInductiveConstructor{∷}}\AgdaSpace{}% 242 | \AgdaBound{x}\AgdaSpace{}% 243 | \AgdaOperator{\AgdaInductiveConstructor{∷}}\AgdaSpace{}% 244 | \AgdaInductiveConstructor{[]}\<% 245 | \\ 246 | \>[0]\AgdaBound{x}\AgdaSpace{}% 247 | \AgdaOperator{\AgdaFunction{\textasciicircum{}}}\AgdaSpace{}% 248 | \AgdaInductiveConstructor{Lam}\AgdaSpace{}% 249 | \AgdaSymbol{=}\AgdaSpace{}% 250 | \AgdaSymbol{(}\AgdaBound{x}\AgdaSpace{}% 251 | \AgdaOperator{\AgdaFunction{↑}}\AgdaSymbol{)}\AgdaSpace{}% 252 | 
\AgdaOperator{\AgdaInductiveConstructor{∷}}\AgdaSpace{}% 253 | \AgdaInductiveConstructor{[]}\<% 254 | \end{code} 255 | % 256 | \begin{code}% 257 | \>[0]\AgdaFunction{signature}\AgdaSpace{}% 258 | \AgdaSymbol{:}\AgdaSpace{}% 259 | \AgdaRecord{Signature}\AgdaSpace{}% 260 | \AgdaPrimitive{lzero}\AgdaSpace{}% 261 | \AgdaPrimitive{lzero}\AgdaSpace{}% 262 | \AgdaPrimitive{lzero}\<% 263 | \\ 264 | \>[0]\AgdaFunction{signature}\AgdaSpace{}% 265 | \AgdaSymbol{=}\AgdaSpace{}% 266 | \AgdaKeyword{record}% 267 | \>[189I]\AgdaSymbol{\{}\AgdaSpace{}% 268 | \AgdaField{A}\AgdaSpace{}% 269 | \AgdaSymbol{=}\AgdaSpace{}% 270 | \AgdaDatatype{ℕ}\AgdaSymbol{;}\<% 271 | \\ 272 | \>[189I][@{}l@{\AgdaIndent{0}}]% 273 | \>[20]\AgdaField{hom}\AgdaSpace{}% 274 | \AgdaSymbol{=}\AgdaSpace{}% 275 | \AgdaFunction{hom}\AgdaSymbol{;}\<% 276 | \\ 277 | % 278 | \>[20]\AgdaField{id}\AgdaSpace{}% 279 | \AgdaSymbol{=}\AgdaSpace{}% 280 | \AgdaFunction{lc.id}\AgdaSymbol{;}\<% 281 | \\ 282 | % 283 | \>[20]\AgdaOperator{\AgdaField{\AgdaUnderscore{}∘\AgdaUnderscore{}}}\AgdaSpace{}% 284 | \AgdaSymbol{=}\AgdaSpace{}% 285 | \AgdaOperator{\AgdaFunction{lc.\AgdaUnderscore{}∘\AgdaUnderscore{}}}\AgdaSymbol{;}\<% 286 | \\ 287 | % 288 | \>[20]\AgdaField{O}\AgdaSpace{}% 289 | \AgdaSymbol{=}\AgdaSpace{}% 290 | \AgdaDatatype{O}\AgdaSymbol{;}\<% 291 | \\ 292 | % 293 | \>[20]\AgdaField{α}\AgdaSpace{}% 294 | \AgdaSymbol{=}\AgdaSpace{}% 295 | \AgdaFunction{α}\AgdaSymbol{;}\<% 296 | \\ 297 | % 298 | \>[20]\AgdaOperator{\AgdaField{\AgdaUnderscore{}{\AgdaUnderscore{}}}}\AgdaSpace{}% 299 | \AgdaSymbol{=}\AgdaSpace{}% 300 | \AgdaOperator{\AgdaFunction{\AgdaUnderscore{}{\AgdaUnderscore{}}}}\AgdaSymbol{;}\<% 301 | \\ 302 | % 303 | \>[20]\AgdaOperator{\AgdaField{\AgdaUnderscore{}\textasciicircum{}\AgdaUnderscore{}}}\AgdaSpace{}% 304 | \AgdaSymbol{=}\AgdaSpace{}% 305 | \AgdaOperator{\AgdaFunction{\AgdaUnderscore{}\textasciicircum{}\AgdaUnderscore{}}}\AgdaSymbol{\}}\<% 306 | \end{code} -------------------------------------------------------------------------------- /lc.lagda: -------------------------------------------------------------------------------- 1 | \begin{code} 2 | module lc where 3 | 4 | 5 | open import Data.Vec.Base as Vec using (Vec; []; _∷_) 6 | open import Data.Vec.Membership.Propositional renaming (_∈_ to _∈̬_ ) 7 | open import Data.Vec.Membership.Propositional.Properties as VecProp 8 | open import Data.Vec.Relation.Unary.Any as VecAny using (here ; there) 9 | open import Data.Vec.Relation.Unary.Any.Properties as VecProp hiding (map-id) 10 | open import Data.List as List using (List ; [] ; _∷_) 11 | open import Data.List.Membership.Propositional 12 | open import Data.List.Relation.Unary.Any renaming (_─_ to _⑊_ ) 13 | open import Data.Fin as Fin using (Fin) 14 | open import Data.Nat using (ℕ; _≟_ ; _+_) 15 | open import Data.Product as Product using (_,_; Σ; _×_) 16 | open import Data.Maybe.Base using (Maybe) renaming (nothing to ⊥ ; just to ⌊_⌋) 17 | open import Data.Bool.Base 18 | open import Data.Empty using (⊥-elim) 19 | open import Relation.Binary.PropositionalEquality as ≡ using (_≡_ ) renaming (refl to 1ₑ) 20 | import Relation.Unary 21 | open import Relation.Nullary using (yes ; no ; does) 22 | 23 | open import lib 24 | 25 | \end{code} 26 | %<*lc-renamings> 27 | \begin{code} 28 | hom : ℕ → ℕ → Set 29 | hom m n = Vec (Fin n) m 30 | \end{code} 31 | % 32 | \begin{code} 33 | 34 | 35 | \end{code} 36 | %<*compose-renamings> 37 | \begin{code} 38 | _∘_ : ∀ {p q r} → hom q r → hom p q → hom p r 39 | xs ∘ ys = Vec.map (Vec.lookup xs) ys 40 | 
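-- Editor's note (hedged sketch, comments only, not type-checked here):
-- since hom m n = Vec (Fin n) m, a renaming sends position i of the source
-- context to entry i of the vector, and xs ∘ ys applies ys first and xs
-- second.  For instance, for the swap s = 1 ∷ 0 ∷ [] : hom 2 2 (numerals
-- standing for Fin constructors), s ∘ s looks each entry of s up in s and
-- yields 0 ∷ 1 ∷ [], i.e. the identity renaming id {2}.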
\end{code} 41 | % 42 | %<*id-renaming> 43 | \begin{code} 44 | id : ∀{n} → hom n n 45 | id {n} = Vec.allFin n 46 | \end{code} 47 | % 48 | %<*wk-renamings> 49 | \begin{code} 50 | _↑ : ∀ {p q} → hom p q → hom (1 + p) (1 + q) 51 | _↑ {p}{q} x = Vec.insertAt (Vec.map Fin.inject₁ x) 52 | (Fin.fromℕ p) (Fin.fromℕ q) 53 | \end{code} 54 | % 55 | \begin{code} 56 | 57 | _{_} : ∀ {n p} → Fin n → hom n p → Fin p 58 | i { x } = Vec.lookup x i 59 | 60 | _{_}⁻¹ : ∀ {n p}(x : Fin p) → ∀ (f : hom n p) → Maybe (pre-image (_{ f }) x) 61 | i { x }⁻¹ = MoreVec.lookup⁻¹ Fin._≟_ i x 62 | 63 | {- ---------------------- 64 | 65 | Common positions 66 | 67 | -------------------------- -} 68 | \end{code} 69 | %<*common-positions> 70 | \begin{code} 71 | commonPositions : ∀ {m n} → (x y : hom m n) → Σ ℕ (λ p → hom p m) 72 | commonPositions [] [] = 0 , [] 73 | commonPositions {ℕ.suc m} (x₀ ∷ x) (y₀ ∷ y) = 74 | let p , z = commonPositions x y in 75 | let z' = Vec.map Fin.suc z in 76 | if does (x₀ Fin.≟ y₀) then 77 | 1 + p , Fin.zero ∷ z' 78 | else 79 | p , z' 80 | \end{code} 81 | % 82 | \begin{code} 83 | 84 | -- sanity check: any common position must be in the vector of common positions 85 | commonPositions-property : ∀ {n m i} → (x y : hom m n) → Vec.lookup x i ≡ Vec.lookup y i → 86 | let (p , z) = commonPositions x y in 87 | i ∈̬ z 88 | commonPositions-property {i = i}(x ∷ xs) (y ∷ ys) e' with i | x Fin.≟ y 89 | ... | Fin.zero | no e = ⊥-elim (e e') 90 | ... | Fin.suc j | no e = VecProp.∈-map⁺ Fin.suc (commonPositions-property xs ys e') 91 | ... | Fin.zero | yes e = here 1ₑ 92 | ... | Fin.suc j | yes 1ₑ = there (∈-map⁺ Fin.suc (commonPositions-property xs ys e')) 93 | 94 | 95 | 96 | {- ---------------------- 97 | 98 | Common values 99 | 100 | -------------------------- -} 101 | 102 | 103 | \end{code} 104 | %<*common-values> 105 | \begin{code} 106 | commonValues : ∀ {m m' n} → (x : hom m n) → (y : hom m' n) → Σ ℕ (λ p → hom p m × hom p m') 107 | commonValues [] y = 0 , [] , [] 108 | commonValues {ℕ.suc m } (x₀ ∷ x) y = 109 | let p , l , r = commonValues {m} x y in 110 | let indices = MoreVec.find-indices (λ x' → x' Fin.≟ x₀) y in 111 | -- count is at most 1 for injective renamings 112 | let count = List.length indices in 113 | count + p , 114 | Vec.replicate _ Fin.zero Vec.++ Vec.map Fin.suc l , 115 | Vec.fromList indices Vec.++ r 116 | \end{code} 117 | % 118 | \begin{code} 119 | 120 | -- sanity check: any common value must be in the vectors of common value positions 121 | module _ where 122 | open import Data.Vec.Properties using (lookup-zip ; lookup-replicate ; map-zip ; map-id) 123 | 124 | commonValues-property : ∀ {m m' n v} → (x : hom m n) (y : hom m' n) → (vx : v ∈̬ x) → (vy : v ∈̬ y) → 125 | let p , l , r = commonValues x y in 126 | (VecAny.index vx , VecAny.index vy) ∈̬ Vec.zip l r 127 | commonValues-property .(x ∷ xs) ys (here {x = x} {xs = xs} px) vy 128 | with p , l , r ← commonValues xs ys 129 | | indices ← (MoreVec.find-indices (Fin._≟ x) ys) 130 | | indice∈ ← MoreVec.find-indices-∈ (Fin._≟ x) vy px 131 | = let count = List.length indices in 132 | let vindices = Vec.fromList indices in 133 | ≡.subst₂ (λ a → a ∈̬_ ) eq 134 | (≡.sym (Data.Vec.Properties.zipWith-++ _,_ (Vec.replicate _ Fin.zero) (Vec.map Fin.suc l) vindices r)) 135 | ( 136 | ∈-++⁺ˡ {xs = Vec.zip (Vec.replicate _ Fin.zero) vindices} 137 | (VecProp.∈-lookup (VecAny.index (VecProp.∈-fromList⁺ indice∈)) _) 138 | ) 139 | where 140 | eq : Vec.lookup (Vec.zip (Vec.replicate _ Fin.zero) (Vec.fromList indices) ) 141 | (VecAny.index (VecProp.∈-fromList⁺ 
indice∈)) 142 | ≡ (Fin.zero , VecAny.index vy) 143 | -- eq rewrite MoreVec.index-∈-fromList⁺ indice∈ = ≡.trans 144 | eq = ≡.trans 145 | ( lookup-zip (VecAny.index (VecProp.∈-fromList⁺ indice∈)) (Vec.replicate _ Fin.zero) (Vec.fromList indices) ) 146 | (≡.cong₂ _,_ 147 | (≡.trans 148 | ( ≡.cong (Vec.lookup (Vec.replicate _ Fin.zero)) (VecProp.index-∈-fromList⁺ indice∈) ) 149 | (lookup-replicate (index indice∈) Fin.zero)) 150 | (≡.sym (VecProp.lookup-index (VecProp.∈-fromList⁺ indice∈)))) 151 | 152 | commonValues-property .(_ ∷ _) ys (there {x = x}{xs = xs} vx) vy with 153 | p , l , r ← commonValues xs ys 154 | | indices ← MoreVec.find-indices (Fin._≟ x) ys 155 | | rec ← commonValues-property xs ys vx vy 156 | rewrite Data.Vec.Properties.zipWith-++ _,_ (Vec.replicate _ Fin.zero) (Vec.map Fin.suc l) (Vec.fromList indices) r = 157 | ∈-++⁺ʳ (Vec.zip (Vec.replicate _ Fin.zero) (Vec.fromList indices)) 158 | ( ≡.subst (_ ∈̬_) 159 | (≡.trans (map-zip Fin.suc (λ m → m) _ _) (≡.cong (Vec.zip _) (map-id r))) 160 | (∈-map⁺ (Product.map Fin.suc (λ m → m)) rec) ) 161 | 162 | 163 | {- ---------------------- 164 | 165 | Syntax 166 | 167 | -------------------------- -} 168 | 169 | \end{code} 170 | %<*lc-metacontext> 171 | \begin{code} 172 | MetaContext· = List ℕ 173 | MetaContext = Maybe MetaContext· 174 | \end{code} 175 | % 176 | \begin{code} 177 | Tm· : MetaContext· → ℕ → Set 178 | \end{code} 179 | %<*lc-syntax-decl> 180 | \begin{code} 181 | data Tm : MetaContext → ℕ → Set 182 | Tm· Γ n = Tm ⌊ Γ ⌋ n 183 | \end{code} 184 | % 185 | \begin{code} 186 | \end{code} 187 | %<*lc-syntax-ind> 188 | \begin{code} 189 | data Tm where 190 | App· : ∀ {Γ n} → Tm· Γ n → Tm· Γ n 191 | → Tm· Γ n 192 | Lam· : ∀ {Γ n} → Tm· Γ (1 + n) 193 | → Tm· Γ n 194 | Var· : ∀ {Γ n} → Fin n → Tm· Γ n 195 | _﹙_﹚ : ∀ {Γ n m} → m ∈ Γ → hom m n 196 | → Tm· Γ n 197 | ! : ∀ {n} → Tm ⊥ n 198 | \end{code} 199 | % 200 | %<*lc-syntax-app-decl> 201 | \begin{code} 202 | App : ∀ {Γ n} → Tm Γ n → 203 | Tm Γ n → Tm Γ n 204 | \end{code} 205 | % 206 | %<*lc-syntax-lam-decl> 207 | \begin{code} 208 | Lam : ∀ {Γ n} → Tm Γ (1 + n) 209 | → Tm Γ n 210 | \end{code} 211 | % 212 | %<*lc-syntax-var-decl> 213 | \begin{code} 214 | Var : ∀ {Γ n} → Fin n 215 | → Tm Γ n 216 | \end{code} 217 | % 218 | %<*lc-syntax-app-def> 219 | \begin{code} 220 | App {⊥} ! ! = ! 221 | App {⌊ Γ ⌋} t u = App· t u 222 | \end{code} 223 | % 224 | %<*lc-syntax-lam-def> 225 | \begin{code} 226 | Lam {⊥} ! = ! 227 | Lam {⌊ Γ ⌋} t = Lam· t 228 | \end{code} 229 | % 230 | %<*lc-syntax-var-def> 231 | \begin{code} 232 | Var {⊥} i = ! 233 | Var {⌊ Γ ⌋} i = Var· i 234 | \end{code} 235 | % 236 | \begin{code} 237 | 238 | import Common as C 239 | module Common = C ℕ hom id Tm 240 | open Common.SubstitutionDef public 241 | 242 | {- ---------------------- 243 | 244 | Renaming 245 | 246 | -------------------------- -} 247 | 248 | \end{code} 249 | %<*lc-renaming> 250 | \begin{code} 251 | _❴_❵ : ∀ {Γ n p} → Tm Γ n → hom n p → Tm Γ p 252 | 253 | (App· t u) ❴ x ❵ = App· (t ❴ x ❵) (u ❴ x ❵) 254 | Lam· t ❴ x ❵ = Lam· (t ❴ x ↑ ❵) 255 | Var· i ❴ x ❵ = Var· (i { x }) 256 | M ﹙ y ﹚ ❴ x ❵ = M ﹙ x ∘ y ﹚ 257 | ! ❴ x ❵ = ! 
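-- Renaming is structural: it goes under Lam via _↑, renames object variables by
-- lookup, and on a metavariable use M ﹙ y ﹚ it composes the renamings, yielding M ﹙ x ∘ y ﹚.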
258 | \end{code} 259 | % 260 | \begin{code} 261 | 262 | {- ---------------------- 263 | 264 | Weakening 265 | 266 | -------------------------- -} 267 | wkₜ : ∀ {Γ n m} → Tm· Γ n → Tm· (m ∷ Γ) n 268 | 269 | wkₜ (App· t u) = App· (wkₜ t) (wkₜ u) 270 | wkₜ (Lam· t) = Lam· (wkₜ t) 271 | wkₜ (Var· x) = Var· x 272 | wkₜ (M ﹙ x ﹚) = 1+ M ﹙ x ﹚ 273 | 274 | open Common.wkₛ wkₜ 275 | 276 | {- ---------------------- 277 | 278 | Substitution 279 | 280 | -------------------------- -} 281 | open Common.!ₛ ! public 282 | 283 | \end{code} 284 | %<*lc-substitution> 285 | \begin{code} 286 | _[_]t : ∀ {Γ n} → Tm Γ n → ∀ {Δ} → (Γ ⟶ Δ) → Tm Δ n 287 | (App· t u) [ σ ]t = App (t [ σ ]t) (u [ σ ]t) 288 | Lam· t [ σ ]t = Lam (t [ σ ]t) 289 | Var· i [ σ ]t = Var i 290 | -- _❴_❵ : Tm Γ n → hom n p → Tm Γ p 291 | -- is renaming (code omitted) 292 | M ﹙ x ﹚ [ σ ]t = nth σ M ❴ x ❵ 293 | ! [ 1⊥ ]t = ! 294 | \end{code} 295 | % 296 | \begin{code} 297 | 298 | open Common.-[-]s _[_]t public 299 | open Common.1ₛ wkₜ _﹙_﹚ public 300 | open Common.Substitution wkₜ _﹙_﹚ public 301 | 302 | {- ---------------------- 303 | 304 | Occur check 305 | 306 | -------------------------- -} 307 | 308 | \end{code} 309 | % <*lc-occur-check> 310 | \begin{code} 311 | module _ where 312 | open Data.Maybe.Base using (_>>=_) 313 | infixl 20 _⑊?ₜ_ 314 | _⑊?ₜ_ : ∀ {Γ m a} → Tm· Γ a → (M : m ∈ Γ) → Maybe (Tm· (Γ ⑊ M) a) 315 | Var· i ⑊?ₜ M = ⌊ Var· i ⌋ 316 | App· t u ⑊?ₜ M = do 317 | t' ← t ⑊?ₜ M 318 | u' ← u ⑊?ₜ M 319 | ⌊ App· t' u' ⌋ 320 | Lam· t ⑊?ₜ M = do 321 | t' ← t ⑊?ₜ M 322 | ⌊ Lam· t' ⌋ 323 | M' ﹙ y ﹚ ⑊?ₜ M with M' ⑊? M 324 | ... | ⊥ = ⊥ 325 | ... | ⌊ M' ⌋ = ⌊ M' ﹙ y ﹚ ⌋ 326 | 327 | open Common.occur-cases public 328 | occur-check : ∀ {Γ m n} → (M : m ∈ Γ) → Tm· Γ n → occur-cases M n 329 | occur-check M (M' ﹙ x ﹚) with M' ⑊? M 330 | ... | ⊥ = Same-MVar x 331 | ... | ⌊ M' ⌋ = No-Cycle (M' ﹙ x ﹚) 332 | occur-check M t with t ⑊?ₜ M 333 | ... | ⊥ = Cycle 334 | ... | ⌊ t' ⌋ = No-Cycle t' 335 | \end{code} 336 | % 337 | \begin{code} 338 | 339 | {- ---------------------- 340 | 341 | Pruning 342 | 343 | -------------------------- -} 344 | 345 | open Common.PruneUnifyTypes 346 | pattern _∶_﹙_﹚ M m x = _﹙_﹚ {m = m} M x 347 | 348 | {-# TERMINATING #-} 349 | \end{code} 350 | %<*lc-prune-proto> 351 | \begin{code} 352 | prune : ∀ {Γ m n} → Tm Γ n → hom m n → [ m ]∪ Γ ⟶? 353 | \end{code} 354 | % 355 | %<*prune-app> 356 | \begin{code} 357 | prune (App· t u) x = 358 | let Δ₁ ◄ t' ﹔ σ₁ = prune t x 359 | Δ₂ ◄ u' ﹔ σ₂ = prune (u [ σ₁ ]t) x 360 | in Δ₂ ◄ (App (t' [ σ₂ ]t) u') ﹔ (σ₁ [ σ₂ ]s) 361 | \end{code} 362 | % 363 | %<*prune-lam> 364 | \begin{code} 365 | prune (Lam· t) x = 366 | let Δ ◄ t' ﹔ σ = prune t (x ↑) 367 | in Δ ◄ Lam t' ﹔ σ 368 | \end{code} 369 | % 370 | %<*prune-var> 371 | \begin{code} 372 | prune {Γ} (Var· i) x with i { x }⁻¹ 373 | ... | ⊥ = ⊥ ◄ ! ﹔ !ₛ 374 | ... | ⌊ PreImage j ⌋ = Γ ◄ Var j ﹔ 1ₛ 375 | \end{code} 376 | % 377 | % -- prune {⌊ Γ ⌋} (M ∶ m ﹙ x ﹚) y = 378 | %<*lc-prune-flex> 379 | \begin{code} 380 | prune {⌊ Γ ⌋} (M ﹙ x ﹚) y = 381 | let p , x' , y' = commonValues x y in 382 | ⌊ Γ [ M ∶ p ] ⌋ ◄ (M ∶ p) ﹙ y' ﹚ ﹔ M ↦-﹙ x' ﹚ 383 | \end{code} 384 | % 385 | %<*prune-fail> 386 | \begin{code} 387 | prune ! y = ⊥ ◄ ! ﹔ !ₛ 388 | \end{code} 389 | % 390 | 391 | {- ---------------------- 392 | 393 | Unification 394 | 395 | -------------------------- -} 396 | 397 | \end{code} 398 | %<*lc-unify-flex-proto> 399 | \begin{code} 400 | unify-flex-* : ∀ {Γ m n} → m ∈ Γ → hom m n → Tm· Γ n → Γ ·⟶? 
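-- unify-flex-* M x t unifies the flex term M ﹙ x ﹚ with t; the definition below
-- starts with an occur-check of M in t.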
401 | \end{code} 402 | % 403 | %<*lc-unify-flex-def> 404 | \begin{code} 405 | unify-flex-* {Γ} M x t 406 | with occur-check M t 407 | ... | Same-MVar y = 408 | let p , z = commonPositions x y 409 | in ⌊ Γ [ M ∶ p ] ⌋ ◄ M ↦-﹙ z ﹚ 410 | ... | Cycle = ⊥ ◄ !ₛ 411 | ... | No-Cycle t' = 412 | let Δ ◄ u ﹔ σ = prune t' x 413 | in Δ ◄ M ↦ u , σ 414 | \end{code} 415 | % 416 | \begin{code} 417 | 418 | 419 | {-# TERMINATING #-} 420 | \end{code} 421 | %<*lc-unifyprototype> 422 | \begin{code} 423 | unify : ∀ {Γ n} → Tm Γ n → Tm Γ n → Γ ⟶? 424 | \end{code} 425 | % 426 | %<*unify-flex> 427 | \begin{code} 428 | unify t (M ﹙ x ﹚) = unify-flex-* M x t 429 | unify (M ﹙ x ﹚) t = unify-flex-* M x t 430 | \end{code} 431 | % 432 | %<*unify-app> 433 | \begin{code} 434 | unify (App· t u) (App· t' u') = 435 | let Δ₁ ◄ σ₁ = unify t t' 436 | Δ₂ ◄ σ₂ = unify (u [ σ₁ ]t) (u' [ σ₁ ]t) 437 | in Δ₂ ◄ σ₁ [ σ₂ ]s 438 | \end{code} 439 | % 440 | %<*unify-lam> 441 | \begin{code} 442 | unify (Lam· t) (Lam· t') = unify t t' 443 | \end{code} 444 | % 445 | %<*unify-var> 446 | \begin{code} 447 | unify {Γ} (Var· i) (Var· j) with i Fin.≟ j 448 | ... | no _ = ⊥ ◄ !ₛ 449 | ... | yes _ = Γ ◄ 1ₛ 450 | \end{code} 451 | % 452 | %<*unify-bot> 453 | \begin{code} 454 | unify ! ! = ⊥ ◄ !ₛ 455 | \end{code} 456 | % 457 | %<*unify-last> 458 | \begin{code} 459 | unify _ _ = ⊥ ◄ !ₛ 460 | \end{code} 461 | % 462 | -------------------------------------------------------------------------------- /lcsig.lagda: -------------------------------------------------------------------------------- 1 | \begin{code} 2 | module lcsig where 3 | 4 | open import lib 5 | open import Agda.Primitive 6 | open import main using (Signature ; isFriendly) 7 | open import Data.Vec.Base as Vec using (Vec) 8 | open import Data.Nat as ℕ using (ℕ; _+_) 9 | open import Data.Fin as Fin using (Fin) 10 | open import Data.List as List hiding (map ; [_] ; lookup) 11 | open import Data.List.Relation.Binary.Pointwise using (Pointwise ; [] ; _∷_) 12 | import lc 13 | open import lc using (hom ; _↑) 14 | \end{code} 15 | %<*lc-sig> 16 | \begin{code} 17 | data O n : Set where 18 | Var : Fin n → O n 19 | App : O n 20 | Lam : O n 21 | 22 | α : {n : ℕ} → O n → List ℕ 23 | α (Var x) = [] 24 | α {n} App = n ∷ n ∷ [] 25 | α {n} Lam = 1 + n ∷ [] 26 | 27 | _{_} : ∀ {a b : ℕ} → O a → hom a b → O b 28 | Var x { s } = Var (Vec.lookup s x) 29 | App { s } = App 30 | Lam { s } = Lam 31 | 32 | -- Pointwise hom [a₁, ⋯, aₙ] [b₁, ⋯, bₙ] is the type of the 33 | -- lists of the shape [c₁, ⋯, cₙ] with c­ᵢ : hom aᵢ bᵢ 34 | _^_ : {a b : ℕ} (x : hom a b) (o : O a) → Pointwise hom (α o) (α (o { x })) 35 | x ^ Var y = [] 36 | x ^ App = x ∷ x ∷ [] 37 | x ^ Lam = (x ↑) ∷ [] 38 | \end{code} 39 | % 40 | \begin{code} 41 | signature : Signature lzero lzero lzero 42 | signature = record { A = ℕ; 43 | hom = hom; 44 | id = lc.id; 45 | _∘_ = lc._∘_; 46 | O = O; 47 | α = α; 48 | _{_} = _{_}; 49 | _^_ = _^_} 50 | \end{code} -------------------------------------------------------------------------------- /lib.lagda: -------------------------------------------------------------------------------- 1 | \begin{code} 2 | module lib where 3 | 4 | open import Data.Vec.Base as Vec using (Vec; []; _∷_) 5 | open import Data.Vec.Membership.Propositional using () renaming (_∈_ to _∈̬_ ) 6 | import Data.Vec.Membership.Propositional.Properties as VecProp 7 | import Data.Vec.Relation.Unary.Any.Properties as VecProp 8 | open import Data.Vec.Relation.Unary.Any as VecAny using (here ; there) 9 | open import Data.List as List using (List ; _++_ ; _∷_ 
; [] ) 10 | import Data.List.Properties as ListProp 11 | open import Data.List.Relation.Unary.Any as ListAny using (here ; there ; any ; _∷=_ ) renaming (_─_ to _⑊_ ) 12 | open import Data.List.Membership.Propositional using (_∈_) 13 | open import Data.List.Membership.Propositional.Properties as ListProp 14 | open import Data.List.Membership.Setoid.Properties using (∈-∷=⁺-updated) 15 | open import Relation.Binary.PropositionalEquality as ≡ using (_≡_ ; _≢_ ; setoid) renaming (refl to 1ₑ) 16 | open import Data.Sum using (_⊎_ ; inj₁ ; inj₂) 17 | open import Data.Fin as Fin using (Fin) 18 | open import Data.Fin.Properties using ( suc-injective) 19 | open import Data.Product using (_,_; Σ) 20 | open import Data.Product.Properties using (,-injective) 21 | import Data.Nat as Nat 22 | open import Data.Bool.Base 23 | open import Data.Maybe.Base using (Maybe) renaming (nothing to ⊥ ; just to ⌊_⌋) 24 | open import Relation.Nullary hiding (⌊_⌋) 25 | open import Relation.Nullary.Decidable using (dec-true ; dec-false) 26 | import Relation.Binary 27 | import Relation.Unary 28 | open import Agda.Primitive 29 | 30 | 31 | pre-image : ∀ {i j}{A : Set i}{B : Set j} (f : A → B) → B → Set (i ⊔ j) 32 | pre-image f b = Σ _ (λ a → f a ≡ b) 33 | -- a simple way to (de)construct pre images 34 | pattern PreImage a = a , 1ₑ 35 | 36 | -- simple (de)constructors for _∈_ 37 | pattern Ο {l} = here {xs = l} 1ₑ 38 | pattern 1+ {x}{l} a = there {x}{l} a 39 | 40 | -- Some notations 41 | module _ {i}{A : Set i} where 42 | _[_∶_] : ∀ (Γ : List A) {m} → m ∈ Γ → A → List A 43 | ℓ [ M ∶ m ] = M ∷= m 44 | 45 | _∶_ : ∀ {Γ : List A} {m} → (M : m ∈ Γ) → (a : A) → a ∈ (Γ [ M ∶ a ]) 46 | M ∶ m = ∈-∷=⁺-updated (setoid A) M 47 | 48 | 49 | data _Maybe-⑊_ {ℓ : List A}{a}(a∈ : a ∈ ℓ) : ∀ {a'} → a' ∈ ℓ → Set i where 50 | ⊥ : a∈ Maybe-⑊ a∈ 51 | ⌊_⌋ : ∀ {a'}{a'∈ : a' ∈ ℓ} → (a∈' : a ∈ (ℓ ⑊ a'∈)) → a∈ Maybe-⑊ a'∈ 52 | 53 | _⑊?_ : ∀ {l : List A}{a}(a∈ : a ∈ l){a'} → (a'∈ : a' ∈ l) → a∈ Maybe-⑊ a'∈ 54 | Ο ⑊? Ο = ⊥ 55 | Ο ⑊? 1+ a'∈ = ⌊ Ο ⌋ 56 | 1+ a∈ ⑊? Ο = ⌊ a∈ ⌋ 57 | 1+ a∈ ⑊? 1+ a'∈ with a∈ ⑊? a'∈ 58 | ... | ⌊ a∈ ⌋ = ⌊ 1+ a∈ ⌋ 59 | ... | ⊥ = ⊥ 60 | 61 | module VecList where 62 | 63 | -- VecList B [l₀ ; .. ; lₙ] ≃ B l₀ × .. × B lₙ 64 | data VecList {A : Set}(B : A → Set) : List A → Set where 65 | [] : VecList B [] 66 | _,_ : ∀ {a as} → B a → VecList B as → VecList B (a ∷ as) 67 | 68 | 69 | map : ∀ {A : Set}{B B' : A → Set}{l : List A} → (∀ a → B a → B' a) → VecList B l → VecList B' l 70 | map f [] = [] 71 | map f (x , xs) = f _ x , map f xs 72 | 73 | lookup : ∀ {A : Set}{B : A → Set}{l : List A}{a} → VecList B l → a ∈ l → B a 74 | lookup (t , _) Ο = t 75 | lookup (_ , ts) (1+ a∈) = lookup ts a∈ 76 | 77 | module _ {A : Set}{B : A → Set}(_≟_ : Relation.Binary.Decidable (_≡_ {A = A})) 78 | (_≟B_ : ∀ {a} → Relation.Binary.Decidable (_≡_ {A = B a})) where 79 | private 80 | _≟B'_ : ∀ {a a'}(b : B a) (b' : B a') → Dec (Σ (a ≡ a') (λ e → ≡.subst B e b ≡ b')) 81 | _≟B'_ {a} {a'} b b' with a ≟ a' 82 | ... | no p = no λ { (x , _) → p x} 83 | ... | yes 1ₑ with b ≟B b' 84 | ... | no p = no λ { ( 1ₑ , x ) → p x} 85 | ... | yes p = yes (1ₑ , p) 86 | 87 | 88 | lookup⁻¹ : ∀ {a} (b : B a) {l} (xs : VecList B l) → Maybe (pre-image (lookup xs) b) 89 | lookup⁻¹ b [] = ⊥ 90 | lookup⁻¹ b (b' , xs) with b ≟B' b' 91 | ... | yes (1ₑ , 1ₑ) = ⌊ PreImage Ο ⌋ 92 | ... 
| no _ = do 93 | PreImage i ← lookup⁻¹ b xs 94 | ⌊ PreImage (1+ i) ⌋ 95 | where open Data.Maybe.Base using (_>>=_) 96 | 97 | tabulate : ∀ {A : Set}{B : A → Set} → (∀ a → B a) → (ℓ : List A) → VecList B ℓ 98 | tabulate f [] = [] 99 | tabulate f (x ∷ ℓ) = f x , tabulate f ℓ 100 | 101 | module MoreFin where 102 | inject₁-≢-fromℕ : ∀ {n} i → Fin.inject₁ i ≢ Fin.fromℕ n 103 | inject₁-≢-fromℕ Fin.zero = λ () 104 | inject₁-≢-fromℕ (Fin.suc i) = λ e → inject₁-≢-fromℕ i (suc-injective e) 105 | 106 | module MoreList where 107 | 108 | module _ {i}{A : Set i} where 109 | 110 | _≟∈_ : ∀ {l}{a : A}(a∈ a∈' : a ∈ l) → Dec (a∈ ≡ a∈') 111 | Ο ≟∈ Ο = yes 1ₑ 112 | 1+ p ≟∈ 1+ p' with p ≟∈ p' 113 | ... | yes q = yes (≡.cong 1+ q) 114 | ... | no q = no λ {1ₑ → q 1ₑ} 115 | Ο ≟∈ 1+ p = no λ () 116 | 1+ q ≟∈ Ο = no λ () 117 | 118 | module _ {ℓ : Level} {A : Set ℓ} {ℓ' : Level} {B : Set ℓ} where 119 | import Data.List.Membership.Setoid (setoid A) as SetoidA 120 | open import Data.List.Relation.Unary.Any.Properties using (map⁻) 121 | ∈-map⁺-map⁻ : 122 | (f : A → B) {y : B} {xs : List A} → 123 | (y∈ : y ∈ List.map f xs) → 124 | let (y' , i'' , e) = ∈-map⁻ f y∈ 125 | in ∈-map⁺ f i'' ≡ ≡.subst (_∈ _) e y∈ 126 | ∈-map⁺-map⁻ f {xs = x ∷ xs} Ο = 1ₑ 127 | ∈-map⁺-map⁻ f {y}{xs = x ∷ xs} (1+ y∈) rewrite ∈-map⁺-map⁻ f y∈ 128 | with SetoidA.find (map⁻ y∈) 129 | ... | y' , i'' , e = aux y∈ e 130 | where 131 | aux : ∀ {y} (y∈ : y ∈ List.map f xs) (e : y ≡ f y') → 132 | 1+ (≡.subst (_∈ (List.map f xs)) e y∈) ≡ 133 | ≡.subst (_∈ (f x ∷ List.map f xs)) e (1+ y∈) 134 | aux y∈ 1ₑ = 1ₑ 135 | 136 | 137 | module MoreVec where 138 | module _ {i}{A : Set i}(_≟_ : Relation.Binary.Decidable (_≡_ {A = A})) where 139 | 140 | open import Data.Vec.Membership.DecPropositional _≟_ 141 | open VecAny using (index) 142 | lookup⁻¹ : ∀ a {n} (l : Vec A n) → Maybe (pre-image (Vec.lookup l) a) 143 | lookup⁻¹ a {n} l with a ∈? l 144 | ... | no _ = ⊥ 145 | ... | yes a∈ rewrite VecProp.lookup-index a∈ = ⌊ PreImage (index a∈) ⌋ 146 | 147 | insert-lookup-last< : ∀ {i}{A : Set i}{n} k a (l : Vec A n) 148 | → Vec.lookup (Vec.insertAt l (Fin.fromℕ n) a) (Fin.inject₁ k) ≡ Vec.lookup l k 149 | insert-lookup-last< Fin.zero a (x ∷ l) = 1ₑ 150 | insert-lookup-last< (Fin.suc k) a (x ∷ l) = insert-lookup-last< k a l 151 | 152 | insert-last-++ : ∀ {i}{A : Set i}{n m}(xs : Vec A n)(ys : Vec A m) a → 153 | Vec.toList (Vec.insertAt (xs Vec.++ ys) (Fin.fromℕ _) a) ≡ 154 | Vec.toList (xs Vec.++ (Vec.insertAt ys (Fin.fromℕ _) a)) 155 | insert-last-++ [] ys a = 1ₑ 156 | insert-last-++ (x ∷ xs) ys a = ≡.cong (x ∷_) (insert-last-++ xs ys a) 157 | 158 | 159 | module _ {i}{j}{A : Set i}{P : A → Set j}(P? : Relation.Unary.Decidable P) where 160 | -- return the list of indices that satfies P 161 | find-indices : ∀ {n} → (xs : Vec A n) → List (Fin n) 162 | find-indices [] = [] 163 | find-indices (x ∷ xs) with ys ← List.map Fin.suc (find-indices xs) 164 | | does (P? x) 165 | ... | true = Fin.zero ∷ ys 166 | ... | false = ys 167 | 168 | open VecAny using (here ; there ) 169 | 170 | find-indices-∈ : ∀ {n a} → {xs : Vec A n} → (a∈ : a ∈̬ xs) → P a → (VecAny.index a∈ ∈ find-indices xs) 171 | find-indices-∈ (here 1ₑ) Pa rewrite dec-true (P? _) Pa = here 1ₑ 172 | find-indices-∈ (there {x = x}{xs = xs} a∈) Pa 173 | with rec ← ListProp.∈-map⁺ Fin.suc (find-indices-∈ a∈ Pa) 174 | | does (P? x) 175 | ... | true = there rec 176 | ... 
| false = rec 177 | 178 | find-indices-insert-last : ∀ {n} → (xs : Vec A n) → ∀ {a} → P a → 179 | find-indices (Vec.insertAt xs (Fin.fromℕ n) a) ≡ 180 | List.map Fin.inject₁ (find-indices xs) ++ (Fin.fromℕ n ∷ []) 181 | find-indices-insert-last [] Pa rewrite dec-true (P? _) Pa = 1ₑ 182 | find-indices-insert-last (x ∷ xs) Pa rewrite find-indices-insert-last xs Pa 183 | with P? x 184 | ... | yes p rewrite 185 | ListProp.map-++ Fin.suc (List.map Fin.inject₁ (find-indices xs) ) (Fin.fromℕ _ ∷ []) 186 | | ≡.sym (ListProp.map-∘ {g = Fin.suc}{Fin.inject₁}(find-indices xs)) 187 | | ≡.sym (ListProp.map-∘ {g = Fin.inject₁}{Fin.suc}(find-indices xs)) 188 | = 1ₑ 189 | 190 | 191 | ... | no p rewrite 192 | ListProp.map-++ Fin.suc (List.map Fin.inject₁ (find-indices xs) ) (Fin.fromℕ _ ∷ []) 193 | | ≡.sym (ListProp.map-∘ {g = Fin.suc}{Fin.inject₁}(find-indices xs)) 194 | | ≡.sym (ListProp.map-∘ {g = Fin.inject₁}{Fin.suc}(find-indices xs)) 195 | = 1ₑ 196 | 197 | find-indices-insert-last-⊥ : ∀ {n} → (xs : Vec A n) → ∀ {a} → ¬ P a → 198 | find-indices (Vec.insertAt xs (Fin.fromℕ n) a) ≡ 199 | List.map Fin.inject₁ (find-indices xs) 200 | find-indices-insert-last-⊥ [] Pa rewrite dec-false (P? _) Pa = 1ₑ 201 | find-indices-insert-last-⊥ (x ∷ xs) Pa rewrite find-indices-insert-last-⊥ xs Pa 202 | | ≡.sym (ListProp.map-∘ {g = Fin.suc}{Fin.inject₁}(find-indices xs)) 203 | with P? x 204 | ... | yes p 205 | rewrite ≡.sym (ListProp.map-∘ {g = Fin.inject₁}{Fin.suc}(find-indices xs)) 206 | = 1ₑ 207 | ... | no p 208 | rewrite ≡.sym (ListProp.map-∘ {g = Fin.inject₁}{Fin.suc}(find-indices xs)) 209 | = 1ₑ 210 | 211 | 212 | find-indices-0 : ∀ {n} → (xs : Vec A n) → (∀ {a} → a ∈̬ xs → ¬ P a) → 213 | find-indices xs ≡ [] 214 | find-indices-0 [] ¬P = 1ₑ 215 | find-indices-0 (x ∷ xs) ¬P rewrite dec-false (P? x) (¬P (here 1ₑ)) 216 | | find-indices-0 xs (λ z → ¬P (there z)) 217 | = 1ₑ 218 | 219 | find-indices-map⁻ : ∀ {i}{j}{k}{A : Set i}{B : Set j} 220 | {PA : A → Set j}(PA? : Relation.Unary.Decidable PA) 221 | {PB : B → Set k}(PB? : Relation.Unary.Decidable PB) → 222 | ∀ (f : A → B) {n} → 223 | (∀ {a} → PA a → PB (f a)) → 224 | (∀ {a} → PB (f a) → PA a) → 225 | (xs : Vec A n) → 226 | find-indices PB? (Vec.map f xs) ≡ find-indices PA? xs 227 | 228 | find-indices-map⁻ PA? PB? f AB BA [] = 1ₑ 229 | find-indices-map⁻ PA? PB? f AB BA (x ∷ xs) with PA? x 230 | ... | yes p rewrite dec-true (PB? (f x)) (AB p) | find-indices-map⁻ PA? PB? f AB BA xs = 1ₑ 231 | ... | no p rewrite dec-false (PB? (f x)) (λ x₁ → p (BA x₁)) | find-indices-map⁻ PA? PB? f AB BA xs = 1ₑ 232 | 233 | 234 | 235 | 236 | 237 | 238 | 239 | \end{code} 240 | 241 | 242 | 243 | -------------------------------------------------------------------------------- /lics24-response.md: -------------------------------------------------------------------------------- 1 | We thank the referees for their useful comments and suggestions. 2 | 3 | We begin by addressing Reviewer 3's perceived lack of theoretical depth in our work. To clarify, in our eyes, the main theoretical contribution is the categorical semantics of higher-order pattern unification, as computing equalisers in suitable Lawvere theories. We are not aware of any direct semantic account in the litterature in this style. We believe that it sheds light on the similarity between higher-order pattern unification and first-order unification algorithms, which has been noticed many times in the litterature. 
4 | We chose to focus on explaining the semantics in the main body of the paper while relegating examples to the appendices. However, multiple reviewers argued that it would be helpful to introduce at least one example other than the untyped lambda-calculus to give more intuition about GB-signatures. If accepted, we propose to sketch Miller's original setting based on normalised simply-typed lambda-calculus (Appendix B.3) in the final version. 5 | 6 | Our theoretical investigation led to a single parameterisable algorithm that applies in many settings (this is where the full generality of GB-signatures pays off, to answer reviewer 1). 7 | 8 | 9 | DETAILED RESPONSE 10 | ------------------- 11 | 12 | We give detailed answers to the questions in each of the reviews below. 13 | 14 | Review 1 15 | ----------------- 16 | 17 | > 176: this motivates avoiding the use of "some kind of error monad", but then Maybe (which is canonically "some kind of error monad") is used anyway. So it makes it sound like you are doing something different here, rather than something uncontroversial and fairly standard in pure Haskell/Agda settings. Or at least the difference was not clear to me. 18 | 19 | We do use Maybe, but not in the place that is usually expected in a pure functional setting. Indeed, the result type of the unification algorithm is not embedded into a Maybe type (see Figure 3). Therefore, the semantics of our algorithm is a complete function rather than a partial function (as is the case in Rydeheard-Burstall's semantic account of first-order unification): the key point is that what is usually interpreted as failure can in fact be interpreted as a fully fledged coequaliser in a slightly modified category (Section 3.1). This little trick makes the completeness argument straightforward (Section 6.2). 20 | 21 | > 220- regarding the extra requirements that metavariable arguments be distinct, > I would thin kin Agda one could enforce this by requiring the mappings m => n > to be injective rather than arbitrary finite maps from [1...n] to [1...m], 22 | > right? Why not do this? 23 | 24 | We could indeed implement this restriction (this will be necessary when we mechanise the proof of correctness, which is future work). We opted for a lighter implementation approach, omitting certain properties required for correctness. Thus, the algorithm is guaranteed to produce valid outputs only under the assumption that the inputs are valid. 25 | 26 | 27 | > 353: why leave n implicit and not other things? because usually this judgment > is used with n=0, or because it's always easy to figure out what n should be? 28 | 29 | This judgement is not usually used with n=0. We chose to emphasise metacontexts and leave the variable context implicit to lighten the notation. 30 | 31 | > 408: "then obviously there is no unifier" - there hasn't really been any 32 | > discussion so far of the formal statement of the unification problem so to 33 | > make it clearer why this is obvious here it might help to point out that in 34 | > HOU/HOPU the substitutions for the metavariables have to be closed - ie no 35 | > dependences on bound variables other than those explicitly given by the 36 | > arguments. 37 | 38 | We thank the referee for this clarifying observation, which we will include 39 | when we define metavariable substitution (paragraph starting line 221), as it may not be obvious. 40 | 41 | > 633: is the long double arrow here between alpha o and alpha (o {x}) the same > kind of arrow as in x : a => b or in line 626??
42 | 43 | They are not the same: the double long arrow denotes a renaming between two metacontexts (Notation 2.10) while the short one denotes a renaming between two natural numbers (see below Remark 2.1). 44 | 45 | Review 2 46 | --------- 47 | 48 | - how can we adapt our work to the dependently typed setting? 49 | 50 | We plan to investigate Uemura's notion of representable map categories which seems to play a role analogous to that of Lawvere theories, but for dependently typed syntax. 51 | 52 | - are there any known ways to modify the paper's contribution to capture some equational theories? 53 | 54 | We were thinking of devising a version of Hur and Fiore's notion of equational systems [1] compatible with unification. We thank the reviewer for suggesting looking at pra monads: this looks like a very interesting path to explore. Let us finally note that our setting can handle the case where there are normal forms that can be specified by a GB-signature (as it is the case for simply-typed lambda calculus modulo beta/eta, see Appendix B.3). 55 | 56 | [1] Marcelo P. Fiore, Chung-Kil Hur, Equational Systems and Free Constructions, ICALP 2007 57 | 58 | -------------------------------------------------------------------------------- /lics24-reviews.md: -------------------------------------------------------------------------------- 1 | ----------------------- REVIEW 1 --------------------- 2 | SUBMISSION: 62 3 | TITLE: Semantics of pattern unification 4 | AUTHORS: Ambroise Lafont and Neelakantan Krishnaswami 5 | 6 | ----------- Overall evaluation ----------- 7 | SCORE: 1 (weak accept) 8 | ----- TEXT: 9 | Summary 10 | 11 | Higher-order pattern unification (HOPU) is a special case of higher-order unification (unification of lambda-calculus terms) in which restrictions are placed on the use of metavariables which ensure that unification is efficiently decidable and unitary (most general unifiers exist and are unique up to renaming). As introduced by Miller, HOPU is a HOU problem in which metavariable occurrences are always of the form X y1 ... yn where y1...yn are distinct bound variables (i.e introduced by lambda-abstractions above the occurrence of X). 12 | 13 | This paper proposes a generalization of HOPU with the following features: 14 | 1. Metavariables have arities rather than being thought of as having lambda-calculus types (thus, unification of untyped terms is accommodated). 15 | 2. Languages can be specified by binding signatures or generalizations thereof. 16 | 3. Correctness of the general case is proved with respect to a semantics formulated via category theory, where certain steps of unification correspond to categorical constructions like (co)equalizer and pullback/pushout constructions 17 | 4. Unification algorithm fore generalized binding signatures/terms is parameterized by a small number of language-specific functions/proof obligations, such as providing the equalizer and pullback operations 18 | 5. Implementation in Agda using de Bruijn levels syntax - dependent types used to help keep various things straight but not to fully formalize correctness. 19 | 20 | Evaluation 21 | 22 | I do not have the background to fully assess/check the categorical semantics so limit my comments to the parts I am qualified to judge and proceed on the assumption that there are no major technical problems in the details I am not following. 23 | 24 | With that caveat, my view of the paper is as follows. 
It develops an interesting generalization of higher-order pattern unification which to some extent distills the higher-order pattern restriction to an essence that can be transferred to other settings, via the generalization of binding signatures in section 2.2. However in its current form the paper is really dense even for someone familiar with HOU/HOPU. The paper doesn't really ever explain what HOPU (in its classic/conventional form) really is nor how the formulation taken in this paper is equivalent - I believe it is but unless there's a similar treatment out there somewhere (that the paper should be citing) this needs a little more exposition. Similarly, the extensive use of Agda and de Bruijn syntax in the first half of the paper and category theoretic notation in the second half sometimes makes the paper somewhat heavy going. Finally the paper is a little short on motivation of and examples for the general case 25 | of generalized binding signatures, specifically, it is not clearly explained in the paper how various examples (detailed in the appendix) fit the framework and how the full generality of GB signatures pays off in terms of applications. 26 | 27 | I think many of these issues could be addressed by relatively simple revisions to the paper (not affecting the technical content at all just the presentation and what is said in the main body vs appendix) if that is possible within the LICS review process. Also, it is possible that to more specialized reviewers the significance and depth of the results would carry greater weight. So overall I am weakly in favor of acceptance of the paper. 28 | 29 | Detailed comments 30 | 31 | 41-65: this paragraph makes it sound like Dunfield and Krishnaswami and Zhao et al. studied different type inference algorithms, but as far as I can see Zhao et al. set out to formalize Dunfield and Krishnaswami. Maybe more could be said about why there isn't much reuse opportunity (if there isn't). But also as far as I know (could be wrong) higher order *pattern* unification plays no role in these algorithms. 32 | 33 | 70: "as simple as innocuous" 34 | 35 | 36 | 170: in/around 170 I wondered what about types/ It seems this is possible to accommodate as a special case of the general framework in section 2.2. But little is said explicitly. I think the paper would be improved generally by mentioning the (even if there are few details) especially for simply-typed lambda / beta normal eta long terms since these are often taken as the starting point for HOPU. 37 | 38 | 170: also around this point it became clear to someone already familiar that you ar using de Bruijn syntax (levels not indices) but this should be said explicitly, and while it is pretty obvious to the PL-in-Agda community it is better not to self- limit the paper's audience to people that already know/guess this from context. 39 | 40 | 176: this motivates avoiding the use of "some kind of error monad", btu then Maybe (which is canonically "some kind of error monad" is used anyway. So it makes it sound like you are doing something different here, rather than something uncontroversial and fairly standard in pure Haskell/Agda settings. Or at least the difference was not clear to me. 41 | 42 | 220- regarding the extra requirements that metavariable arguments be distinct, I would thin kin Agda one could enforce this by requiring the mappings m => n to be injective rather than arbitrary finite maps from [1...n] to [1...m], right? Why not do this? 
43 | 44 | 243: might be worth noting that the m \in Gamma argument is "proof relevant", i.e. there could be several copies of m and the argument will take different values to refer to different ones (usually membership will essentially be a unary natural number index into the list). 45 | 46 | 263-280: even if technically you don't have to in Agda, it is not vert readable to me to say App. t u [sigma]t instead of (App. t u)[sigma]t, adding parentheses in this way to the various cases would help readability. 47 | 48 | 313: starting around here it seems there is a convention that x : m => n is a variable renaming (i.e.a finite / injective map), I would find it helpful as a reminder that it's a more complex structure to write \vec{x} or \bar{x} or something 49 | 50 | 353: why leave n implicit and not other things? because usually this judgment is used with n=0, or because it's always easy to figure out what n should be? 51 | 52 | 394: "keeps M but changes its arity to p" - this was a place where the at least superficial differences to conventional HOPU become clearer. Conventionally one substitutes M with \lambda x1...xn. M' y1...yn where the mapping p is derived from the choices of x's and y's. Changing M's type/arity requires making sure all other occurrences of M are so changed, which is fine here I think as long as the substitution and context management is done consistently 53 | 54 | 408: "then obviously there is no unifier" - there hasn't really been any discussion so far of the formal statement of the unification problem so to make it clearer why this is obvious here it might help to point out that in HOU/HOPU the substitutions for the metavariables have to be closed - ie no dependences on bound variables other than those explicitly given by the arguments. 55 | 56 | 468-476 it took me a while to unpack and follow the definitions of the various ->? judgments, which are not mentioned anywhere in the text either (though partly due to slightly less familiarity with Agda record syntax) 57 | 58 | 633: is the long double arrow here between alpha o and alpha (o {x}) the same kind of arrow as in x : a => b or in line 626?? 59 | 60 | 721: it might help presentationally to say right away how Aczel's binding signatures are an instance of GB-signatures (example 2.8), and to give (ideally a simple) example of a non-discrete category - as the motivation for generalizing to GB signatures in this way is not clear. 61 | 62 | 820: "convenient define" 63 | 64 | Lemma 3.6: capitalize "metacontexts" 65 | 66 | 67 | Defnition 3.15: "is said *to be* pattern-friendly" 68 | 69 | 1076: missing period at end of sentence 70 | 71 | 1364: In addition to Cheney's early work on relating nominal and higher-order pattern unification, Levy and Villaret studied the relationship later (see reference) 72 | 73 | 1426: Reference 27 is missing information about where/how published 74 | 75 | Reference 76 | 77 | Jordi Levy and Mateu Villaret. 2012. Nominal Unification from a Higher-Order Perspective. ACM Trans. Comput. Logic 13, 2, Article 10 (April 2012), 31 pages. https://doi.org/10.1145/2159531.2159532 78 | 79 | 80 | ----------------------- REVIEW 2 --------------------- 81 | SUBMISSION: 62 82 | TITLE: Semantics of pattern unification 83 | AUTHORS: Ambroise Lafont and Neelakantan Krishnaswami 84 | 85 | ----------- Overall evaluation ----------- 86 | SCORE: 2 (accept) 87 | ----- TEXT: 88 | Evaluation: 89 | 90 | The paper under review clearly provides an interesting contribution to the theory of higher-order unification. 
Grouping different versions of the same pattern unification algorithm together using a “unified” category-theoretic semantics is an important step towards understanding more complex unification problems. The definitions of GB-signature and the mathematical proofs are concise and elegant. 91 | 92 | The paper also provides avenues for future research. In particular, the semantics of the generalised algorithm lead one to believe that similar techniques can be used to reason about pattern unification problems for dependently typed signatures. 93 | 94 | Therefore I recommend that it be accepted to LICS 2024. 95 | 96 | (see attached PDF for detailed review) 97 | 98 | 100 | 101 | ----------------------- REVIEW 3 --------------------- 102 | SUBMISSION: 62 103 | TITLE: Semantics of pattern unification 104 | AUTHORS: Ambroise Lafont and Neelakantan Krishnaswami 105 | 106 | ----------- Overall evaluation ----------- 107 | SCORE: 1 (weak accept) 108 | ----- TEXT: 109 | ** SUMMARY ** 110 | 111 | This paper gives a semantics-driven formulation of the unification phase in type inference. It is based on the syntax with binding signatures and meta-variables and their presheaf-based semantics (originating from the seminal work by Fiore, Plotkin and Turi, with further refinement by Hamana and others). As a result, we obtain a generic form of the unification algorithm which can be systematically instantiated to various languages. 112 | 113 | ** EVALUATION ** 114 | 115 | This work gives a nice example of applying semantic ideas and frameworks to an important problem in programming languages practice. Thus, I buy it as a good semantic-engineering paper. The presentation is well-organized; it begins with concrete syntax and algorithms and then proceeds to the semantic proof of the correctness. 116 | 117 | On the other hand, this work is rather light-weighted: it does not give much new theoretical insight, just presents a very cleaver use of known techniques, and the outcome is not too surprising. I think this lightness is nice as an engineering paper, but not necessarily so in a conference like LICS. I feel that this work would be more suitable for some PL conferences (like POPL and ICFP). 118 | 119 | ** COMMENTS ** 120 | 121 | - page 9, line 952 (and many places after that): 122 | I think MCon_\bot(S) is MCon(S)_\bot . -------------------------------------------------------------------------------- /lncs-long.lyx: -------------------------------------------------------------------------------- 1 | #LyX 2.3 created this file. For more info see http://www.lyx.org/ 2 | \lyxformat 544 3 | \begin_document 4 | \begin_header 5 | \save_transient_properties true 6 | \origin unavailable 7 | \textclass llncs 8 | \begin_preamble 9 | \input{common-preamble} 10 | \newcommand{\startappendix}{\appendix} 11 | \newcommand{\secappendix}[1]{\section{#1}} 12 | 13 | \newcommand{\keywords}[1]{} 14 | \end_preamble 15 | \use_default_options true 16 | \begin_modules 17 | enumitem 18 | \end_modules 19 | \maintain_unincluded_children false 20 | \begin_local_layout 21 | #\DeclareLyXModule{Other} 22 | #DescriptionBegin 23 | #Defines Notation 24 | #DescriptionEnd 25 | #Category: theorems 26 | 27 | Format 66 28 | 29 | Requires amsmath 30 | 31 | # The environments defined (regular and starred) are : 32 | # - Property 33 | 34 | Style Notation 35 | CopyStyle Theorem 36 | DependsOn Theorem 37 | LatexName notation 38 | LabelString "Notation \thethm." 
39 | Preamble 40 | \spnewtheorem{notation}{Notation}{\bfseries}{\itshape} 41 | EndPreamble 42 | End 43 | 44 | 45 | Style "Personal Question" 46 | CopyStyle Theorem 47 | DependsOn Theorem 48 | LatexName personalquestion 49 | LabelString "Personal Question \thethm." 50 | Preamble 51 | %\newenvironment{personalquestion}{\shortfull{\comment}{\begin{question}\color{red}(personal)}}{\shortfull{\endcomment}{\end{question}}} 52 | \newenvironment{personalquestion}{\comment}{\endcomment} 53 | EndPreamble 54 | End 55 | 56 | 57 | Style "Long Proof" 58 | CopyStyle Proof 59 | DependsOn Proof 60 | LatexName longproof 61 | LabelString "Long proof." 62 | Preamble 63 | \newenvironment{longproof}{\shortfull{\comment}{\begin{proof}}}{\shortfull{\endcomment}{\end{proof}}} 64 | EndPreamble 65 | End 66 | \end_local_layout 67 | \language british 68 | \language_package default 69 | \inputencoding auto 70 | \fontencoding global 71 | \font_roman "default" "default" 72 | \font_sans "default" "default" 73 | \font_typewriter "default" "default" 74 | \font_math "auto" "auto" 75 | \font_default_family default 76 | \use_non_tex_fonts false 77 | \font_sc false 78 | \font_osf false 79 | \font_sf_scale 100 100 80 | \font_tt_scale 100 100 81 | \use_microtype false 82 | \use_dash_ligatures true 83 | \graphics default 84 | \default_output_format default 85 | \output_sync 0 86 | \bibtex_command default 87 | \index_command default 88 | \paperfontsize default 89 | \spacing single 90 | \use_hyperref true 91 | \pdf_bookmarks true 92 | \pdf_bookmarksnumbered false 93 | \pdf_bookmarksopen false 94 | \pdf_bookmarksopenlevel 1 95 | \pdf_breaklinks false 96 | \pdf_pdfborder false 97 | \pdf_colorlinks false 98 | \pdf_backref false 99 | \pdf_pdfusetitle true 100 | \papersize default 101 | \use_geometry false 102 | \use_package amsmath 1 103 | \use_package amssymb 1 104 | \use_package cancel 1 105 | \use_package esint 1 106 | \use_package mathdots 1 107 | \use_package mathtools 1 108 | \use_package mhchem 1 109 | \use_package stackrel 1 110 | \use_package stmaryrd 1 111 | \use_package undertilde 1 112 | \cite_engine basic 113 | \cite_engine_type default 114 | \biblio_style splncs04 115 | \use_bibtopic false 116 | \use_indices false 117 | \paperorientation portrait 118 | \suppress_date false 119 | \justification true 120 | \use_refstyle 1 121 | \use_minted 0 122 | \index Index 123 | \shortcut idx 124 | \color #008000 125 | \end_index 126 | \secnumdepth 3 127 | \tocdepth 3 128 | \paragraph_separation indent 129 | \paragraph_indentation default 130 | \is_math_indent 0 131 | \math_numbering_side default 132 | \quotes_style english 133 | \dynamic_quotes 0 134 | \papercolumns 1 135 | \papersides 1 136 | \paperpagestyle default 137 | \tracking_changes false 138 | \output_changes false 139 | \html_math_output 0 140 | \html_css_as_file 0 141 | \html_be_strict false 142 | \end_header 143 | 144 | \begin_body 145 | 146 | \begin_layout Title 147 | Generic pattern unification: a categorical approach 148 | \end_layout 149 | 150 | \begin_layout Author 151 | Ambroise Lafont 152 | \begin_inset ERT 153 | status open 154 | 155 | \begin_layout Plain Layout 156 | 157 | 158 | \backslash 159 | orcidID{0000-0002-9299-641X} 160 | \end_layout 161 | 162 | \end_inset 163 | 164 | 165 | \begin_inset ERT 166 | status open 167 | 168 | \begin_layout Plain Layout 169 | 170 | 171 | \backslash 172 | and 173 | \end_layout 174 | 175 | \end_inset 176 | 177 | Neel Krishnaswami 178 | \begin_inset ERT 179 | status open 180 | 181 | \begin_layout Plain Layout 182 | 183 | 184 | 
\backslash 185 | orcidID{0000-0003-2838-5865} 186 | \end_layout 187 | 188 | \end_inset 189 | 190 | 191 | \end_layout 192 | 193 | \begin_layout Institute 194 | University of Cambridge 195 | \end_layout 196 | 197 | \begin_layout Standard 198 | \begin_inset CommandInset include 199 | LatexCommand input 200 | filename "core.lyx" 201 | 202 | \end_inset 203 | 204 | 205 | \end_layout 206 | 207 | \end_body 208 | \end_document 209 | -------------------------------------------------------------------------------- /lncs-long.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/amblafont/unification/7d658c077f2582d018806136dc956b35632454dd/lncs-long.pdf -------------------------------------------------------------------------------- /main.lagda: -------------------------------------------------------------------------------- 1 | \begin{code} 2 | {-# OPTIONS --no-termination-check #-} 3 | module main where 4 | 5 | open import Relation.Nullary using (Dec ; yes ; no) 6 | open import Data.List as List hiding (map ; [_]) 7 | open import Data.List.Relation.Binary.Pointwise using (Pointwise ; [] ; _∷_) 8 | open import Data.List.Relation.Unary.Any renaming (_─_ to _⑊_ ) 9 | open import Data.Product using (_,_; Σ; _×_ ) 10 | open import Data.Maybe.Base using (Maybe) renaming (nothing to ⊥ ; just to ⌊_⌋) 11 | open import Relation.Binary.PropositionalEquality as ≡ using (_≡_) 12 | open import Agda.Primitive 13 | open import Data.List.Membership.Propositional 14 | 15 | open import lib 16 | 17 | \end{code} 18 | %<*signature-core> 19 | \begin{code} 20 | record Signature i j k : Set (lsuc (i ⊔ j ⊔ k)) where 21 | field 22 | A : Set i 23 | hom : A → A → Set j 24 | id : ∀ {a} → hom a a 25 | _∘_ : ∀ {a b c} → hom b c → hom a b → hom a c 26 | O : A → Set k 27 | α : ∀ {a} → O a → List A 28 | \end{code} 29 | % 30 | \begin{code} 31 | -- [a₁,⋯, aₙ] ⇢ [b₁,⋯, bₘ] is isomorphic to a₁⇒b₁ × ⋯ × aₙ⇒bₙ if n=m 32 | -- Otherwise, it is isomorphic to the empty type. 
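-- Pointwise renamings describe how a renaming x : hom a b acts on the list of
-- argument arities of an operator o : O a, via _^_ below: x ^ o : α o ⇢ α (o { x }).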
33 | \end{code} 34 | %<*renaming-vectors> 35 | \begin{code} 36 | _⇢_ : List A → List A → Set (i ⊔ j) 37 | as ⇢ bs = Pointwise hom as bs 38 | \end{code} 39 | % 40 | \begin{code} 41 | field 42 | \end{code} 43 | %<*signature-functoriality> 44 | \begin{code} 45 | -- Functoriality components 46 | _{_} : ∀ {a b} → O a → hom a b → O b 47 | _^_ : ∀ {a b}(x : hom a b)(o : O a) → α o ⇢ α (o { x } ) 48 | \end{code} 49 | % 50 | 51 | %<*friendlysignature1> 52 | \begin{code} 53 | record isFriendly {i j k}(S : Signature i j k) : Set (i ⊔ j ⊔ k) where 54 | \end{code} 55 | % 56 | \begin{code} 57 | open Signature S 58 | \end{code} 59 | % on economise une ligne 60 | %<*friendlysignature2> 61 | \begin{code} 62 | field 63 | equaliser : ∀ {m a} → (x y : hom m a) → Σ A (λ p → hom p m) 64 | pullback : ∀ {m m' a} → hom m a → hom m' a → Σ A (λ p → hom p m × hom p m') 65 | _≟_ : ∀ {a}(o o' : O a) → Dec (o ≡ o') 66 | _{_}⁻¹ : ∀ {a}(o : O a) → ∀ {b}(x : hom b a) → Maybe (pre-image (_{ x }) o) 67 | \end{code} 68 | % 69 | \begin{code} 70 | module Tm {i j k}(S : Signature i j k) where 71 | open Signature S 72 | MetaContext : Set i 73 | MetaContext· : Set i 74 | \end{code} 75 | %<*metacontext> 76 | \begin{code} 77 | MetaContext· = List A 78 | MetaContext = Maybe MetaContext· 79 | \end{code} 80 | % 81 | \begin{code} 82 | Tm· : MetaContext· → A → Set (i ⊔ j ⊔ k) 83 | \end{code} 84 | %<*syntax-decl> 85 | \begin{code} 86 | data Tm : MetaContext → A → Set (i ⊔ j ⊔ k) 87 | Tm· Γ a = Tm ⌊ Γ ⌋ a 88 | \end{code} 89 | % 90 | \begin{code} 91 | import Common as C 92 | module Common = C {k = k} A hom id Tm 93 | open Common.SubstitutionDef public 94 | \end{code} 95 | %<*syntax-def> 96 | \begin{code} 97 | data Tm where 98 | Rigid· : ∀ {Γ a}(o : O a) → (α o ·⟶· Γ) 99 | → Tm· Γ a 100 | _﹙_﹚ : ∀ {Γ a m} → m ∈ Γ → hom m a 101 | → Tm· Γ a 102 | ! : ∀ {a} → Tm ⊥ a 103 | \end{code} 104 | % 105 | \begin{code} 106 | Rigid : ∀ {Γ a}(o : O a) → ( α o ·⟶ Γ ) → Tm Γ a 107 | Rigid {⊥} o δ = ! 108 | Rigid {⌊ Γ ⌋} o δ = Rigid· o δ 109 | 110 | {- ---------------------- 111 | 112 | Renaming 113 | 114 | -------------------------- -} 115 | _❴_❵ : ∀ {Γ a b} → Tm Γ a → hom a b → Tm Γ b 116 | _❴_❵s : ∀ {Γ Γ' Δ} → Γ ·⟶ Δ 117 | → Γ ⇢ Γ' → Γ' ·⟶ Δ 118 | 119 | (Rigid· o ts) ❴ x ❵ = Rigid· (o { x }) (ts ❴ x ^ o ❵s) 120 | M ﹙ y ﹚ ❴ x ❵ = M ﹙ x ∘ y ﹚ 121 | ! ❴ f ❵ = ! 122 | 123 | [] ❴ [] ❵s = [] 124 | (t , ts) ❴ f ∷ fs ❵s = t ❴ f ❵ , ts ❴ fs ❵s 125 | 126 | {- ---------------------- 127 | 128 | Weakening 129 | 130 | -------------------------- -} 131 | wkₜ : ∀ {Γ a m} → Tm· Γ a → Tm· (m ∷ Γ) a 132 | 133 | open Common.wkₛ wkₜ public 134 | 135 | wkₜ (Rigid· o ts) = Rigid· o (wkₛ ts) 136 | wkₜ (M ﹙ x ﹚) = 1+ M ﹙ x ﹚ 137 | 138 | 139 | {- ---------------------- 140 | 141 | Substitution 142 | 143 | -------------------------- -} 144 | open Common.!ₛ ! public 145 | 146 | \end{code} 147 | %<*gen-substitution-proto> 148 | \begin{code} 149 | _[_]t : ∀ {Γ a} → Tm Γ a → ∀ {Δ} → (Γ ⟶ Δ) → Tm Δ a 150 | \end{code} 151 | % 152 | \begin{code} 153 | 154 | open Common.-[-]s _[_]t public 155 | 156 | \end{code} 157 | %<*gen-substitution-def> 158 | \begin{code} 159 | (Rigid· o δ) [ σ ]t = Rigid o (δ [ σ ]s) 160 | M ﹙ x ﹚ [ σ ]t = nth σ M ❴ x ❵ 161 | ! [ 1⊥ ]t = ! 
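-- Substitution is structural on rigid terms, sends a metavariable use M ﹙ x ﹚
-- to the term assigned to M by σ renamed along x, and fixes !, the only term
-- over the failed metacontext ⊥.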
162 | \end{code} 163 | % 164 | \begin{code} 165 | 166 | 167 | open Common.1ₛ wkₜ _﹙_﹚ public 168 | open Common.Substitution wkₜ _﹙_﹚ public 169 | 170 | 171 | 172 | {- ---------------------- 173 | 174 | Occur check 175 | 176 | -------------------------- -} 177 | 178 | infixl 20 _⑊?ₜ_ 179 | \end{code} 180 | % <*occur-check> 181 | \begin{code} 182 | _⑊?ₜ_ : ∀ {Γ m a} → Tm· Γ a → (M : m ∈ Γ) → Maybe (Tm· (Γ ⑊ M) a) 183 | _⑊?ₛ_ : ∀ {Γ m Δ} → Δ ·⟶· Γ → (M : m ∈ Γ) → Maybe (Δ ·⟶· Γ ⑊ M ) 184 | 185 | Rigid· o ts ⑊?ₜ M = do 186 | ts' ← ts ⑊?ₛ M 187 | ⌊ Rigid· o ts' ⌋ 188 | where open Data.Maybe.Base using (_>>=_) 189 | M' ﹙ y ﹚ ⑊?ₜ M with M' ⑊? M 190 | ... | ⊥ = ⊥ 191 | ... | ⌊ M' ⌋ = ⌊ M' ﹙ y ﹚ ⌋ 192 | 193 | _⑊?ₛ_ (t , ts) M = do 194 | ts' ← ts ⑊?ₛ M 195 | t' ← t ⑊?ₜ M 196 | ⌊ t' , ts' ⌋ 197 | where open Data.Maybe.Base using (_>>=_) 198 | _⑊?ₛ_ [] M = ⌊ [] ⌋ 199 | 200 | open Common.occur-cases public 201 | 202 | occur-check : ∀ {Γ m n} → (M : m ∈ Γ) → Tm· Γ n → occur-cases M n 203 | occur-check M (M' ﹙ x ﹚) with M' ⑊? M 204 | ... | ⊥ = Same-MVar x 205 | ... | ⌊ M' ⌋ = No-Cycle (M' ﹙ x ﹚) 206 | occur-check M t with t ⑊?ₜ M 207 | ... | ⊥ = Cycle 208 | ... | ⌊ t' ⌋ = No-Cycle t' 209 | 210 | module Unification {i j k}(S : Signature i j k) (F : isFriendly S) where 211 | open Signature S 212 | open Tm S 213 | open isFriendly F 214 | 215 | {- ---------------------- 216 | 217 | Pruning 218 | 219 | -------------------------- -} 220 | open Common.PruneUnifyTypes 221 | 222 | \end{code} 223 | %<*prune-sigma-return-type> 224 | \begin{code} 225 | record _∪_⟶? (Γ'' : MetaContext·)(Γ : MetaContext) 226 | : Set (i ⊔ j ⊔ k) where 227 | constructor _◄_﹔_ 228 | field 229 | Δ : MetaContext 230 | δ : Γ'' ·⟶ Δ 231 | σ : Γ ⟶ Δ 232 | \end{code} 233 | % 234 | %<*prune-proto> 235 | \begin{code} 236 | prune : ∀ {Γ a m} → Tm Γ a → hom m a → [ m ]∪ Γ ⟶? 237 | \end{code} 238 | % 239 | %<*prune-sigma-proto> 240 | \begin{code} 241 | prune-σ : ∀ {Γ Γ' Γ''} → (Γ' ·⟶ Γ) → (Γ'' ⇢ Γ') → Γ'' ∪ Γ ⟶? 242 | \end{code} 243 | % 244 | %<*prune-subst> 245 | \begin{code} 246 | prune-σ {Γ} [] [] = Γ ◄ [] ﹔ 1ₛ 247 | prune-σ (t , δ) (x₀ ∷ xs) = 248 | let Δ₁ ◄ t' ﹔ σ₁ = prune t x₀ 249 | Δ₂ ◄ δ' ﹔ σ₂ = prune-σ (δ [ σ₁ ]s) xs 250 | in Δ₂ ◄ (t' [ σ₂ ]t , δ') ﹔ (σ₁ [ σ₂ ]s) 251 | \end{code} 252 | % 253 | %<*prune-rigid> 254 | \begin{code} 255 | prune (Rigid· o δ) x with o { x }⁻¹ 256 | ... | ⊥ = ⊥ ◄ ! ﹔ !ₛ 257 | ... | ⌊ PreImage o' ⌋ = 258 | let Δ ◄ δ' ﹔ σ = prune-σ δ (x ^ o') 259 | in Δ ◄ Rigid o' δ' ﹔ σ 260 | \end{code} 261 | % 262 | %<*prune-flex> 263 | \begin{code} 264 | prune {⌊ Γ ⌋} (M ﹙ x ﹚) y = 265 | let p , x' , y' = pullback x y in 266 | ⌊ Γ [ M ∶ p ] ⌋ ◄ (M ∶ p) ﹙ y' ﹚ ﹔ M ↦-﹙ x' ﹚ 267 | \end{code} 268 | % 269 | \begin{code} 270 | prune ! y = ⊥ ◄ ! ﹔ !ₛ 271 | 272 | 273 | {- ---------------------- 274 | 275 | Unification 276 | 277 | -------------------------- -} 278 | 279 | 280 | \end{code} 281 | %<*unify-flex-prototype> 282 | \begin{code} 283 | unify-flex-* : ∀ {Γ m a} → m ∈ Γ → hom m a → Tm· Γ a → Γ ·⟶? 284 | \end{code} 285 | % 286 | %<*unify-flex-def> 287 | \begin{code} 288 | unify-flex-* {Γ} M x t 289 | with occur-check M t 290 | ... | Same-MVar y = 291 | let p , z = equaliser x y 292 | in ⌊ Γ [ M ∶ p ] ⌋ ◄ M ↦-﹙ z ﹚ 293 | ... | Cycle = ⊥ ◄ !ₛ 294 | ... | No-Cycle t' = 295 | let Δ ◄ u ﹔ σ = prune t' x 296 | in Δ ◄ M ↦ u , σ 297 | \end{code} 298 | % 299 | %<*unifyprototype> 300 | \begin{code} 301 | unify : ∀ {Γ a} → Tm Γ a → Tm Γ a → Γ ⟶? 
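-- unify returns a target metacontext Δ together with a substitution Γ ⟶ Δ;
-- unsolvable problems yield the metacontext ⊥ (the ⊥ ◄ !ₛ clauses below)
-- instead of a Maybe-wrapped result.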
302 | \end{code} 303 | % 304 | %<*unify-sigma-prototype> 305 | \begin{code} 306 | unify-σ : ∀ {Γ Γ'} → (Γ' ⟶ Γ) → (Γ' ⟶ Γ) → (Γ ⟶?) 307 | \end{code} 308 | % 309 | %<*unify-subst> 310 | \begin{code} 311 | unify-σ {Γ} [] [] = Γ ◄ 1ₛ 312 | unify-σ (t₁ , δ₁) (t₂ , δ₂) = 313 | let Δ ◄ σ = unify t₁ t₂ 314 | Δ' ◄ σ' = unify-σ (δ₁ [ σ ]s) (δ₂ [ σ ]s) 315 | in Δ' ◄ σ [ σ' ]s 316 | 317 | unify-σ 1⊥ 1⊥ = ⊥ ◄ !ₛ 318 | \end{code} 319 | % 320 | \begin{code} 321 | unify t (M ﹙ x ﹚) = unify-flex-* M x t 322 | unify (M ﹙ x ﹚) t = unify-flex-* M x t 323 | \end{code} 324 | %<*unify-rigid> 325 | \begin{code} 326 | unify (Rigid· o δ) (Rigid· o' δ') with o ≟ o' 327 | ... | no _ = ⊥ ◄ !ₛ 328 | ... | yes ≡.refl = unify-σ δ δ' 329 | \end{code} 330 | % 331 | %<*unify-fail> 332 | \begin{code} 333 | unify ! ! = ⊥ ◄ !ₛ 334 | \end{code} 335 | % 336 | \begin{code} 337 | -------------------------------------------------------------------------------- /main.ml: -------------------------------------------------------------------------------- 1 | let rec map3 f l1 l2 l3 = 2 | match (l1, l2, l3) with 3 | ([], [],[]) -> [] 4 | | (a1::l1, a2::l2, a3::l3) -> let r = f a1 a2 a3 in r :: map3 f l1 l2 l3 5 | | (_, _,_) -> invalid_arg "List.map3" 6 | 7 | 8 | module type Signature = 9 | sig 10 | type arity 11 | type variableContext = arity 12 | type symbol 13 | type renaming 14 | 15 | val renamingToString : renaming -> string 16 | val symbolToString : symbol -> string 17 | 18 | val composeRenamings : renaming -> renaming -> renaming 19 | val renameSymbol : symbol -> renaming -> symbol 20 | val symbolArity : variableContext -> symbol -> arity list 21 | val arityFunctor : variableContext -> symbol -> renaming -> renaming list 22 | val pullback : renaming -> renaming -> arity * renaming * renaming 23 | val equaliser : renaming -> renaming -> arity * renaming 24 | end 25 | 26 | module type BindingSignature = 27 | sig 28 | type symbol 29 | val arity : symbol -> int list 30 | val symbolToString : symbol -> string 31 | end 32 | 33 | module SignatureOfBSig (S : BindingSignature) = 34 | struct 35 | type arity = int 36 | type variableContext = arity 37 | type symbol = 38 | O of S.symbol 39 | | Var of int 40 | type renaming = int list 41 | 42 | let symbolToString : symbol -> string = function Var n -> string_of_int n 43 | | O o -> S.symbolToString o 44 | 45 | 46 | let renv (x : arity)(r : renaming) : arity = 47 | List.nth r (x - 1) 48 | 49 | let renamingToString (l : renaming) : string = 50 | String.concat ", " (List.map Int.to_string l) 51 | 52 | let renameSymbol (o : symbol) (x : renaming) = match o with 53 | Var n -> Var (renv n x) 54 | |_ -> o;; 55 | 56 | let symbolArity (n : variableContext) = function 57 | Var _ -> [] 58 | | O o -> List.map ((+) n) (S.arity o) 59 | 60 | 61 | let composeRenamings (x : renaming)(y : renaming) : renaming = 62 | List.map (fun n -> renv n y) x 63 | 64 | let equaliser (x : renaming)(y : renaming) : arity * renaming = 65 | let rec aux idx l1 l2 = 66 | match l1, l2 with 67 | [], [] -> [] 68 | | t1 :: q1, t2 :: q2 -> 69 | if t1 = t2 then idx :: aux (idx + 1) q1 q2 else aux (idx + 1) q1 q2 70 | | _ -> failwith "invalid arg: equaliser_lc" 71 | in 72 | let z = aux 1 x y in 73 | List.length z, z 74 | 75 | 76 | let rec find_idx (x : 'a) = function [] -> raise Not_found 77 | | t :: q -> 78 | if t = x then 79 | 1 80 | else 81 | 1 + find_idx x q 82 | 83 | let pullback (x : renaming)(y : renaming) : arity * renaming * renaming = 84 | let rec aux1 idx l1 : renaming * renaming = 85 | match l1 with 86 | [] -> [], [] 87 | | t1 
:: q1 -> 88 | let (p,q) = aux1 (idx + 1) q1 in 89 | try let i = find_idx t1 y in 90 | (idx::p, i :: q) 91 | with 92 | Not_found -> (p,q) 93 | in 94 | let (l1, l2) = aux1 1 x in 95 | (List.length l1, l1, l2) 96 | 97 | let rec arityFunctor (n : arity) (o : symbol) (r : renaming) : renaming list = 98 | match o with 99 | Var _ -> [] 100 | | O o -> 101 | List.map (fun ar -> r @ List.init ar (fun p -> n + p + 1)) (S.arity o) 102 | end 103 | 104 | (*** 105 | 106 | Example: lambda calculus 107 | 108 | ** *) 109 | 110 | module LambdaCalculusBSig = struct 111 | 112 | type symbol = App | Abs 113 | let arity = function App -> [0; 0] | Abs -> [1];; 114 | 115 | let symbolToString = function App -> "@" | Abs -> "λ" 116 | 117 | end 118 | 119 | module LambdaCalculus = SignatureOfBSig(LambdaCalculusBSig) 120 | 121 | (* module LCSig : Signature = struct 122 | include LambdaCalculus;; 123 | end *) 124 | 125 | (* 126 | 127 | Unification algorithm 128 | 129 | *) 130 | 131 | module Unification (S : Signature) = struct 132 | exception NoUnifier 133 | open S 134 | type name = string 135 | 136 | type syntax = 137 | Op of symbol * syntax list 138 | | MVar of name * renaming 139 | 140 | let rec syntaxToString = 141 | function 142 | MVar (n, r) -> n ^ "(" ^ renamingToString r ^ ")" 143 | | Op (o, l) -> if l = [] then symbolToString o else 144 | symbolToString o ^ "(" ^ 145 | (String.concat ", " 146 | (List.map syntaxToString l)) 147 | ^ ")" ;; 148 | 149 | let rec renameSyntax (variableContext : arity) (t : syntax) (r : renaming) : syntax = 150 | (match t with 151 | MVar (n, x) -> MVar (n, composeRenamings r x) 152 | | Op (o, argument) -> Op (renameSymbol o r, 153 | map3 renameSyntax 154 | (symbolArity variableContext o) 155 | argument 156 | (arityFunctor variableContext o r)));; 157 | 158 | 159 | type substitution = (name * arity * syntax) list 160 | 161 | let substitutionToString (sigma : substitution) : string = 162 | String.concat ", " 163 | (List.map (function (n, _, t) -> n ^ " ↦ " ^ 164 | syntaxToString t) sigma) 165 | 166 | let rec substitute (t : syntax)(sigma : substitution) : syntax = 167 | match t with 168 | Op (o, args) -> Op (o, List.map (fun t -> substitute t sigma) args) 169 | | MVar (name, x) -> 170 | try 171 | let _, a, u = (List.find (fun (name2, _, _) -> name == name2) sigma) in 172 | renameSyntax a u x 173 | with 174 | Not_found -> MVar (name,x) 175 | 176 | let rec composeSubstitution (sigma1 : substitution) (sigma2 : substitution) = 177 | (List.map (function (x,a,t) -> (x,a, substitute t sigma2)) sigma1) 178 | @ sigma2 179 | 180 | 181 | let id_substitution : substitution = [] 182 | 183 | let replaceMVar (x : name)(a : arity) (t : syntax) (sigma : substitution) 184 | : substitution = (x, a, t)::sigma 185 | 186 | 187 | let rec occur_check (x : name)(t : syntax) : bool = 188 | match t with 189 | MVar (n, _) -> x == n 190 | | Op (_, l) -> List.exists (occur_check x) l 191 | 192 | let rec prune (variableContext : variableContext) (t : syntax)(x : renaming) : 193 | (syntax * substitution) = 194 | match t with 195 | | Op (o, args) -> 196 | let (ws, sigma) = prune_list (symbolArity variableContext o) args (arityFunctor variableContext o x) in 197 | Op (o, ws), sigma 198 | | MVar (name, y) -> 199 | let (newArity, p, q) = pullback x y in 200 | MVar(name, p), replaceMVar name newArity (MVar (name, q)) id_substitution 201 | 202 | and prune_list (variableContexts : variableContext list) 203 | (ts : syntax list) (xs : renaming list) 204 | : syntax list * substitution = 205 | match variableContexts, ts, xs with 
206 | [], [], [] -> [], id_substitution 207 | | a1 :: al1, t1 :: q1 , x1 :: x2 -> 208 | let (w1, sigma1) = prune a1 t1 x1 in 209 | let q1s = List.map (fun t -> substitute t sigma1) q1 in 210 | let (w2, sigma2) = prune_list al1 q1s x2 in 211 | (substitute w1 sigma2 :: w2), composeSubstitution sigma1 sigma2 212 | | _ -> failwith "invalid args: prune-list" 213 | 214 | 215 | let rec unify variableContext (t : syntax)(u : syntax) : substitution = match t,u with 216 | Op (o1, _), Op (o2, _) when o1 <> o2 -> raise NoUnifier 217 | | Op (o, l1), Op (_, l2) -> unify_list (symbolArity variableContext o) l1 l2 218 | | MVar (name, x1), MVar (name2, x2) when name = name2 -> 219 | let (newArity, y) = equaliser x1 x2 in 220 | id_substitution 221 | |> replaceMVar name newArity (* UNSURE!! *) 222 | (MVar(name, y)) 223 | | MVar (name, x), v | v, MVar (name, x) -> 224 | if occur_check name v then 225 | raise NoUnifier 226 | else 227 | let (w, sigma) = prune variableContext v x in 228 | replaceMVar name variableContext w sigma (* UNSURE!! *) 229 | 230 | 231 | 232 | and unify_list (variableContexts : variableContext list) ts us = 233 | match variableContexts, ts, us with 234 | [], [], [] -> id_substitution 235 | | a1 :: al1, t1 :: q1 , t2 :: q2 -> 236 | let sigma1 = unify a1 t1 t2 in 237 | let mapsigma = List.map (fun t -> substitute t sigma1) in 238 | let sigma2 = unify_list al1 (mapsigma q1) (mapsigma q2) in 239 | composeSubstitution sigma1 sigma2 240 | | _ -> failwith "invalid args: unify_list" 241 | end 242 | 243 | 244 | module LCUnification = Unification(LambdaCalculus) 245 | 246 | let unify_lc (n : LambdaCalculus.variableContext)(t : LCUnification.syntax) u : string = 247 | LCUnification.unify n t u |> LCUnification.substitutionToString 248 | 249 | open LCUnification;; 250 | 251 | unify_lc 3 (MVar ("M", [1; 2])) (MVar ("M", [2; 1]));; 252 | unify_lc 3 (MVar ("M", [1; 3])) (MVar ("M", [2;3]));; 253 | unify_lc 3 (MVar ("M", [1; 2])) (MVar ("N", [2; 1]));; 254 | 255 | (* agad-categories *) -------------------------------------------------------------------------------- /main.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/amblafont/unification/7d658c077f2582d018806136dc956b35632454dd/main.pdf -------------------------------------------------------------------------------- /nominal.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/amblafont/unification/7d658c077f2582d018806136dc956b35632454dd/nominal.pdf -------------------------------------------------------------------------------- /occurcheckind.agda: -------------------------------------------------------------------------------- 1 | {-# OPTIONS --type-in-type #-} 2 | open import Data.List hiding ([_]) 3 | open import Data.Unit hiding (_≤_) 4 | open import Data.Empty 5 | open import Data.Nat hiding (_^_) 6 | open import Data.Nat.Properties as ℕₚ 7 | open import Data.Product hiding (map) 8 | open import Data.Sum hiding (map) 9 | open import Agda.Builtin.Equality 10 | open import Relation.Binary.PropositionalEquality using (sym ; cong ; trans) 11 | module occurcheckind where 12 | ⊥-elim-d : ∀ {w } {Whatever : ⊥ → Set w} → (witness : ⊥ ) → Whatever witness 13 | ⊥-elim-d () 14 | 15 | le0 : ∀ n p → n + p ≤ p → n ≡ 0 16 | le0 = {!!} 17 | 18 | eq-le : ∀ n p → n ≡ p → n ≤ p 19 | eq-le = {!!} 20 | 21 | funext : ∀{A}{B : A → Set}(f g : ∀ a → B a) → (∀ a → f a ≡ g a) → f ≡ g 22 | funext = {!!} 23 | -- I can't figure out how to 
find lemmas about max in the standard lib, so I define mine 24 | max : ℕ → List ℕ → ℕ 25 | max _ [] = 0 26 | max p (x ∷ l) with ℕₚ.≤-total x (max p l) 27 | ... | inj₁ _ = max p l 28 | ... | inj₂ _ = x 29 | 30 | maxl : ∀ {n p l q} → p ≥ q → max n (p ∷ l) ≥ q 31 | maxl {n} {p}{l}{q} lepq with ℕₚ.≤-total p (max n l) 32 | ... | inj₁ x = ≤-trans lepq x 33 | ... | inj₂ y = lepq 34 | 35 | maxr : ∀ {n p l q} → max n l ≥ q → max n (p ∷ l) ≥ q 36 | maxr {n} {p}{l}{q} lepq with ℕₚ.≤-total p (max n l) 37 | ... | inj₁ x = lepq 38 | ... | inj₂ y = ≤-trans lepq y 39 | 40 | record is-cat (C : Set) : Set where 41 | field 42 | hom : C → C → Set 43 | _·_ : ∀ {a b c : C} → hom a b → hom b c → hom a c 44 | 45 | open is-cat {{...}} public 46 | 47 | instance 48 | SetCat : is-cat Set 49 | hom {{ SetCat }} X Y = X → Y 50 | _·_ {{ SetCat}} f g = λ x → g (f x) 51 | 52 | record is-functor {C : Set} {D : Set} {{ Cc : is-cat C }} {{ Dc : is-cat D}} (F : C → D) : Set where 53 | field 54 | homF : ∀ {a b} → hom a b → hom (F a) (F b) 55 | 56 | open is-functor {{ ... }} public 57 | 58 | ∥_∥ : {C D : Set} ⦃ Cc : is-cat C ⦄ ⦃ Dc : is-cat D ⦄ (F : C → D) 59 | ⦃ r : is-functor F ⦄ {a b : C} → 60 | hom a b → hom (F a) (F b) 61 | ∥ F ∥ f = homF f 62 | 63 | record Functor (C : Set) (D : Set){{ Cc : is-cat C}}{{ Dc : is-cat D}} : Set where 64 | -- private module C = Category C 65 | -- private module D = Category D 66 | field 67 | obF : C → D 68 | instance Functor-is-func : is-functor obF 69 | -- is-func : ∀ {a b : C} → hom a b → hom (∣_∣ a) (∣_∣ b) 70 | 71 | open Functor public 72 | 73 | module _ (D : Set) {{ Dc : is-cat D}} (I : Set)(L : I → List (Functor D D))(S : I → D → Set){{ SF : ∀ {i} → is-functor (S i)}} where 74 | 75 | _⇒_ : ∀ (X Y : D → Set) → Set 76 | X ⇒ Y = ∀ d → X d → Y d 77 | 78 | 79 | data M[_]^_ : (X : D → Set) → List D → Set 80 | data M (X : D → Set) : D → Set 81 | 82 | data M[_]^_ where 83 | M^[] : ∀ X → M[ X ]^ [] 84 | _M^::_ : ∀ {X} {d}{l} → M X d → M[ X ]^ l → M[ X ]^ (d ∷ l) 85 | 86 | -- on pourrait faire une version avec une liste de paires de foncteur et de M X 87 | data M X where 88 | η : ∀ {d} → X d → M X d 89 | op : ∀ i {d} → S i d → (M[ X ]^ (map (λ F → obF F d) (L i))) → M X d 90 | 91 | _[_] : ∀ {X Y : D → Set}{d} → M X d → (X ⇒ M Y) → M Y d 92 | _[_]l : ∀ {X Y : D → Set}{l} → M[ X ]^ l → (X ⇒ M Y) → M[ Y ]^ l 93 | 94 | η x [ σ ] = σ _ x 95 | op i s x [ σ ] = op i s (x [ σ ]l) 96 | 97 | M^[] _ [ σ ]l = M^[] _ 98 | (x M^:: ms) [ σ ]l = (x [ σ ]) M^:: (ms [ σ ]l) 99 | 100 | [][] : ∀ {X Y Z : D → Set}{d} (t : M X d) (u : X ⇒ M Y) (v : Y ⇒ M Z) → (t [ u ]) [ v ] ≡ (t [ (λ d x → u d x [ v ]) ]) 101 | [][]l : ∀ {X Y Z : D → Set}{l} (t : M[ X ]^ l) (u : X ⇒ M Y) (v : Y ⇒ M Z) → (t [ u ]l) [ v ]l ≡ (t [ (λ d x → u d x [ v ]) ]l) 102 | 103 | [][] (η x) u v = refl 104 | [][] (op i x ms) u v = cong (op i x) ([][]l ms u v) 105 | 106 | [][]l (M^[] _) u v = refl 107 | [][]l (x M^:: t) u v rewrite [][] x u v = cong (_M^::_ _) ([][]l t u v) 108 | 109 | record has-weight (C : D → Set) : Set where 110 | field 111 | o : ∀ {d} → C d → ℕ 112 | open has-weight {{ ... 
}} public 113 | 114 | 115 | ∣_∣ : ∀ {X}{{oX : has-weight X}}{d} → M X d → ℕ 116 | ∣_∣l : ∀ {X}{{oX : has-weight X}}{l} → M[ X ]^ l → List ℕ 117 | 118 | ∣ η x ∣ = o x 119 | ∣ op i s x ∣ = suc (max 0 ∣ x ∣l) 120 | 121 | ∣_∣l (M^[] _) = [] 122 | ∣_∣l (x M^:: ms) = ∣ x ∣ ∷ ∣ ms ∣l 123 | 124 | hs : ∀ {X}{l} → M[ X ]^ l → List ℕ 125 | h : ∀ {X}{d} → M X d → ℕ 126 | 127 | h (η x) = 0 128 | h (op i s x) = suc (max 0 (hs x)) 129 | 130 | hs (M^[] _) = [] 131 | hs (x M^:: ms) = h x ∷ hs ms 132 | 133 | _+ᵢ_ : (D → Set) → (D → Set) → (D → Set) 134 | (X +ᵢ Y) d = X d ⊎ Y d 135 | 136 | 137 | is-closed : ∀ {X}{d} → M X d → M (λ _ → ⊥) d ⊎ ℕ 138 | are-closed : ∀ {X}{l} → M[ X ]^ l → (M[ (λ _ → ⊥) ]^ l) ⊎ ℕ 139 | 140 | are-closed (M^[] _) = inj₁ (M^[] _) 141 | are-closed (m M^:: ms) with are-closed ms | is-closed m 142 | ... | inj₁ ms | inj₁ m = inj₁ (m M^:: ms) 143 | ... | inj₂ n | _ = inj₂ n 144 | ... | inj₁ ms | inj₂ n = inj₂ n 145 | -- ... | inj₁ ms | inj₂ n = inj₂ n 146 | 147 | is-closed (η x) = inj₂ 0 148 | is-closed (op i s ms) with are-closed ms 149 | ... | inj₁ ms = inj₁ (op i s ms) 150 | ... | inj₂ n = inj₂ (1 + n) 151 | 152 | 153 | 154 | -- define h u as min h (u d) 155 | -- then h (m [ u ]) >= h u + || m || 156 | {- 157 | Does it allow to show the think we want about the pullback of 158 | MA 159 | ↓ ? 160 | A → M0 161 | u 162 | 163 | Assume given an element t ∈ MA_d sucht that || t || exists and such that 164 | u_i = t [ u ] 165 | Then, | u_i | ≥ | u | + ||t || but if u is flat then |u_i| = |u| and we can conclude 166 | that || t || = 0 167 | but flatness does not work for the argument M ↦ M(-+1) 168 | -} 169 | h-comp : ∀ {X}{Y}{{oY : has-weight Y}}a → (u : X ⇒ M Y) (m : M X a) → ∀ n (p : ℕ) → is-closed m ≡ inj₂ n → 170 | (∀ d x → ∣ u d x ∣ ≥ p) → ∣ m [ u ] ∣ ≥ n + p 171 | 172 | hs-comp : ∀ {X}{Y}{{oY : has-weight Y}}l → (u : X ⇒ M Y) (ms : M[ X ]^ l) → ∀ n (p : ℕ) → are-closed ms ≡ inj₂ n → 173 | (∀ d x → ∣ u d x ∣ ≥ p) → max 0 ∣ ms [ u ]l ∣l ≥ n + p 174 | 175 | h-comp a u (η x) .0 p refl hp = hp a x 176 | h-comp a u (op i s ms) n p cm hp with are-closed ms in eq 177 | h-comp a u (op i s ms) .(1 + n) p refl hp | inj₂ n = s≤s aux 178 | where 179 | aux : (max 0 ∣ ms [ u ]l ∣l) ≥ n + p 180 | aux = hs-comp (map (λ F → obF F a) (L i)) u ms n p eq hp 181 | 182 | hs-comp .(_ ∷ _) u (_M^::_ {d = d}{l = l} m ms) n p cm hp with are-closed ms in eqms | is-closed m in eqm 183 | hs-comp .(_ ∷ _) u (_M^::_ {d = d} {l = l} m ms) .n p refl hp | inj₁ x | inj₂ n = aux 184 | where 185 | tete : ∣ m [ u ] ∣ ≥ n + p 186 | tete = h-comp d u m n p eqm hp 187 | 188 | aux : max 0 (∣ m [ u ] ∣ ∷ ∣ ms [ u ]l ∣l) ≥ n + p 189 | aux = maxl tete 190 | 191 | hs-comp .(_ ∷ _) u (_M^::_ {d = d} {l} m ms) .n p refl hp | inj₂ n | cm' = aux 192 | where 193 | queue : max 0 ∣ ms [ u ]l ∣l ≥ n + p 194 | queue = hs-comp l u ms n p eqms hp 195 | 196 | -- aux : max 0 (h (m [ u ]) ∷ hs (ms [ u ]l)) ≥ n + p 197 | aux : max 0 (∣ m [ u ] ∣ ∷ ∣ ms [ u ]l ∣l) ≥ n + p 198 | aux = maxr {n = 0}{p = ∣ m [ u ] ∣} {l = ∣ ms [ u ]l ∣l} queue 199 | 200 | is-flat : ∀ {X}{Y}{{oY : has-weight Y}} → (X ⇒ M Y) → Set 201 | is-flat {X} f = ∀ {d d' : D} (x : X d)(x' : X d') → ∣ f d x ∣ ≡ ∣ f d' x' ∣ 202 | 203 | {- 204 | We want to show that 205 | M 0 → M A 206 | ↓ ↓ 207 | M 0 → M 0 + ℕ 208 | is a pullback 209 | 210 | By the pullback lemma, it is equivalent to show that the following is a pullback 211 | M 0 → M A 212 | ↓ ↓ 213 | M 0 + ℕ 214 | ↓ 215 | 1 → 1 + 1 216 | inl 217 | 218 | This means that given t ∈ M A d such that is-closed t = inj₁ u , there exists a 
unique u' 219 | such that t = M i (u') 220 | Of course, this is going to be u. 221 | For uniqueness, it is enough to show that is-closed (M i u) = inj₁ u for any u 222 | 223 | 224 | -} 225 | pbk-unique : ∀ {A}{d} → (u : M (λ _ → ⊥) d) → is-closed {X = A}(u [ (λ d₁ → ⊥-elim) ]) ≡ inj₁ u 226 | pbks-unique : ∀ {A}{l} → (u : M[ (λ _ → ⊥) ]^ l) → are-closed {X = A}(u [ (λ d₁ → ⊥-elim) ]l) ≡ inj₁ u 227 | 228 | pbk-unique {A} {d} (op i s ms) rewrite pbks-unique {A}{map (λ F → obF F d) (L i)} ms = refl 229 | 230 | 231 | pbks-unique {A} {.[]} (M^[] .(λ _ → ⊥)) = refl 232 | pbks-unique {A} {.(_ ∷ _)} (x M^:: u) rewrite pbk-unique {A} x | pbks-unique {A} u = refl 233 | 234 | pbk-exist : ∀ {A}{d} → (t : M A d)(u : M (λ _ → ⊥) d) → is-closed {X = A} t ≡ inj₁ u 235 | → t ≡ u [ (λ d₁ → ⊥-elim) ] 236 | 237 | pbks-exist : ∀ {A}{l} → (t : M[ A ]^ l)(u : M[ (λ _ → ⊥) ]^ l ) → are-closed {X = A} t ≡ inj₁ u 238 | → t ≡ u [ (λ d₁ → ⊥-elim) ]l 239 | pbk-exist {A} {d} (op i s x) u ct with are-closed x in eq 240 | pbk-exist {A} {d} (op i s x) .(op i s u) refl | inj₁ u rewrite pbks-exist x u eq = refl 241 | 242 | pbks-exist (M^[] _) .(M^[] (λ _ → ⊥)) refl = refl 243 | pbks-exist (x M^:: t) u ct with are-closed t in eqt | is-closed x in eqx 244 | pbks-exist (x M^:: t) .(x₂ M^:: x₁) refl | inj₁ x₁ | inj₁ x₂ rewrite pbk-exist x x₂ eqx | pbks-exist t x₁ eqt = refl 245 | 246 | 247 | instance 248 | ⊥-has-weight : has-weight (λ _ → ⊥) 249 | o {{ ⊥-has-weight}} = ⊥-elim 250 | 251 | 252 | 253 | -- The main result 254 | {- 255 | A → M0 flat 256 | then the pullback 257 | MA 258 | ↓ 259 | A → M0 260 | is A + A×M_0 A 261 | -} 262 | main-result : ∀ {A}(u : A ⇒ M (λ _ → ⊥)) → is-flat u → ∀ d (a : A d) (t : M A d) → u d a ≡ t [ u ] → (Σ (A d) λ a' → t ≡ η a' × u d a ≡ u d a') ⊎ t ≡ u d a [ (λ _ → ⊥-elim) ] 263 | main-result {A} u fl d a t eq with is-closed t in eqt 264 | ... | inj₁ x rewrite eq | pbk-exist t x eqt rewrite [][] x (λ k → ⊥-elim) u 265 | = inj₂ (trans (cong (_[_] x) (funext _ _ λ a₁ → funext _ _ ⊥-elim-d)) (sym ([][] x _ (λ k → ⊥-elim)) )) 266 | 267 | ... | inj₂ n with n-0 (h-comp {A} d u t n ∣ u d a ∣ eqt λ d₁ x → eq-le _ _ (fl a x)) 268 | where n-0 : ∣ t [ u ] ∣ ≥ n + ∣ u d a ∣ → n ≡ 0 269 | n-0 h rewrite eq = le0 _ _ h 270 | main-result {A} u fl d a (η x) eq | inj₂ .0 | refl = inj₁ (x , (refl , eq)) 271 | main-result {A} u fl d a (op i x ms) eq | inj₂ .0 | refl with are-closed ms 272 | ... | inj₁ x = ⊥-elim (impossible eqt) 273 | where impossible : inj₁ (op i _ x) ≡ inj₂ 0 → ⊥ 274 | impossible () 275 | ... 
| inj₂ y = ⊥-elim (impossible eqt) 276 | where impossible : inj₂ (suc y) ≡ inj₂ 0 → ⊥ 277 | impossible () 278 | 279 | 280 | -- reste a montrer que le yoneda est de la meme taille que l'element 281 | -- yoneda lemma 282 | 283 | y = hom {{ Dc}} 284 | 285 | _ʸ : ∀ {X : D → Set}{{XF : is-functor X}}{d} → X d → y d ⇒ X 286 | _ʸ {X}{d} x d' f = ∥ X ∥ f x 287 | 288 | instance 289 | y-is-functor : ∀ {d} → is-functor (y d) 290 | homF {{y-is-functor {d} }} f x = x · f 291 | 292 | 293 | instance 294 | M-is-functor : ∀ {X} {{ XF : is-functor X}} → is-functor (M X) 295 | Ml-is-functor : ∀ {X} {{ XF : is-functor X}} {l} → is-functor (λ d → M[ X ]^ (map (λ F → obF F d) l) ) 296 | homF ⦃ M-is-functor {X} ⦃ XF ⦄ ⦄ f (η x) = η (∥ X ∥ f x) 297 | homF ⦃ M-is-functor {X} ⦃ XF ⦄ ⦄ f (op i s ms) = op i (homF f s) (homF {{ r = Ml-is-functor }} f ms) 298 | homF ⦃ Ml-is-functor {X} ⦃ XF ⦄ {[]} ⦄ f ms = ms 299 | homF ⦃ Ml-is-functor {X} ⦃ XF ⦄ {F ∷ l} ⦄ f (m M^:: ms) = homF {{ r = M-is-functor}} (homF f) m M^:: homF {{ r = Ml-is-functor}} f ms where open Functor F 300 | 301 | module _ (X : D → Set){{XF : is-functor X}} where 302 | instance 303 | X-has-weight : has-weight X 304 | o {{ X-has-weight }} = λ _ → 0 305 | size-y : ∀ d (u : M X d) → ∀ (a : D) (f : hom d a) → ∣ u ∣ ≡ ∣ (u ʸ) a f ∣ 306 | size-ys : ∀ d (l : List (Functor D D)) 307 | (ms : M[ X ]^ map (λ F → obF F d) l) → 308 | (a : D) 309 | (f : hom d a) → 310 | ∣ ms ∣l ≡ ∣ (is-functor.homF Ml-is-functor f ms) ∣l 311 | size-y d (η x) a f = refl 312 | size-y d (op i s ms) a f rewrite size-ys d (L i) ms a f = refl 313 | 314 | size-ys d [] ms a f = refl 315 | size-ys d (F ∷ l) (m M^:: ms) a f rewrite size-y (obF F d) m (obF F a) (homF {{ r = Functor.Functor-is-func F }} f) | size-ys d l ms a f = refl 316 | 317 | y-is-flat : ∀ d (u : M X d) → is-flat (u ʸ) 318 | y-is-flat d u = λ x x' → trans (sym (size-y d u _ x)) (size-y d u _ x') 319 | 320 | -- final : ∀ (X : D → Set) {{ XF : is-functor X}} a b (u : M X a)(f : y a b) (t : M (y b) ) → 321 | -- homF f u ≡ ? 
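-- Note on the remaining holes: the lemmas le0 and eq-le left as holes at the
-- top of this file should be derivable from Data.Nat.Properties (presumably
-- m+n≤n⇒m≡0 and ≤-reflexive, modulo the exact standard-library version),
-- whereas funext is not provable in plain Agda and would have to be postulated.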
322 | 323 | 324 | 325 | 326 | 327 | -------------------------------------------------------------------------------- /pbkB-present.json: -------------------------------------------------------------------------------- 1 | {"graph":{"edges":[{"from":1,"id":6,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":3,"id":7,"label":{"label":"","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":1},{"from":3,"id":8,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":4},{"from":4,"id":9,"label":{"label":"u","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":0,"id":10,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":5},{"from":5,"id":11,"label":{"label":"\\mu","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":1}],"latexPreamble":"","nodes":[{"id":0,"label":{"isMath":true,"label":"M(A+B)","pos":[700,100]}},{"id":1,"label":{"isMath":true,"label":"M(A+1)","pos":[700,500]}},{"id":2,"label":{"isMath":true,"label":"M1","pos":[700,700]}},{"id":3,"label":{"isMath":true,"label":"A+A\\times_{M1}A","pos":[300,500]}},{"id":4,"label":{"isMath":true,"label":"A","pos":[300,700]}},{"id":5,"label":{"isMath":true,"label":"MM(A+1)","pos":[700,300]}}],"sizeGrid":200},"version":5} -------------------------------------------------------------------------------- /pbkBsimp.json: -------------------------------------------------------------------------------- 1 | 
{"graph":{"edges":[{"from":1,"id":11,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":3,"id":12,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":4},{"from":3,"id":13,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":5},{"from":5,"id":14,"label":{"label":"u","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":0,"id":15,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":6},{"from":6,"id":16,"label":{"label":"\\mu","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":1},{"from":4,"id":17,"label":{"label":"in_1","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":7},{"from":7,"id":18,"label":{"label":"\\cong","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":1},{"from":8,"id":19,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":true,"head":"none","position":0.5,"tail":"none"}},"to":4},{"from":9,"id":20,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":true,"head":"none","position":0.5,"tail":"none"}},"to":8},{"from":10,"id":21,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":true,"head":"none","position":0.5,"tail":"none"}},"to":3},{"from":10,"id":22,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":9},{"from":9,"id":23,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":0},{"from":8,"id":24,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":6}],"latexPreamble":"","nodes":[{"id":0,"label":{"isMath":true,"label":"M(A+B)","pos":[900,100]}},{"id":1,"label":{"isMath":true,"label":"M(A+1)","pos":[900,500]}},{"id":2,"label":{"isMath":true,"label":"M1","pos":[900,700]}},{"id":3,"label":{"isMath":true,"label":"A\\times_{M1} A","pos":[300,500]}},{"id":4,"label":{"isMath":true,"label":"A","pos":[500,500]}},{"id":5,"label":{"isMath":true,"label":"A","pos":[300,700]}},{"id":6,"label":{"isMath":true,"label":"MM(A+1)","pos":[900,300]}},{"id":7,"label":{"isMath":true,"label":"A+1+GM(A+1)","pos":[700,500]}},{"id":8,"label":{"isMath":true,"label":"A","pos":[500,300]}},{"id":9,"label":{"isMath":true,"label":"A","pos":[500,100]}},{"id":10,"label":{"isMath":true,"label":"A\\times_{M1} A","pos":[300,100]}}],"sizeGrid":200},"version":5} -------------------------------------------------------------------------------- /pbkBsimp2.json: -------------------------------------------------------------------------------- 1 | 
{"graph":{"edges":[{"from":1,"id":24,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":3,"id":25,"label":{"label":"u","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":4},{"from":3,"id":26,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":5},{"from":5,"id":27,"label":{"label":"u","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":0,"id":28,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":6},{"from":6,"id":29,"label":{"label":"\\mu","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":1},{"from":7,"id":30,"label":{"label":"\\mu","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":4},{"from":8,"id":31,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":7},{"from":9,"id":32,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":3},{"from":9,"id":33,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":8},{"from":8,"id":34,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":0},{"from":7,"id":35,"label":{"label":"MMin_2","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":6},{"from":4,"id":36,"label":{"label":"Min_2","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":1},{"from":10,"id":37,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":11},{"from":13,"id":38,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":12},{"from":13,"id":39,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":10},{"from":12,"id":40,"label":{"label":"Min_2","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":11},{"from":8,"id":41,"label":{"label":"","style":{"alignment":"left","bend":-0.2,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":13},{"from":14,"id":42,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":15},{"from":17,"id":43,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":16},{"from":17,"id":44,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":14},{"from":16,"id":45,"label":{"label":"Min_2","style":{"alignment":"left","bend":0,"dashed":false,"double":fal
se,"head":"default","position":0.5,"tail":"none"}},"to":15},{"from":13,"id":46,"label":{"label":"","style":{"alignment":"left","bend":0.4,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":17},{"from":21,"id":47,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":18},{"from":20,"id":48,"label":{"label":"Min_2","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":19},{"from":13,"id":49,"label":{"label":"","style":{"alignment":"left","bend":-0.2,"dashed":true,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":21},{"from":18,"id":50,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":22},{"from":22,"id":51,"label":{"label":"Min_2","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":19},{"from":21,"id":52,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":23},{"from":23,"id":53,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":true,"head":"none","position":0.5,"tail":"none"}},"to":20},{"from":23,"id":54,"label":{"label":"","style":{"alignment":"left","bend":0,"dashed":false,"double":true,"head":"none","position":0.5,"tail":"none"}},"to":22}],"latexPreamble":"","nodes":[{"id":0,"label":{"isMath":true,"label":"M(A+B)","pos":[700,100]}},{"id":1,"label":{"isMath":true,"label":"M(A+1)","pos":[700,500]}},{"id":2,"label":{"isMath":true,"label":"M1","pos":[700,700]}},{"id":3,"label":{"isMath":true,"label":"A","pos":[300,500]}},{"id":4,"label":{"isMath":true,"label":"M1","pos":[500,500]}},{"id":5,"label":{"isMath":true,"label":"A","pos":[300,700]}},{"id":6,"label":{"isMath":true,"label":"MM(A+1)","pos":[700,300]}},{"id":7,"label":{"isMath":true,"label":"MM1","pos":[500,300]}},{"id":8,"label":{"isMath":true,"label":"MB","pos":[500,100]}},{"id":9,"label":{"isMath":true,"label":"A\\times_{M1} MB","pos":[300,100]}},{"id":10,"label":{"isMath":true,"label":"(A+B)","pos":[1100,100]}},{"id":11,"label":{"isMath":true,"label":"M(A+1)","pos":[1100,300]}},{"id":12,"label":{"isMath":true,"label":"M1","pos":[900,300]}},{"id":13,"label":{"isMath":true,"label":"0+B","pos":[900,100]}},{"id":14,"label":{"isMath":true,"label":"A","pos":[1100,500]}},{"id":15,"label":{"isMath":true,"label":"M(A+1)","pos":[1100,700]}},{"id":16,"label":{"isMath":true,"label":"M1","pos":[900,700]}},{"id":17,"label":{"isMath":true,"label":"0","pos":[900,500]}},{"id":18,"label":{"isMath":true,"label":"B","pos":[1500,100]}},{"id":19,"label":{"isMath":true,"label":"M(A+1)","pos":[1500,500]}},{"id":20,"label":{"isMath":true,"label":"M1","pos":[1300,500]}},{"id":21,"label":{"isMath":true,"label":"B","pos":[1300,100]}},{"id":22,"label":{"isMath":true,"label":"M1","pos":[1500,300]}},{"id":23,"label":{"isMath":true,"label":"M1","pos":[1300,300]}}],"sizeGrid":200},"version":5} -------------------------------------------------------------------------------- /popl24-response.md: -------------------------------------------------------------------------------- 1 | We thank the referees for their useful comments and suggestions. 
2 | 3 | 4 | OVERVIEW 5 | -------------- 6 | 7 | Reviews A, C, and D express concerns about the novelty of the work, and D specifically asks if the categorical semantics is novel. We believe our semantics is indeed novel: as review A notes, there are no prior extensions of the Rydeheard and Burstall proof to pattern unification. This is because none of the prior semantics for metavariables (eg, Hamana[1], Fiore and Hur[2], and Hsu et al[3]) are suitable for this purpose: they all permit interpreting general metavariables with full substitutions as arguments, rather than only pattern metavariables, which take only a disjoint set of variables as arguments. As a result, the "fatter" models lack the universal properties needed to generalise Rydeheard and Burstall's proof. Furthermore, the notion of signature we introduce is also novel, and is general enough to let us derive several unification algorithms not previously found in the literature, including pattern unification for terms in ordered logic. See our response to review D for more details. 8 | 9 | Review D also expresses concern that our notion of arity is only a list of variables, and does not keep track of type information. Fortunately, this is not the case: as review A notes, our notion of arity is typed, and metavariable arities carry enough type information to rule out type-incorrect uses of metavariables. We give a detailed explanation of this below, and will perhaps switch our example in section 2 from untyped to typed lambda calculus, to make this clear. 10 | 11 | Reviews B and D also ask about support for equations. Our core notion of signature does not contain equations in it (since general E-unification is undecidable), and so by default it does unification modulo alpha-equivalence. (Eg, we cannot do unification modulo the structural congruence of pi-calculus.) However, in the specific case of typed lambda calculus, we can do better, since there is a GB-signature capturing the normal (and neutral) forms, which lets us lift to pattern unification modulo beta-eta. We sketch the (essentially standard) details below. 12 | 13 | In the following, we list the main changes we plan to implement in the final version, and provide detailed answers to the questions of the referees. 14 | 15 | [1] Free S-Monoids: A Higher-Order Syntax with Metavariables. Hamana, APLAS 2004 16 | 17 | [2] Second-Order Equational Logic. Fiore, Hur, CSL 2010 18 | 19 | [3] A Category Theoretic View of Contextual Types: From Simple Types to Dependent Types. Hu, Pientka, Schöpp, ACM TOCL 2022 20 | 21 | 22 | MAIN CHANGES 23 | ----------------- 24 | 25 | - Reorder the figures and fix formatting problems 26 | - Introduce some of the GB-signature examples from section 7 immediately after the definition of GB-signature, in particular simply-typed lambda calculus (from section 7.2 currently) 27 | - Expand the related work to compare with the approach based on contextual modal type theory and other semantics of metavariables 28 | - Add the example of normalised syntax of simply-typed lambda calculus to explain how we can accommodate lambda-calculus modulo beta/eta. 29 | 30 | DETAILED RESPONSE 31 | ------------------- 32 | 33 | We give detailed answers to the questions in each of the reviews below. 34 | 35 | Review A 36 | ----------------- 37 | 38 | > I would have liked to see a more in-depth comparison to the many other notions of abstract 39 | > binding signature that have been developed over the years, several of which are cited. 
Why 40 | > couldn't you adapt one of those? 41 | 42 | Most existing formalisations of binding signature support general metavariables (ie, metavariables can be instantiated with substitutions containing arbitrary terms). This is too strong to model pattern unification, and our goal was rather to identify the features of a syntax with metavariables satisfying the pattern restriction, which led us to the notion of GB-signatures. 43 | 44 | > Can you clarify exactly which aspects of the paper are novel, 45 | 46 | See our answer to the first concern of review D below. 47 | 48 | > is there anything novel in Section 2.1 49 | 50 | The specific example lambda-calculus as treated in Section 2.1 is not new, except for minor features such as the treatment of failure, as noted by Review B. Section 2.1 is intended to present the pattern unification algorithm in a way that fits into our generic framework. We will make this more explicit in the final version. 51 | 52 | > could you instead consider the entire category of substitutions, of which the renamings form a 53 | > subcategory? 54 | 55 | It is unclear how to define the category of substitutions for a syntax generated by an arbitrary GB-signature. 56 | 57 | 58 | Review B 59 | ----------------- 60 | 61 | > I was constantly expecting unification modulo β-equivalence to appear 62 | 63 | See our answer to the third concern of review D below. 64 | 65 | > for the specific case of proving termination of unification, would it be possible to adapt 66 | > McBride's elegant expression of unification in a structurally recursive manner? 67 | 68 | We initially tried to apply McBride's technique in a Coq formalisation, but couldn't make it work due to the limitations of Coq's pattern matching and termination checking. It would indeed be interesting to prove termination of our algorithm in Agda using the McBride style, but we have not yet tried to do so. 69 | 70 | 71 | >l.493, l.591: The text is careful to specify "small category" but the Agda definitions use level- 72 | >polymorphism to support arbitrarily large universes. As far as I can tell no use is made of this, so 73 | > might it not be simpler to use small concrete Set levels (i.e. Set, with Set₁ where required)? 74 | 75 | If the reviewers prefer, we are ready to apply the change. We don't have a strong opinion on this matter: being a small category could also be interpreted as being small with respect to some universe. 76 | 77 | 78 | Review D 79 | ---------------- 80 | 81 | 82 | This review highlights four major areas of concern, and two specific examples: 83 | 84 | **1. The novelty of the contribution** 85 | 86 | This concern observes that the pattern unification algorithm is not 87 | especially novel, and asks if the ideas in our categorical semantics 88 | also exist in the literature. 89 | 90 | We agree that the core steps of our algorithm are essentially the same 91 | as in other papers: the pattern unification algorithm is the same, regardless 92 | of the details of how it is presented. 93 | 94 | However, our semantics for metavariables does differ in a critical way 95 | from prior semantics of metavariables (such as that of Hsu et al, as 96 | well as those Fiore's and Hamana's). Those models permit interpreting 97 | *general* metavariables -- i.e., a metavariable can be instantiated 98 | with a full substitution of arbitrary terms. A consequence of this is 99 | that they contain the semantic analogues of problems outside the 100 | pattern fragment, such as `M x x ?= N x x`. 
Since problems like this 101 | do not have most general unifiers, the more general categories in the 102 | literature correspondingly do not always have suitable equalisers. 103 | 104 | However, the pattern fragment is much more restrictive: a metavariable 105 | can only be instantiated with a disjoint collection of free variables, 106 | ensuring that mgu's can be found (if they exist). Our semantics has 107 | been engineered so that it can *only* interpret metavariable 108 | instantiations in the pattern fragment, and cannot interpret full 109 | metavariable instantiations. 110 | 111 | This restriction gives our model much stronger properties, enabling us 112 | to characterise each part of the pattern unification algorithm in 113 | terms of universal properties. This lets us extend Rydeheard and 114 | Burstall's proof to the pattern case. No prior semantics can be used 115 | for this purpose. 116 | 117 | **2. Are arities just vectors of variables without type information? 118 | How can this possibly extend to dependent types?** 119 | 120 | This concern is whether our notion of arity is too limited, as a 121 | vector of variables without type information. 122 | 123 | Our notion of arity is **not** limited to a vector of variables 124 | without type information. As review A notes, in section 7 we give 125 | examples which show that our notion of arity includes the sorts of the 126 | inputs and the output of the result. As a result, we are performing 127 | unification on intrinsically well-sorted terms, and our correctness 128 | theorem ensures that the result of unification is always a well-typed 129 | substitution whose application results in well-typed terms. 130 | 131 | For example, the instance of our unification algorithm for System 132 | F will never confuse type and term variables, nor will it produce 133 | ill-typed terms or substitutions. Likewise, the example of ordered 134 | unification will never produce substitutions which require exchange 135 | or any other structural rule. 136 | 137 | As we say in the paper, extending our approach to fully 138 | dependently-sorted theories is future work. However, the fact that our 139 | approach works for System F (which permits a light dependency on 140 | types) makes us hopeful. 141 | 142 | **3. Can our approach handle equations such as beta/eta-laws?** 143 | 144 | This concern asks whether our algorithm 145 | supports unification modulo equational laws like the β- and η-laws of 146 | the lambda calculus. 147 | 148 | Our algorithm is generic over a notion of signature, and so does not 149 | "know" anything about the lambda calculus. Since our definition of 150 | GB-signature does not carry an equational theory with it, the core 151 | algorithm works only on terms modulo α-equivalence, and works the same 152 | regardless of the signature, whether λ-calculus, π-calculus, or 153 | anything else. 154 | 155 | However, in the specific case of the lambda calculus, every typed 156 | lambda term has a normal form, and furthermore, the syntax of the 157 | normal forms can be specified by a GB-signature. As a result, we can 158 | do unification modulo βη by lowering the metavariables, pre-normalising the 159 | terms and doing unification on normal forms, along the lines of Vezzosi and 160 | Abel[1]. (This is also similar to the approach of Abel and Pientka.) 161 | In the final version of the paper, we will give this as an extended 162 | example. 
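For concreteness, here is an informal sketch of this treatment on Review D's example below (notation is illustrative, and the types are adjusted so that the problem is well-sorted): take a metavariable `M` of arity 0 and type `A → A`, a single term variable `y : A`, and the problem `M() y ?= y`. Modulo α-equivalence alone this fails, since an application cannot be unified with a bare variable. After lowering `M` to a metavariable `N` of arity 1 and type `A` (i.e. instantiating `M` with `λx. N(x)`) and β-normalising, the problem becomes `N(y) ?= y`, which the flex-rigid case solves by instantiating `N(x)` with `x`, so that `M` ends up instantiated with the identity function, as Review D expects.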
163 | 164 | [1] A Categorical Perspective on Pattern Unification, Vezzosi-Abel, RISC-Linz 2014 165 | 166 | **4. Motivation -- is LF-style unification already enough?** 167 | 168 | This concern asks if pattern unification for LF-style type theories 169 | can already be used to emulate our results with a suitable choice of 170 | LF signature. The answer to this question is no, it cannot. In fact, 171 | LF signatures and GB-signatures are strictly incomparable in power. 172 | 173 | LF-style signatures already handle type dependency (which is future 174 | work for us), but there are also GB-signatures which cannot be encoded 175 | with an LF signature. For example, GB-signatures allow us to express 176 | pattern unification for ordered lambda terms. This is (to our 177 | knowledge) completely novel -- even Schack-Nielsen and Schürman's 178 | pattern unification algorithm for linear types assumes the 179 | admissibility of exchange. 180 | 181 | **5. The two concrete problems** 182 | 183 | Review D asked about two specific problems. 184 | 185 | The problem in the review `M:(A); y:A |- M[y] = \x. c x y` is not a 186 | valid input to our unification algorithm, because it is 187 | ill-sorted. The `M` metavariable needs a return sort to have a 188 | well-formed arity. If we choose the base sort `B`, then `M:(A) → B`, 189 | and `M[y]` is of type `B`. But `\x. c x y` would have function 190 | type, and we would be mistakenly trying to unify terms of different 191 | sorts. 192 | 193 | In regards to the question about the problem `M:0 ; 1 |- M() 1 = 1`, 194 | when the problem is corrected to give the proper typed arity to the 195 | metavariable, this should succeed when the unification problem is 196 | specified via the normal-form signature after lowering and normalistion, 197 | and fail in the general theory of lambda terms modulo alpha-equivalence. 198 | -------------------------------------------------------------------------------- /popl24-reviews.md: -------------------------------------------------------------------------------- 1 | POPL 2024 Paper #265 Reviews and Comments 2 | =========================================================================== 3 | Paper #265 Generic pattern unification 4 | 5 | 6 | Review #265A 7 | =========================================================================== 8 | 9 | Overall merit 10 | ------------- 11 | B. Weak accept: I lean towards acceptance. 12 | 13 | Reviewer expertise 14 | ------------------ 15 | Y. Knowledgeable: I am knowledgeable about the topic of the paper (or at 16 | least key aspects of it). 17 | 18 | Summary of the paper 19 | -------------------- 20 | The authors present an Agda implementation of pattern unification, parameterized over a novel notion of generalized binding signature whose instances include the untyped, simply-typed, and ordered lambda calculi, and System F. They provide paper proofs that their algorithm is sound, complete, and terminating, based on a categorical semantics of most general unifiers as equalizers, generalizing earlier work of Barr and Wells and Rydeheard and Burstall, among others, on first-order unification. 21 | 22 | Assessment of the paper 23 | ----------------------- 24 | Pattern unification is an essential ingredient of many type inference algorithms, so I am convinced by the paper's goal of trying to formulate the problem statement and algorithm in a clean, abstract, and general way, and I think that the paper does a reasonable job of fulfilling those goals. 
In particular I want to praise the use of dependent types as a way to enforce some but not all of the necessary invariants -- mechanization is not all-or-nothing. 25 | 26 | My reservation -- besides some presentational complaints that could easily be fixed in a revision -- is that I am (genuinely!) not sure how novel the ideas are. For example, the categorical semantics struck me as such a direct generalization of the ideas of Rydeheard and Burstall that I was shocked when I could not find a categorical description of pattern unification along these lines in the literature. 27 | 28 | Indeed I am not very familiar with the literature on unification (although am moderately familiar with the topic), and in reviewing this paper I was surprised at how difficult it was to compare older papers, each of which concerns a slightly different scenario. Perhaps this indicates that the POPL community is in desperate need of a paper like this one. I don't know. My assessment of B indicates "accept, but with low confidence." 29 | 30 | The notion of pattern-friendly generalized binding signature is likely the most novel of the paper's contributions, but I would have liked to see a more in-depth comparison to the many other notions of abstract binding signature that have been developed over the years, several of which are cited. Why couldn't you adapt one of those? 31 | 32 | Detailed comments for authors 33 | ----------------------------- 34 | The layout and pagination of figures in Section 2 is so bad that it really impeded my ability to read the paper. The figures do not appear in the order that they are explained, and the explanations appear many pages away from the figures themselves. _Please, please fix this._ For example: 35 | - The explanation of Figure 3 (on page 5) starts halfway down page 8. 36 | - The explanation of Figure 5 (on page 7) starts on page 4. 37 | - Figure 4 (on page 6) is not discussed until Section 2.2 on page 10. 38 | 39 | Pages 8 and 9 would also be easier to read with some paragraph headings indicating which part of the algorithm is being discussed. 40 | 41 | Other comments: First, the discussion on page 1 gives the impression that unification has never been studied in a generic or abstract fashion, although later you make it clear that it has. I appreciate that this is to motivate the problem, but maybe you could be less forceful about it? 42 | 43 | I suggest clearly stating once in prose what "Miller's pattern fragment" is given that it is the topic of the paper. Many readers will know, and the paper does define the pattern fragment (e.g. in Figure 1) but never directly connects this definition to the phrase "pattern fragment." 44 | 45 | Citations are formatted strangely throughout the whole paper, e.g. "such as Dunfield and Krishnaswami Dunfield and Krishnaswami [2019]" (L25). 46 | 47 | I found the definition of GB-signature confusing until I got to the examples in Section 7. Your notion of "arity" in the STLC includes the sorts of the inputs _and_ the output of a function symbol, but to me this doesn't square with the terminology of an arity as a "variable context." It might help to give the examples earlier on, closer to where GB-signatures are defined. 48 | 49 | Typos: 50 | - L6: "proved on papers" 51 | - L172: "is actually arises" 52 | - L173: This sentence is duplicated with a few words changed on L289. 
53 | - L234: the second premise of the rule should read $\Delta_1$ not $\Gamma$ 54 | - L347: "additional the" 55 | - L626: "on which the algorithm relies on" 56 | - L741: Property 3.17 should say "...for pattern-friendly GB-signatures." 57 | 58 | Questions to be addressed by author response 59 | -------------------------------------------- 60 | - Can you clarify exactly which aspects of the paper are novel, besides the notion of GB-signature? For example, would you say there is anything novel in Section 2.1? 61 | - Rather than taking the category of renamings as the input to your framework, could you instead consider the entire category of substitutions, of which the renamings form a subcategory? (In particular, the paper "Relative induction principles for type theories" by Bocquet, Kaposi, and Sattler isolates a universal property for the subcategory of renamings, so it seems to me possible in theory.) 62 | 63 | 64 | 65 | Review #265B 66 | =========================================================================== 67 | 68 | Overall merit 69 | ------------- 70 | A. Strong accept: I will argue for acceptance. 71 | 72 | Reviewer expertise 73 | ------------------ 74 | Y. Knowledgeable: I am knowledgeable about the topic of the paper (or at 75 | least key aspects of it). 76 | 77 | Summary of the paper 78 | -------------------- 79 | This paper presents an implementation of second-order unification of lambda-calculus terms in Agda, using Miller's pattern fragment. It then generalises this to an arbitrary signature in a particularly neat way, showing how the generalisation can express unification in many other settings. 80 | 81 | Assessment of the paper 82 | ----------------------- 83 | I am impressed by how neat the generalisation of unification is. Very few special-purpose structures are introduced: renaming of terms is functorality of arities, metavariable applications are unified by pullbacks, etc. 84 | 85 | The treatment of failure in Remarks 2.2, 2.3 is particularly elegant - it is very nice to have a definition of mgu that does not have a side-condition of solvability. 86 | 87 | Although the paper never claims to, I was constantly expecting unification modulo β-equivalence to appear, because (a) in dependently typed language implementation, Miller's pattern fragment is often used for this purpose, and (b) the main example is the λ-calculus. I suspect I will not be the only reader misled in this way, so please state explicitly in the introduction equations between terms are out of scope. 88 | 89 | Getting LaTeX to lay out figures well is always annoying, but the positioning and ordering of figures in this paper is particularly bad. The main text introduces figures in the order 1, 5, 2, 3, 6, 7, 8, 4. I read them in roughly this order, constantly flicking back and forth to match code to text. Please rearrange these! 90 | 91 | Detailed comments for authors 92 | ----------------------------- 93 | l.6: "on papers" --> "on paper" 94 | 95 | l.105: Please explain on first use that ⌊_⌋ and ⊥ are the introduction forms for Maybe 96 | 97 | l.335: This paragraph seems to be in the wrong order? It ends with what looks like a definition of metavariable substitutions, which should come first. 98 | 99 | l.475: Is the year in Aczel (2016) correct? The bibliography entry is confusing. 100 | 101 | l.493, l.591: The text is careful to specify "small category" but the Agda definitions use level-polymorphism to support arbitrarily large universes. 
As far as I can tell no use is made of this, so might it not be simpler to use small concrete Set levels (i.e. Set, with Set₁ where required)? 102 | 103 | l.988: I am in general very sympathetic to the idea of using Agda as a programming language and doing the proofs on paper. (For one thing, it makes it possible to actually read the resulting Agda code!). However, for the specific case of proving termination of unification, would it be possible to adapt McBride's elegant expression of unification in a structurally recursive manner? (See "First-order unification by structural recursion", McBride, 2003, and also the later work "A tutorial implementation of dynamic pattern unification", Gundry & McBride, 2012) 104 | 105 | 106 | 107 | Review #265C 108 | =========================================================================== 109 | 110 | Overall merit 111 | ------------- 112 | B. Weak accept: I lean towards acceptance. 113 | 114 | Reviewer expertise 115 | ------------------ 116 | Y. Knowledgeable: I am knowledgeable about the topic of the paper (or at 117 | least key aspects of it). 118 | 119 | Summary of the paper 120 | -------------------- 121 | The paper implemented a generic second-order pattern unification algorithm in Agda. It also provides an on-paper category-theoretic proof about its correctness. 122 | 123 | Assessment of the paper 124 | ----------------------- 125 | The paper is well-written and the Agda code is elegant. The key insight that unification (for the discussed fragment) is _all_ about coequalizers and pushouts (or equivalently finite connected colimits) and injectivity is inspiring. 126 | 127 | The only disappointment (if any) would be that the correctness proof has not been mechanized in Agda. In particular, the Agda version of `isFriendly` does not contain the properties needed for the correctness proof (discussed on lines 631-632). I believe it would have been a stronger paper if those properties are also mechanized in Agda (so that no one can use the Agda code incorrectly) along with the correctness proof. 128 | 129 | Nonetheless, the current submission still looks impressive. My main reservation is that I am not familiar with the literature on unification so I decided to give Weak Accept. 130 | 131 | Detailed comments for authors 132 | ----------------------------- 133 | Minor points on typesetting: please use `\citep` when appropriate so that citations do not mix with the surrounding text. (Note: the strange citation formatting was also complained by Reviewer A, and `\citep` is the solution.) Also, the footer on page 5 strangely went into the framed box. 134 | 135 | 136 | 137 | Review #265D 138 | =========================================================================== 139 | 140 | Overall merit 141 | ------------- 142 | C. Weak reject: I lean towards rejection. 143 | 144 | Reviewer expertise 145 | ------------------ 146 | X. Expert: I am an expert on the topic of the paper (or at least key 147 | aspects of it). 148 | 149 | Summary of the paper 150 | -------------------- 151 | The paper gives a reconstruction of pattern unification via category theory. In particular, it focuses on second-order pattern unification where meta-variables are applied to distinct bound variables of base type. The goals is to formulate unification algebraically/categoy theoretically and obtain a generic description where we abstractly characterize the context of meta-variables, contexts of bound variables and operations such as pruning, etc. 
This would then make it easier to customize this abstract framework to different languages such as STLC, System F, etc. 152 | 153 | The paper develops the pattern unification algorithm in Agda and proves termination of the algorithm on paper. 154 | 155 | Assessment of the paper 156 | ----------------------- 157 | There are four main concerns about this paper: 158 | 159 | 1) The paper lacks a discussion of how other formulations of higher-order pattern unification algorithms relate to the given implementation in Agda. Fundamentally, there are no new ideas in the formulation of the pattern unification algorithm. The main new part seems to be its particular formulation and implementation in Agda. 160 | 161 | The characterization of meta-variables together with their arity, describing pruning with respect to a vector of bound variables and intersections of bound variables is already present in Dowek's work [2]. It was given a logical foundation through contextual types where contextual types characterize meta-variables of type A in a context Psi [4,5]. The length of Psi is essentially the arity. The higher-order pattern unification was for example described in detail (including pruning/intersections/etc. together with proofs that pruning is correct) in Pientka's PhD thesis (2003), but also in [1]. 162 | 163 | [1] A. Abel and B. Pientka, Higher-Order Dynamic Pattern Unification for Dependent Types and Records, TLCA 2011 164 | 165 | [2] Gilles Dowek, Thérèse Hardin, Claude Kirchner, Frank Pfenning, Unification via Explicit Substitutions: The Case of Higher-Order Patterns. JICSLP 1996: 259-273 166 | 167 | [3] Jason Z. S. Hu, Brigitte Pientka, Ulrich Schöpp: 168 | A Category Theoretic View of Contextual Types: From Simple Types to Dependent Types. ACM Trans. Comput. Log. 23(4): 25:1-25:36 (2022) 169 | 170 | [4] A. Nanevski, F. Pfenning, B. Pientka, Contextual Modal Type Theory, TOCL 2008 171 | 172 | [5] A. Nanevski, B. Pientka, F. Pfenning: A modal foundation for meta-variables. MERLIN 2003 173 | 174 | [6] B. Pientka, Tabled Higher-Order Logic Programming, CMU PhD Thesis, 2003 175 | 176 | 177 | One might say that the main new novelty is the category theoretic treatment of metavariables; but work by Hu et al already does give a category theoretic model for contextual types. So, the main contribution seems the re-formulation / implementation of the actual pattern unification algorithm. This seems a fairly narrow contribution. 178 | 179 | 2) Treating meta-variables without their type and just keeping the arity lacks / drops information. For example, there is no distinction between a meta-variable M arity 0 (but with type A -> B) and a meta-variable N arity 1 of type B (where the argument takes in one variable of type A) on the syntax level. For example, 180 | 181 | Let M be a meta-variable of type A -> B with arity 0. Then the following should succeed (but I don't think that the algorithm does at the momen): 182 | 183 | M:0 ; 1 |- M() 1 = 1 since we instantiate M() with the identity function. 184 | 185 | In general, we could require that a meta-variable M of type A -> B with arity 0 is lowered to a meta-variable N of type B and arity 1 by instantiating M with \x. N(x). This is in fact what Dowek et all call pre-cooking and is also the restriction imposed in Abel, Pientka's work where we require that all meta-variables are lowered. I am missing a discussion of what assumptions are made about the syntax of terms (e.g. meta-variables must be always lowered). 
186 | In fact, one nice aspect of contextual types, i.e. keeping the types around when we characterize meta-variables, is that we can give a purely logical account of lowering / raising, i.e. we can justify them by lemmas. 187 | 188 | Conversely, the algorithm would unify: 189 | 190 | M:1 ; 1 |- M(1) = \x. c x 2 (or to write it with concrete bound variables) M:A ; y |- M(y) = \x. c x y 191 | 192 | even if M had type B (base type) with arity 1 (i.e. it may depend on a variable x:A); but it should actually fail, because both sides do not have the same type. 193 | 194 | The current paper should discuss assumptions on terms that are being unified and clearly spell out how raising/lowering can be justified in their framework. If the assumption is that terms are well-typed to begin with, then it should be spelled out and explained how substitutions for meta-variables preserve well-typedness. 195 | 196 | 3) The authors claim that their set-up should scale to dependent types. However, I am somewhat doubtful that there is enough information. In the dependently typed setting, it is actually difficult to prove that pruning a term M (of type A in a context Psi) with respect to variables x1, ... xn = Phi results in a valid term M' that is well-typed in Phi -- to put it differently, we could have pruned a variable from Psi which would render the sub-context Phi ill-typed/ill-formed. This is discussed in Abel and Pientka's work (see also Pientka's PhD thesis). Therefore, I have doubts that the given description would scale to dependent types without tracking more information than just arity. 197 | 198 | 199 | 4) Motivation: In general, I can see that a category-theoretic reconstruction of pattern unification is intrinsically interesting; but at the same time, I did not find the given explanation convincing (i.e. we can embed STLC / System F into the GB signature and we want a generic unification algorithm for all these systems). We know that we can encode STLC and System F into a lambda-calculus such as LF or Church's simple type theory. We also have pattern unification algorithms for LF / Church's simple type theory and then we simply re-use that algorithm. It is easy to also extend the encoding of STLC with, for example, recursive types -- this is done all the time in LF / HOAS style systems. So in some sense unification for LF / HOAS-style systems is already generic, and such algorithms have also been extended to linear LF (see [6]) -- maybe I am misunderstanding the point the authors want to make. 200 | 201 | [6] Anders Schack-Nielsen, Carsten Schürmann: 202 | Pattern Unification for the Lambda Calculus with Linear and Affine Types. LFMTP 2010: 101-116 203 | 204 | 205 | Overall, this work could be made more accessible and stronger by putting this line of research within the existing context of work on pattern unification, which arguably takes a syntactic rather than a category-theoretic approach. 206 | 207 | Questions to be addressed by author response 208 | -------------------------------------------- 209 | Please explain your assumptions on terms; in particular, how you handle the (counter)examples given under 2. -------------------------------------------------------------------------------- /popl25-response.txt: -------------------------------------------------------------------------------- 1 | We thank the referees for their useful comments and suggestions. 2 | 3 | Reviews A and C ask about the motivation of our work.
4 | As noted by Review B, pattern unification is used in the implementation 5 | of various PLs. As a concrete example, consider Dunfield-Krishnaswami's type inference algorithm ("Sound and Complete Bidirectional Typechecking for Higher-Rank Polymorphism with Existentials and Indexed Types", 2019). 6 | It only involves first-order unification, but simply adding a monomorphic type with a binder (for example, a recursive type 𝜇𝑎.𝐴[𝑎]) would require pattern unification. 7 | 8 | In order to avoid reproving everything for each new type system, pattern unification needs to be formulated generically so that it can be used in a variety of contexts without modification. This is our original motivation for this work. To the best of our knowledge, we are the first to give a general definition of pattern unification that works for a wide class of languages, in the vein of Rydeheard-Burstall's first-order analysis. 9 | 10 | Reviews A and C note that Abel-Vezzosi already studied pattern unification using a categorical approach; however, they focused on the single case of normalised simply-typed lambda calculus, and as noted by Review C, our generalisation is not trivial. As a matter of fact, our treatment of lambda-calculus differs. In the revised version, we would make explicit how and why our treatment differs. 11 | 12 | Review C also mentions the work of Schack-Nielsen and Schurmann that devises 13 | substructural pattern unification using syntactic methods [LFMTP 2010]. As mentioned in Section 7.4, we cover a more restricted variant, but again, they focus on a single case, while we provide a general setting. 14 | 15 | BETA/ETA EQUALITIES IN SYSTEM F 16 | ==================================== 17 | 18 | Reviews A and C ask about the treatment of beta or eta equalities in the System F example. In section 7.3 (simply-typed lambda-calculus modulo beta and eta), we explain how to deal with equations in the syntax, by working on normal forms. 19 | The System F example is meant to address an orthogonal complexity related to the type system. In that case, the category of arities is not a mere product category (as suggested by Review A), but a "dependent pair" (formally, an oplax colimit) because the set of possible types for the term variables depends on the available type variables. 20 | 21 | We do not consider terms up to beta or eta equalities in the System F example (this will be more explicit in a revised version). We thank Review C for raising the issue about eta-equality; we are not sure if this is a significant obstacle, since instantiation of metavariables in pattern unification does not involve full substitution, but only (injective) renaming. A preliminary investigation suggests that it may be possible for us to handle the syntax of eta-short beta-long normal forms, exploiting the presentation of System F as a pure type system. 22 | This syntax is generated by the following two rules, where i denotes a type variable. 23 | ``` 24 | Γ,𝑦₁:𝑢₁,…⊢𝑥 : i 25 | __________________________ 26 | Γ⊢λy₁….𝑥 : ∏ (y₁ : u₁)….i 27 | 28 | 29 | Γ,𝑦₁:𝑢₁,… ⊢ 𝑥 : ∏(a₁ : v₁).τ₁ 30 | Γ,𝑦₁:𝑢₁,… ⊢ t₁ : v₁ τ₁[a₁ ↦ t₁] = ∏(a₂ : v₂).τ₂ 31 | Γ,𝑦₁:𝑢₁,… ⊢ t₂ : v₂ τ₂[a₂ ↦ t₂] = ∏(a₃ : v₃).τ₃ 32 | … τₙ[aₙ ↦ tₙ] = i 33 | __________________________ 34 | Γ ⊢ λy₁….𝑥t₁… : ∏ (y₁ : u₁)….i 35 | ``` 36 | 37 | RELATED WORK 38 | ================== 39 | 40 | If accepted, we will include the following related work mentioned by Review B.
41 | 42 | Pottier & Pouillard 43 | ------------------- 44 | Pottier & Pouillard (ICFP 2010/2011/JFP 2012) provide an Agda library for working with syntax with binders. Unlike us, they do not provide a general scheme for syntax (the user typically defines it using Agda's inductive types), nor do they focus on unification. Their library could be used to implement a unification algorithm for a specific syntax. It is not clear if it is possible to reuse their library in our Agda formalisation since the binding structure is not explicit in our signatures. 45 | 46 | Gundry 47 | -------------------- 48 | In chapter 4, Gundry's thesis presents a pattern unification algorithm for a dependent type theory, as a component of his type inference algorithm. It falls outside the scope of our setting because of type dependency, but also because it actually implements "dynamic pattern unification", which addresses unification problems that do not necessarily fit into the pattern fragment at first. 49 | 50 | Mixed prefix unification (Miller 1992) 51 | -------------------- 52 | Miller showed that mixed prefix unification problems can be translated into higher-order pattern unification problems. In that respect, our work equally handles mixed prefix unification. 53 | 54 | DETAILED ANSWERS TO REVIEW B 55 | ================== 56 | 57 | - In this work, where are Lawvere theories (mentioned in the abstract) involved? 58 | 59 | The Lawvere theory of a GB-signature is the opposite category of the category of metacontexts and metasubstitutions. One can think of a Lawvere theory as a syntax with substitution, where unification corresponds to computing equalisers of parallel morphisms. 60 | 61 | - Can you justify why the introduction rule FO for metavariables corresponds to first-order unification? Shouldn't we restrict the arity of the metavariable to be 0? 62 | 63 | What we mean here is that first-order unification is enough to solve unification problems where metavariables are introduced according to this rule. For example, we explain in the paragraph below how pure lambda-calculus can be specified as a multisorted first-order syntax, where the sorts are the natural numbers. First-order unification readily applies without any further restriction on the sorts of metavariables, hence the absence of restriction in the FO rule. We agree that this might be confusing and propose to clarify this point in the revised version, if accepted. 64 | 65 | As a multisorted first-order syntax, pure lambda-calculus is generated by the following operations, for each sort n: 66 | * n constants of that sort (the variables); 67 | * a binary operation n × n → n (application); 68 | * a unary operation n + 1 → n (lambda-abstraction). 69 | 70 | 71 | - Can you illustrate Example 2.5 (pure lambda-calculus) with Agda code showing how the binding signature specification in figure 6 can be instantiated? 72 | 73 | Here are the main definitions of the Agda code for Example 2.5, which we propose to include and explain in a revised version.
74 | 75 | ``` 76 | data O n : Set where 77 | Var : Fin n → O n 78 | App : O n 79 | Lam : O n 80 | 81 | α : {n : ℕ} → O n → List ℕ 82 | α (Var x) = [] 83 | α {n} App = n ∷ n ∷ [] 84 | α {n} Lam = 1 + n ∷ [] 85 | 86 | _{_} : ∀ {a b : ℕ} → O a → hom a b → O b 87 | Var x { s } = Var (Vec.lookup s x) 88 | App { s } = App 89 | Lam { s } = Lam 90 | 91 | _^_ : {a b : ℕ} (x : hom a b) (o : O a) → 92 | Pointwise hom (α o) (α (o { x })) 93 | x ^ Var y = [] 94 | x ^ App = x ∷ x ∷ [] 95 | x ^ Lam = (x ↑) ∷ [] 96 | ``` 97 | 98 | 99 | - In unifying $M(\vec{x})$ and $M(\vec{y})$ in a typed setting, do we need to check that the matching xs and ys have the same types? 100 | 101 | Our framework handles typed settings in such a way that knowing that $M(\vec{x})$ and $M(\vec{y})$ are well-formed in the same metacontext and scope is enough to conclude that the types of xs and ys are the same. This fact may be more or less implicit in the typed examples (e.g., simply-typed lambda-calculus, section 7.2); we propose to make it more explicit in the revised version. 102 | 103 | 104 | > l744 Is $J$ in $\int J \to \mathcal{A}$ meant to be $\mathcal{O}$? 105 | 106 | Yes, thanks for pointing out the typo. -------------------------------------------------------------------------------- /popl25-reviews.md: -------------------------------------------------------------------------------- 1 | POPL 2025 Paper #103 Reviews and Comments 2 | =========================================================================== 3 | Paper #103 Semantics of pattern unification 4 | 5 | 6 | Review #103A 7 | =========================================================================== 8 | 9 | Overall merit 10 | ------------- 11 | 3. Weak accept 12 | 13 | Reviewer expertise 14 | ------------------ 15 | 2. Some familiarity 16 | 17 | Paper summary 18 | ------------- 19 | The goal of the paper is to develop a uniform semantics for a class of 20 | unification problems for a variety of different languages 21 | with metavariables. In particular, the paper focuses on pattern 22 | unification, that is, unification problems where metavariables are only 23 | applied to distinct object variables. 24 | 25 | To demonstrate its approach, the paper proceeds in three steps. The first 26 | step is an exposition of an algorithm (and its Agda implementation) for 27 | pattern unification for an untyped lambda calculus with metavariables. 28 | The algorithm is organized carefully around a case analysis. Three cases 29 | stand out: (i) unification of two metavariable applications; (ii) 30 | unification of so-called rigid terms (such as two lambdas or two apps); 31 | and (iii) pruning of rigid terms. The second step is the generalization 32 | of the algorithm beyond the setting of untyped lambda calculus. This 33 | involves abstracting over syntax with the use of an abstract specification 34 | for a language that the paper calls GB-signatures, and that consists of an 35 | abstraction of the rigid operations of the language, their arities (and 36 | renamings between them) and scopes. The paper describes these 37 | abstractions as category theory constructions. Given these 38 | constructions, the paper revisits the algorithm and shows that it can be 39 | phrased in an abstract way by casting the object-syntax-specific 40 | handling of the three cases above in category theory terms: case (i) 41 | amounts to the use of equalisers and pullbacks, case (ii) requires 42 | checking the equality of the names of rigid operations, and case (iii) 43 | involves inverse renaming.
The third step is the proofs that the 44 | general algorithm is sound, terminating and complete. These proofs rely 45 | on a few conditions that entail properties of the GB-signatures such as 46 | that the category of arities has equalizers and pullbacks. When a 47 | language with metavariables corresponds to a GB-signature with these 48 | properties, it is called pattern-friendly. 49 | 50 | To demonstrate the applicability of pattern-friendly languages, the paper 51 | presents a series of lambda calculi as use cases that fit its framework. 52 | 53 | Comments for authors 54 | -------------------- 55 | Even though I have just elementary knowledge of category theory, I enjoyed 56 | reading the paper and I believe I now understand unification in a deeper 57 | way than I did before. Despite a deep dive into category theory from the 58 | get-go, the paper is well-written. In particular, the use of the concrete 59 | untyped lambda calculus example to introduce the key ideas prior to the 60 | generalization was particularly helpful. Sections 3, 4, 5, and 6 are pretty 61 | hard to penetrate for someone who is not well-versed in these ideas, but I was still 62 | able to extract the key points. Section 7 helped a lot to understand how 63 | the general setting can be made concrete in different ways (for example, 64 | how the arities category can encode a variety of information). 65 | 66 | That said, I have struggled to identify the novelty of the paper. The 67 | general idea of viewing unification through the lens of category theory 68 | and, in particular, the role of equalizers and pullbacks, is not new --- 69 | the first time I encountered it was when studying the Rydeheard and 70 | Burstall textbook as a grad student, which the paper also cites. 71 | Similarly, the organization of the algorithm for pattern unification 72 | around the presented case analysis is not new. For instance, the short 73 | Vezzosi and Abel paper that this paper cites has a similarly structured 74 | algorithm modulo \alpha, \beta and \eta equivalence, uses equalizers and 75 | pullbacks to express it in categorical terms, and points to the use of 76 | Indexed Containers as a way to abstract over the object-level details of 77 | the language operators. 78 | 79 | Based on the above, my conclusion is that the novelty of the paper is (i) the 80 | GB-signatures, the identification of the conditions for pattern-friendly 81 | languages and the demonstration of the applicability of this formulation 82 | in section 7; and (ii) the proof of correctness for pattern-friendly 83 | languages. 84 | 85 | I do not have the expertise to comment on whether the above 86 | is a sufficient contribution and/or requires overcoming significant 87 | technical hurdles. 88 | 89 | However, I think that independently of the magnitude of its contributions, 90 | the paper is missing two things: 91 | 92 | (i) A detailed discussion of limitations. There are some hints here and 93 | there about what the framework can or cannot handle, but the paper should 94 | devote some space to explain what GB-signatures can and cannot do in 95 | detail and how hard it is to prove the conditions expected for pattern-friendly 96 | languages. 97 | 98 | (ii) A detailed discussion of motivation. While unification is undoubtedly 99 | important, it has been studied for decades, and there is a plethora of 100 | formulations and proofs of correctness in the literature for specific 101 | contexts.
In particular, there is a lot of work that covers the 102 | applications of unification in PL and a lot of work that discusses how to 103 | correctly extend the unification capabilities of existing languages. The paper should 104 | place its results in this context and explain why the proposed 105 | generalization is significant. A possible argument could be that it 106 | facilitates the correct extension of existing languages, but the paper 107 | should spell it out and argue that prior work does not cover this need 108 | already. 109 | 110 | 111 | 112 | A few typos 113 | =========== 114 | 115 | line 37: ``rule'' --> ``rules'' 116 | 117 | line 108: missing ``the'' before ``endofunctor'' 118 | 119 | line 114: ``consists in'' --> ``consists of'' 120 | 121 | line 92: the abbreviation mgu has already been introduced 122 | 123 | line 635: redundant ``be'' after ``must'' 124 | 125 | 126 | 127 | Review #103B 128 | =========================================================================== 129 | 130 | Overall merit 131 | ------------- 132 | 3. Weak accept 133 | 134 | Reviewer expertise 135 | ------------------ 136 | 3. Knowledgeable 137 | 138 | Paper summary 139 | ------------- 140 | Higher-order pattern unification (HOPU) is a subproblem of higher-order unification (HOU) discovered by Miller in which metavariables have to be applied to distinct bound object variables. It is decidable and has most general unifiers (unlike HOU in both respects). This paper proposes a generalization that abstracts over both the constant/function and variable/binding structure of the object language, and the notion of metavariable whose uses are constrained in a way that allows for the idea of the higher-order pattern unification algorithm to still work. The first two sections present an overview of the ideas and illustrate the approach using the specific case of ordinary HOPU and the proposed generalization (though with many details deferred till later), using Agda and on-paper inference rule style. Sections 3-6 present the formal details, identifying suitable categorical structures to abstract over binding signatures and the required associated operations to make such a signature "pattern-friendly", and then showing soundness, termination and completeness (which follows from the first two since soundness incorporates the most general unifier property). Section 7 presents further examples that are said to be instances of the framework, though again at a high level, including patterns where metavariables are applied to sets of variables, lambda calculus with types and with beta-normal eta-long forms, System F, and an ordered linear calculus with both unrestricted and ordered-linear variables. 141 | 142 | Comments for authors 143 | -------------------- 144 | Evaluation 145 | 146 | I think this paper presents a significant contribution, because traditionally higher-order pattern unification has seemed like a rather syntactically motivated special case of higher-order unification without a very strong semantic basis or justification, but it is so useful in practice (cf. Abella, various LF-based systems, etc.) that it is compelling to have a stronger justification for it aside from the fact that it seems to work well in practice as a special case of HOU. This paper defines a space of signatures of languages with a notion of pattern which can be instantiated in many ways. 147 | 148 | However, some aspects of the presentation choices and detail sometimes get in the way of appreciating or understanding this work.
For example, given that the formalization later in the paper focuses on the rule-based systems, I am not sure how much the Agda implementation adds - it is not a complete formalization even of properties like termination, although it does help to use dependent types to ensure some basic well-formedness checks. This is ultimately a stylistic choice and as a not very Agda-literate person I may not be representative of the subset of the POPL community of people likely to be interested in this kind of work. 149 | 150 | I also found myself wondering about the relation to some work that has a similar feel (without the categorical semantics aspect) such as by Pottier & Pouillard (ICFP 2010/2011/JFP 2012) and perhaps Gundry et al.'s work on type inference (which includes unification as a subproblem), see particularly Gundry's thesis which presents a reconstruction of Miller's higher-order pattern unification (ch 4). I also wondered about whether mixed-prefix unification (Miller 1992) would be definable as an instance of this approach (whether for first or higher order patterns). So, I feel the context and relationship of this work to prior work could probably be strengthened somewhat. 151 | 152 | I think the presentational issues may be possible to remediate as part of a revision but the current state of the paper requires quite a lot of effort (flipping back and forth and guessing missing details / giving benefit of the doubt). So I would be open to conditional acceptance if other reviewers that have appropriate expertise agree that the formal content of the paper is sound. 153 | 154 | Detailed comments 155 | 156 | In my review the markdown means: please insert **this**, please delete ~~that~~, please spellcheck _this_. 157 | 158 | 6: Lawvere theories are mentioned here but not in the rest of the paper, can it be explained what corresponds to Lawvere theories here (e.g. GB signatures/their initial algebras)? 159 | 160 | 9: missing "and" before "(intrinsic)" 161 | 162 | 29: the judgment for unification here with $t,u$ doesn't seem to be used much later, instead $t=u$ is used. 163 | 164 | 41: The rule for FO does not make much sense to me. In first order terms, metavariables do not take arguments so m should always be zero. It seems OK to allow multiple object variables (universal/rigid variables) so n could be greater than zero, and need not equal m. So if we change the precondition to $m = 0$ and have no constraint on $M$ then the rule on line 71 can be instantiated in this way provided $\mathcal{A}$ is a category whose objects are natural numbers and zero is initial so that the $x \in hom_{\mathcal{A}}(0,n)$ needed in the rule for M(x) in line 71 is uniquely defined. 165 | 166 | 59: The notation $\mathcal{O}_n$ is not defined here nor until much later in the paper. 167 | 168 | 172: "_litterature_" 169 | 170 | 284: Reading figure 2 for section 2.1 I wondered what $-\{x\}$ notation meant and had to read further to find this. Consider including the definition of $-\{x\}$ or its signature for self-containedness. 171 | 172 | Relatedly, though I see the bottom half of figure 2 mentions section 2.2, I do find it hard to read with this figure and figure 5 placed in the paper well before the beginning of section 2.2 and figures 6/7 which define the alternative versions of Tm and the other notations used in the generalized setting. 173 | 174 | 296: Figures 4 and 5 where these signatures are used are two pages away.
175 | 176 | 464: If unify-flex-* is syntactically identical in the generalized case, can it actually be defined once and reused in Agda (perhaps by abstracting over the Tm type and commonPositions/equalizer functions somehow?) If not, I would slightly prefer having the definition repeated since even if the code is the same, the types are different, and the current way the figures are organized means you have to read carefully to realize the version of prune in figure 5 is not called explicitly but instead called in the unify-flex-* function implicitly included from figure 4. 177 | 178 | 560: It might be helpful to illustrate example 2.5 with Agda code showing how the binding signature specification in figure 6 can be instantiated - ideally we'd be able to see more concretely how the code in figures 2(top) and 4 falls out by instantiating figures 2(bottom) and 5 respectively that way, currently it is a little mysterious. 179 | 180 | 181 | 622: "then , thus" - extra space before comma 182 | 183 | 674: in $S = (\mathcal{A},O,\alpha)$ it seems that the $O$ should be $\mathcal{O}$ based on how things are written later. Also, this section does not make much sense to present before the definitions of GB-signature and pattern-friendly (needed for lemma 3.12) are given in section 3.2 (def 3.13/3.14). 184 | 185 | 744: Is $J$ in $\int J \to \mathcal{A}$ meant to be $\mathcal{O}$? If not, what is it? 186 | 187 | 755: It was easy to find out that finite connected limits exist if and only if all pullbacks and equalizers exist (which makes it clearer why you want this); perhaps mention this parenthetically? 188 | 189 | 775: "**In** the rest..." 190 | 191 | 804-810: it would be helpful to hoist the definition of $\mathcal{C}$ out of Lemma 3.21 and perhaps parameterize it explicitly on $S$, to make the statements of lemmas 3.22-3.23 self-contained. 192 | 193 | 805: "hold~~s~~" 194 | 195 | 827: Please insert words between mathematical statements such as between $\Gamma,$ and $x$ and between $\Gamma_1', $ and $\sigma$ to make it easier to split the sentence up. 196 | 197 | 860: The second $\mathcal{L}x$ on this line should be $\mathcal{L}y$. 198 | 199 | 863: $(X : N)$ should be $(X : n)$ I think. 200 | 201 | 881: "U-RIG **rule**." 202 | 203 | 971: "empty size" -> "size zero"? 204 | 205 | 990: the section on termination finishes without a statement of the termination theorem itself 206 | 207 | 1002: Again as a presentational suggestion, would it be worth considering providing this section earlier, and if possible fleshing it out with the definitions of the GB signatures and pattern-friendly operations? They are described in the text but details, for example, of how we would accommodate sets as arguments to metavariables, or ordered linear bindings, might help make things more concrete earlier in the paper before the theory development. 208 | 209 | 1061: I think in the third column the $\Rightarrow$ should be a -->> (not sure of the latex macro) 210 | 211 | 1080: should this be a table (say table 2)? Later the System F definitions are said to be in table 1, which is on the previous page. 212 | 213 | Also, am I right in believing that since the System F syntax is not up to any equational theory / normal form structure, the instantiation as a GB-signature is largely straightforward? The only unusual aspect is the need to do bookkeeping for two contexts, i.e. A is a product category?
214 | 215 | 1100: In unifying $M(\vec{x})$ and $M(\vec{y})$ in a typed setting, do we need to check that the matching xs and ys have the same types (or are we relying on the type system to have checked this already I guess)? 216 | 217 | 1150: "denote**s**" 218 | 219 | 220 | References: 221 | 222 | Nicolas Pouillard, François Pottier: 223 | A unified treatment of syntax with binders. J. Funct. Program. 22(4-5): 614-704 (2012) 224 | 225 | Nicolas Pouillard, François Pottier: 226 | A fresh look at programming with names and binders. ICFP 2010: 217-228 227 | 228 | Nicolas Pouillard: 229 | Nameless, painless. ICFP 2011: 320-332 230 | 231 | Adam Michael Gundry: 232 | Type inference, Haskell and dependent types. University of Strathclyde, Glasgow, UK, 2013 233 | 234 | Adam Gundry, Conor McBride, James McKinna: 235 | Type Inference in Context. MSFP@ICFP 2010: 43-54 236 | 237 | 238 | 239 | Review #103C 240 | =========================================================================== 241 | 242 | Overall merit 243 | ------------- 244 | 2. Weak reject 245 | 246 | Reviewer expertise 247 | ------------------ 248 | 3. Knowledgeable 249 | 250 | Paper summary 251 | ------------- 252 | The paper generalizes Miller's unification on higher-order patterns by taking a 253 | categorical perspective. The gained generality is exemplified with an ordered 254 | linear lambda calculus and polymorphism. Other extensions, such as dependent 255 | types, cannot be captured with the present approach. The paper also presents 256 | an implementation of the more general algorithm in Agda, where Agda is used as a 257 | programming language (rather than a theorem prover). Nevertheless, dependent 258 | types are used to capture some of the invariants of the representation and 259 | algorithm. 260 | 261 | Comments for authors 262 | -------------------- 263 | The generalization is nontrivial and (as far as I can tell with my limited 264 | categorical background) done well. However, the motivation remains unclear, 265 | especially in comparison with the (unpublished) [31] which also provides a 266 | categorical approach to pattern unification. The generalization to the ordered 267 | language makes sense, but is hardly surprising as versions for substructural 268 | pattern unification have previously been devised using syntactic methods 269 | (see, for example, Schack-Nielsen and Schurmann [LFMTP 2010]). 270 | 271 | For the polymorphic case, it is probably worth mentioning [Pfenning, LICS 1991] 272 | which works for the Calculus of Constructions. One question I have here: 273 | eta-long forms are not stable under type substitution, so respecting eta 274 | equality requires some care. From the rather abbreviated presentation in 275 | section 7.5 I couldn't tell whether and how this problem is handled. -------------------------------------------------------------------------------- /quiver.sty: -------------------------------------------------------------------------------- 1 | % *** quiver *** 2 | % A package for drawing commutative diagrams exported from https://q.uiver.app. 3 | % 4 | % This package is currently a wrapper around the `tikz-cd` package, importing necessary TikZ 5 | % libraries, and defining a new TikZ style for curves of a fixed height.
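% (Editorial note, not part of the upstream quiver package: a minimal usage sketch, in which the objects A, B and the arrow label f are purely illustrative.) The `curve` style defined below is applied to `tikz-cd` arrows, as in diagrams exported from q.uiver.app, e.g.: \begin{tikzcd} A & B \arrow["f", curve={height=-12pt}, from=1-1, to=1-2] \end{tikzcd}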
6 | % 7 | % Version: 1.2.2 8 | % Authors: 9 | % - varkor (https://github.com/varkor) 10 | % - AndréC (https://tex.stackexchange.com/users/138900/andr%C3%A9c) 11 | 12 | \NeedsTeXFormat{LaTeX2e} 13 | \ProvidesPackage{quiver}[2021/01/11 quiver] 14 | 15 | % `tikz-cd` is necessary to draw commutative diagrams. 16 | \RequirePackage{tikz-cd} 17 | % `amssymb` is necessary for `\lrcorner` and `\ulcorner`. 18 | %\RequirePackage{amssymb} 19 | % `calc` is necessary to draw curved arrows. 20 | \usetikzlibrary{calc} 21 | % `pathmorphing` is necessary to draw squiggly arrows. 22 | \usetikzlibrary{decorations.pathmorphing} 23 | 24 | % A TikZ style for curved arrows of a fixed height, due to AndréC. 25 | \tikzset{curve/.style={settings={#1},to path={(\tikztostart) 26 | .. controls ($(\tikztostart)!\pv{pos}!(\tikztotarget)!\pv{height}!270:(\tikztotarget)$) 27 | and ($(\tikztostart)!1-\pv{pos}!(\tikztotarget)!\pv{height}!270:(\tikztotarget)$) 28 | .. (\tikztotarget)\tikztonodes}}, 29 | settings/.code={\tikzset{quiver/.cd,#1} 30 | \def\pv##1{\pgfkeysvalueof{/tikz/quiver/##1}}}, 31 | quiver/.cd,pos/.initial=0.35,height/.initial=0} 32 | 33 | % TikZ arrowhead/tail styles. 34 | \tikzset{tail reversed/.code={\pgfsetarrowsstart{tikzcd to}}} 35 | \tikzset{2tail/.code={\pgfsetarrowsstart{Implies[reversed]}}} 36 | \tikzset{2tail reversed/.code={\pgfsetarrowsstart{Implies}}} 37 | % TikZ arrow styles. 38 | \tikzset{no body/.style={/tikz/dash pattern=on 0 off 1mm}} 39 | 40 | \endinput 41 | -------------------------------------------------------------------------------- /refstyle.sty: -------------------------------------------------------------------------------- 1 | % Ambroise: suggested by chatgpt, otherwise it conflicts with jfp document class 2 | \let\figlabel\relax 3 | %% 4 | %% This is file `refstyle.sty', 5 | %% generated with the docstrip utility. 6 | %% 7 | %% The original source files were: 8 | %% 9 | %% refstyle.dtx (with options: `pkg') 10 | %% 11 | %% Copyright (C) 2002--2024 Danie Els 12 | %% 13 | %% ------------------------------------------------------------------- 14 | %% The refstyle package 15 | %% for the formatting of references 16 | %% ------------------------------------------------------------------- 17 | %% This work may be distributed and/or modified under the conditions 18 | %% of the LaTeX Project Public License, either version 1.3c of this 19 | %% license or (at your option) any later version. The latest version 20 | %% of this license is in 21 | %% http://www.latex-project.org/lppl.txt 22 | %% and version 1.3c or later is part of all distributions of LaTeX 23 | %% version 2008/12/01 or later. 24 | %% 25 | %% This work has the LPPL maintenance status 'maintained'. 
26 | %% 27 | %% This Current Maintainer of this work is Danie Els (dnjels@gmail.com) 28 | %% ------------------------------------------------------------------- 29 | %% 30 | \newcommand*{\RefstyleFileDate}{2024/02/01} 31 | \newcommand*{\RefstyleFileVersion}{v0.6b} 32 | 33 | \NeedsTeXFormat{LaTeX2e} 34 | \ProvidesPackage{refstyle}[\RefstyleFileDate\space 35 | \RefstyleFileVersion\space 36 | Reference formatting (DNJ Els)] 37 | \newcommand*{\RS@pkgname}{refstyle} 38 | \RequirePackage{keyval} 39 | \def\RS@setkeys#1#2{% 40 | \def\KV@prefix{KV@#1@}% 41 | \let\@tempc\relax 42 | \KV@do#2,\relax,} 43 | \@ifundefined{vref@space}{\let\vref@space\space}{} 44 | \providecommand*{\@safe@activestrue}{} 45 | \providecommand*{\@safe@activesfalse}{} 46 | \newcommand*{\RS@namelet}[1]{\expandafter\let\csname #1\endcsname} 47 | \newcommand*{\RS@nameuse}[1]{\csname #1\endcsname} 48 | \newcommand*{\RS@namedef}[1]{\expandafter\def\csname #1\endcsname} 49 | \newcommand*{\RS@robustnamedef}[1]{% 50 | \expandafter\edef\csname #1\endcsname{% 51 | \noexpand\protect\RS@nameuse{#1 }}% 52 | \RS@namedef{#1 }} 53 | \def\RS@ifundefined#1{% 54 | \begingroup\expandafter\expandafter\expandafter\endgroup 55 | \expandafter\ifx\csname#1\endcsname\relax 56 | \expandafter\@firstoftwo 57 | \else 58 | \expandafter\@secondoftwo 59 | \fi} 60 | \newcommand*{\RS@removedef}[1]{% 61 | \RS@namelet{#1}\@undefined% 62 | \RS@ifundefined{#1 }{}{\RS@namelet{#1 }\@undefined}} 63 | \newcommand*{\RS@testednamedef}[1]{% 64 | \RS@ifnamedefinable{#1}\RS@namedef{#1}} 65 | \newcommand*{\RS@testedrobustnamedef}[1]{% 66 | \RS@ifnamedefinable{#1}\RS@robustnamedef{#1}} 67 | \long\def\RS@ifnamedefinable #1{% 68 | \edef\reserved@a{#1}% 69 | \RS@ifundefined\reserved@a 70 | {\edef\reserved@b{\expandafter\@carcube \reserved@a xxx\@nil}% 71 | \ifx \reserved@b\@qend \RS@notdefinable\else 72 | \ifx \reserved@a\@qrelax \RS@notdefinable\else 73 | \PackageInfo{\RS@pkgname}{\@backslashchar\reserved@a\space created}% 74 | \fi 75 | \fi}% 76 | \RS@notdefinable} 77 | \gdef\RS@notdefinable{% 78 | \PackageError{\RS@pkgname}{% 79 | Command \@backslashchar\reserved@a\space 80 | already defined.\MessageBreak 81 | Or name \@backslashchar\@qend... 
illegal.\MessageBreak 82 | It can not be redefined by the \@backslashchar newref% 83 | }{% 84 | If \@backslashchar\reserved@a\space is not important\MessageBreak 85 | then \protect\let\@backslashchar\reserved@a% 86 | =\protect\relax,\MessageBreak 87 | else use a different \@backslashchar newref.}% 88 | } 89 | \newcommand*{\RS@setbool}[2]{% 90 | \lowercase{\def\@tempa{#2}}% 91 | \@ifundefined{@tempswa\@tempa}% 92 | {\PackageError{\RS@pkgname}% 93 | {You can only set the option to `true' or `false'}\@ehc}% 94 | {\csname#1\@tempa\endcsname}} 95 | \def\RS@firstcap#1#2\@nil{% 96 | \iffalse{\fi 97 | \uppercase{\edef\RS@cap{\iffalse}\fi#1}#2}}% 98 | \newif\ifRS@keyactive 99 | \@ifpackagewith{\@currname}{nokeyprefix}% 100 | {\RS@keyactivefalse}% 101 | {\RS@keyactivetrue} 102 | \DeclareOption{nokeyprefix}{\OptionNotUsed} 103 | \newif\ifRSstar\RSstarfalse 104 | \newif\ifRSnameon\RSnameontrue 105 | \newif\ifRScapname\RScapnamefalse 106 | \newif\ifRSplural\RSpluralfalse 107 | \newif\ifRSlsttwo\RSlsttwofalse 108 | \newcommand*{\newref}[1]{% 109 | \lowercase{\def\RS@tempa{#1}}% 110 | \expandafter\RS@newref\expandafter{\RS@tempa}} 111 | \newcommand*{\RS@newref}[2]{% 112 | \RS@clearref{#1}% 113 | \ifRS@keyactive 114 | \define@key{RS@#1}{key}[#1:]{\RS@namedef{RS@#1@key}{##1}}% 115 | \else 116 | \define@key{RS@#1}{key}[]{\RS@namedef{RS@#1@key}{##1}}% 117 | \fi 118 | \define@key{RS@#1}{s}[true]{\RS@setbool{RSplural}{##1}}% 119 | \define@key{RS@#1}{name}[]{\RS@namedef{RS@#1@name}{##1}}% 120 | \define@key{RS@#1}{names}[]{\RS@namedef{RS@#1@names}{##1}}% 121 | \define@key{RS@#1}{Name}[]{\RS@namedef{RS@#1@Name}{##1}}% 122 | \define@key{RS@#1}{Names}[]{\RS@namedef{RS@#1@Names}{##1}}% 123 | \define@key{RS@#1}{rngtxt}[\space to~]{\RS@namedef{RS@#1@rngtxt}{##1}}% 124 | \define@key{RS@#1}{lsttwotxt}[\space and~]{\RS@namedef{RS@#1@lsttwotxt}{##1}}% 125 | \define@key{RS@#1}{lsttxt}[\space and~]{\RS@namedef{RS@#1@lsttxt}{##1}}% 126 | \define@key{RS@#1}{refcmd}[\ref{####1}]{\RS@namedef{RS@#1@rcmd}####1{##1}}% 127 | \define@key{RS@#1}{xr}[]{\RS@namedef{RS@#1@xr}{##1}}% 128 | \define@key{RS@#1}{vref}[true]{\RS@namedef{RS@#1vref}{##1}}% 129 | \RS@setkeys{RS@#1}{key, 130 | s=false, 131 | name,names,Name,Names, 132 | rngtxt,lsttwotxt,lsttxt, 133 | refcmd, 134 | xr, 135 | vref=false}% 136 | \RS@setkeys{RS@#1}{#2}% 137 | \RS@buildref{#1}% 138 | } 139 | \newcommand*{\RS@clearref}[1]{% 140 | \RS@ifundefined{RS@#1@template} 141 | {\RS@namedef{RS@#1@template}{#1}% 142 | \PackageInfo{\RS@pkgname}% 143 | {New reference template \protect\newref{#1}}{}} 144 | {\PackageInfo{\RS@pkgname}% 145 | {Reference template \protect\newref{#1} redefined}{} 146 | \RS@firstcap#1\@nil 147 | \RS@removedef{#1key}% 148 | \RS@removedef{#1label}% 149 | \RS@removedef{#1ref}% 150 | \RS@removedef{\RS@cap ref}% 151 | \RS@removedef{#1rangeref}% 152 | \RS@removedef{\RS@cap rangeref}% 153 | \RS@removedef{#1pageref}% 154 | }% 155 | } 156 | \newcommand*{\RS@buildref}[1]{% 157 | \RS@firstcap#1\@nil 158 | \RS@testednamedef{#1key}{\RS@nameuse{RS@#1@key}} 159 | \RS@testednamedef{#1label}##1{\label{\RS@nameuse{RS@#1@key}##1}} 160 | \RS@testedrobustnamedef{#1ref}{\RScapnamefalse\RS@cmd{ref}{#1}} 161 | \RS@testedrobustnamedef{\RS@cap ref}{\RScapnametrue\RS@cmd{ref}{#1}} 162 | \RS@testedrobustnamedef{#1rangeref}{\RScapnamefalse\RS@cmd{rangeref}{#1}} 163 | \RS@testedrobustnamedef{\RS@cap rangeref}{\RScapnametrue\RS@cmd{rangeref}{#1}} 164 | \RS@testedrobustnamedef{#1pageref}{\RScapnamefalse\RS@cmd{pageref}{#1}} 165 | } 166 | \newcommand*{\RS@cmd}[2]{% 167 | 
\@ifstar{\RSstartrue\RSnameonfalse\RS@@cmd{#1}{#2}}% 168 | {\RSstarfalse\RSnameontrue\RS@@cmd{#1}{#2}}} 169 | \newcommand*{\RS@@cmd}[2]{% 170 | \@ifnextchar[% 171 | {\RS@nameuse{RS@#1}{#2}}% 172 | {\RS@nameuse{RS@#1}{#2}[]}} 173 | \def\RS@ref#1[#2]#3{% 174 | \begingroup 175 | \RS@setkeys{RS@#1}{#2}% 176 | \@safe@activestrue% 177 | \edef\RS@tmpa{\zap@space#3 \@empty}% 178 | \@safe@activesfalse% 179 | \edef\RS@tmpa{\noexpand\RS@@ref{#1} \RS@tmpa,\relax\noexpand\@eolst}% 180 | \RS@tmpa% 181 | \endgroup} 182 | \def\RS@@ref#1 #2,#3\@eolst{% 183 | \ifx\relax#3\relax 184 | \RS@makename{#1}% 185 | \RS@makeref{#1}{#2}% 186 | \RS@makevpageref{#1}{#2}% 187 | \else 188 | \RSpluraltrue% 189 | \RS@makename{#1}% 190 | \RS@makeref{#1}{#2}% 191 | \RS@makevpageref{#1}{#2}% 192 | \RSnameonfalse% 193 | \RSlsttwotrue% 194 | \RS@@@ref{#1} #3\@eolst% 195 | \fi} 196 | \def\RS@@@ref#1 #2,#3\@eolst{% 197 | \ifx\relax#3\relax 198 | \ifRSlsttwo 199 | \RS@nameuse{RS@#1@lsttwotxt}% 200 | \else 201 | \RS@nameuse{RS@#1@lsttxt}% 202 | \fi 203 | \RS@makeref{#1}{#2}% 204 | \RS@makevpageref{#1}{#2}% 205 | \else 206 | \RSlsttwofalse% 207 | \unskip,\space% 208 | \RS@makeref{#1}{#2}% 209 | \RS@makevpageref{#1}{#2}% 210 | \RS@@@ref{#1} #3\@eolst% 211 | \fi} 212 | \def\RS@rangeref#1[#2]#3#4{% 213 | \begingroup 214 | \RS@setkeys{RS@#1}{#2}% 215 | \RSpluraltrue% 216 | \RS@makename{#1}% 217 | \RS@makeref{#1}{#3}% 218 | \RS@nameuse{RS@#1@rngtxt}% 219 | \RSnameonfalse% 220 | \RS@makeref{#1}{#4}% 221 | \RS@makevpagerefrange{#1}{#3}{#4}% 222 | \endgroup} 223 | \def\RS@pageref#1[#2]#3{% 224 | \begingroup% 225 | \RS@setkeys{RS@#1}{#2}% 226 | \RS@ifvref{#1}% 227 | {\mbox{}\vpageref*{\RS@lbl{#1}{#3}}}% 228 | {\reftextfaraway{\RS@lbl{#1}{#3}}}% 229 | {\pageref{\RS@lbl{#1}{#3}}}% 230 | \endgroup} 231 | \newcommand*{\RS@true}{true} 232 | \newcommand*{\RS@false}{false} 233 | \newcommand*{\RS@far}{far} 234 | \newcommand{\RS@ifvref}[4]{% 235 | \edef\RS@tempa{\RS@nameuse{RS@#1vref}}% 236 | \ifx\RS@tempa\RS@true\relax 237 | #2% 238 | \else\ifx\RS@tempa\RS@far\relax 239 | #3% 240 | \else\ifx\RS@tempa\RS@false\relax 241 | #4% 242 | \else 243 | \PackageError{\RS@pkgname}% 244 | {You can only set the vref option to `true', `far' or `false'}\@ehc 245 | \fi\fi\fi} 246 | \newcommand{\RS@makename}[1]{% 247 | \ifRSstar\else\ifRSnameon 248 | \ifRSplural 249 | \ifRScapname 250 | \RS@nameuse{RS@#1@Names}% 251 | \else 252 | \RS@nameuse{RS@#1@names}% 253 | \fi 254 | \else 255 | \ifRScapname 256 | \RS@nameuse{RS@#1@Name}% 257 | \else 258 | \RS@nameuse{RS@#1@name}% 259 | \fi 260 | \fi 261 | \fi\fi 262 | } 263 | \newcommand*{\RS@lbl}[2]{% 264 | \RS@nameuse{RS@#1@xr}\RS@nameuse{RS@#1@key}#2% 265 | } 266 | \newcommand{\RS@makeref}[2]{% 267 | \RS@nameuse{RS@#1@rcmd}{\RS@lbl{#1}{#2}}% 268 | } 269 | \newcommand{\RS@makevpageref}[2]{% 270 | \RS@ifvref{#1}% 271 | {\vpageref[\unskip]{\RS@lbl{#1}{#2}}}% 272 | { \reftextfaraway{\RS@lbl{#1}{#2}}}% 273 | {}% 274 | } 275 | \newcommand{\RS@makevpagerefrange}[3]{% 276 | \RS@ifvref{#1}% 277 | {\space\vpagerefrange[\unskip]{\RS@lbl{#1}{#2}}{\RS@lbl{#1}{#3}}}% 278 | {\space\vpagerefrange[\unskip]{\RS@lbl{#1}{#2}}{\RS@lbl{#1}{#3}}}% 279 | {}% 280 | } 281 | \AtBeginDocument{% 282 | \providecommand{\vpageref}{% 283 | \PackageError{\RS@pkgname}% 284 | {The vref option used, but varioref.sty not loaded.}% 285 | {Load varioref.sty}} 286 | \providecommand{\reftextfaraway}{% 287 | \PackageError{\RS@pkgname}% 288 | {The vref=far option used, but varioref.sty not loaded.}% 289 | {Load varioref.sty}} 290 | \providecommand{\vpagerefrange}{% 291 
| \PackageError{\RS@pkgname}% 292 | {The vref option used, but varioref.sty not loaded.}% 293 | {Load varioref.sty}} 294 | } 295 | \def\RSaddto#1#2{% 296 | #2% 297 | \@temptokena{#2}% 298 | \ifx#1\relax 299 | \let#1\@empty 300 | \fi 301 | \ifx#1\undefined 302 | \edef#1{\the\@temptokena}% 303 | \else 304 | \toks@\expandafter{#1}% 305 | \edef#1{\the\toks@\the\@temptokena}% 306 | \fi 307 | \@temptokena{}\toks@\@temptokena} 308 | \def\DeclareLangOpt#1#2{% 309 | \edef\RS@tempa{\expandafter\@gobble\string#2}% 310 | \RS@ifundefined{\RS@tempa}% 311 | {\PackageError{\RS@pkgname}{% 312 | Unknown definitions \@backslashchar\RS@tempa\MessageBreak 313 | for language option `#1'}{}}% 314 | {\DeclareOption{#1}{% 315 | \AtBeginDocument{\expandafter\RSaddto\csname extras#1\endcsname #2}}}% 316 | } 317 | \newcommand*{\RS@cfgfile}{refstyle.cfg} 318 | \newcommand*{\RS@reffile}{refstyle.def} 319 | \@ifpackagewith{\@currname}{noconfig}% 320 | {\PackageInfo{\RS@pkgname}{No config file loaded}}% 321 | {\InputIfFileExists{\RS@reffile}% 322 | {\PackageInfo{\RS@pkgname}{Local config file \RS@reffile\space used}} 323 | {\InputIfFileExists{\RS@cfgfile}% 324 | {\PackageInfo{\RS@pkgname}{Config file \RS@cfgfile\space used}}% 325 | {\PackageInfo{\RS@pkgname}{No config file found}}}} 326 | \DeclareOption{noconfig}{\OptionNotUsed}% 327 | \ProcessOptions*\relax 328 | \endinput 329 | %% 330 | %% End of file `refstyle.sty'. 331 | -------------------------------------------------------------------------------- /short.lyx: -------------------------------------------------------------------------------- 1 | #LyX 2.3 created this file. For more info see http://www.lyx.org/ 2 | \lyxformat 544 3 | \begin_document 4 | \begin_header 5 | \save_transient_properties true 6 | \origin unavailable 7 | \textclass acmart 8 | \begin_preamble 9 | \input{common-preamble} 10 | %\newenvironment{remark}{\begin{rem}}{\end{rem}} 11 | %\newenvironment{lemma}{\begin{lem}}{\end{lem}} 12 | %\newenvironment{proposition}{\begin{prop}}{\end{prop}} 13 | %\newenvironment{corollary}{\begin{cor}}{\end{cor}} 14 | %\newenvironment{definition}{\begin{defn}}{\end{defn}} 15 | %\newenvironment{theorem}{\begin{thm}}{\end{thm}} 16 | \newcommand{\theproposition}{\thethm} 17 | 18 | \setboolean{fullpaper}{false} 19 | 20 | \newcommand{\startappendix}{\appendix} 21 | %\newcommand{\startappendix}{\appendices} 22 | %\newcommand{\secappendix}[1]{\appendix[#1]} 23 | \newcommand{\secappendix}[1]{\section{#1}} 24 | 25 | 26 | % LICS 27 | % \newcommand{\keywords}[1]{\begin{IEEEkeywords}#1\end{IEEEkeywords}} 28 | \end_preamble 29 | \options acmsmall,review,screen 30 | \use_default_options false 31 | \begin_modules 32 | enumitem 33 | \end_modules 34 | \maintain_unincluded_children false 35 | \begin_local_layout 36 | #\DeclareLyXModule{Other} 37 | #DescriptionBegin 38 | #Defines Notation 39 | #DescriptionEnd 40 | #Category: theorems 41 | 42 | Format 66 43 | 44 | Requires amsmath 45 | 46 | # acmart document class includes amssymb 47 | Provides amssymb 1 48 | 49 | # The environments defined (regular and starred) are : 50 | # - Property 51 | 52 | Style Notation 53 | CopyStyle Theorem 54 | DependsOn Theorem 55 | LatexName notation 56 | LabelString "Notation \thethm." 57 | Preamble 58 | \AtEndPreamble{% 59 | \theoremstyle{acmdefinition} 60 | \newtheorem{notation}[theorem]{Notation}} 61 | EndPreamble 62 | End 63 | 64 | Style Remark 65 | CopyStyle Theorem 66 | DependsOn Theorem 67 | LatexName remark 68 | LabelString "Remark \thethm." 
69 | Preamble 70 | \AtEndPreamble{% 71 | \theoremstyle{acmdefinition} 72 | \newtheorem{remark}[theorem]{Remark}} 73 | EndPreamble 74 | End 75 | 76 | Style Property 77 | CopyStyle Theorem 78 | DependsOn Theorem 79 | LatexName property 80 | LabelString "Property \thethm." 81 | Preamble 82 | \AtEndPreamble{% 83 | \theoremstyle{acmdefinition} 84 | \newtheorem{property}[theorem]{Property}} 85 | EndPreamble 86 | End 87 | 88 | 89 | Style "Personal Question" 90 | CopyStyle Theorem 91 | DependsOn Theorem 92 | LatexName personalquestion 93 | LabelString "Personal Question \thethm." 94 | Preamble 95 | %\newenvironment{personalquestion}{\shortfull{\comment}{\begin{question}\color{red}(personal)}}{\shortfull{\endcomment}{\end{question}}} 96 | \newenvironment{personalquestion}{\comment}{\endcomment} 97 | EndPreamble 98 | End 99 | 100 | 101 | Style "Long Proof" 102 | CopyStyle Proof 103 | DependsOn Proof 104 | LatexName longproof 105 | LabelString "Long proof." 106 | Preamble 107 | \newenvironment{longproof}{\shortfull{\comment}{\begin{proof}}}{\shortfull{\endcomment}{\end{proof}}} 108 | EndPreamble 109 | End 110 | 111 | Style "Appendix" 112 | CopyStyle Section 113 | DependsOn Section 114 | LatexName secappendix 115 | LabelString "Appendix" 116 | Preamble 117 | EndPreamble 118 | End 119 | \end_local_layout 120 | \language british 121 | \language_package none 122 | \inputencoding auto 123 | \fontencoding global 124 | \font_roman "default" "default" 125 | \font_sans "default" "default" 126 | \font_typewriter "default" "default" 127 | \font_math "auto" "auto" 128 | \font_default_family default 129 | \use_non_tex_fonts false 130 | \font_sc false 131 | \font_osf false 132 | \font_sf_scale 100 100 133 | \font_tt_scale 100 100 134 | \use_microtype false 135 | \use_dash_ligatures false 136 | \graphics default 137 | \default_output_format default 138 | \output_sync 0 139 | \bibtex_command bibtex 140 | \index_command default 141 | \float_placement tbh 142 | \paperfontsize default 143 | \spacing single 144 | \use_hyperref true 145 | \pdf_title "Your Title" 146 | \pdf_author "Your Name" 147 | \pdf_bookmarks true 148 | \pdf_bookmarksnumbered true 149 | \pdf_bookmarksopen true 150 | \pdf_bookmarksopenlevel 1 151 | \pdf_breaklinks false 152 | \pdf_pdfborder true 153 | \pdf_colorlinks false 154 | \pdf_backref false 155 | \pdf_pdfusetitle false 156 | \pdf_quoted_options "pdfpagelayout=OneColumn, pdfnewwindow=true, pdfstartview=XYZ, plainpages=false" 157 | \papersize default 158 | \use_geometry true 159 | \use_package amsmath 1 160 | \use_package amssymb 1 161 | \use_package cancel 1 162 | \use_package esint 1 163 | \use_package mathdots 1 164 | \use_package mathtools 1 165 | \use_package mhchem 1 166 | \use_package stackrel 1 167 | \use_package stmaryrd 1 168 | \use_package undertilde 1 169 | \cite_engine natbib 170 | \cite_engine_type authoryear 171 | \biblio_style ACM-Reference-Format 172 | \use_bibtopic false 173 | \use_indices false 174 | \paperorientation portrait 175 | \suppress_date false 176 | \justification true 177 | \use_refstyle 0 178 | \use_minted 0 179 | \index Index 180 | \shortcut idx 181 | \color #008000 182 | \end_index 183 | \secnumdepth 3 184 | \tocdepth 3 185 | \paragraph_separation indent 186 | \paragraph_indentation default 187 | \is_math_indent 0 188 | \math_numbering_side default 189 | \quotes_style british 190 | \dynamic_quotes 0 191 | \papercolumns 1 192 | \papersides 1 193 | \paperpagestyle default 194 | \tracking_changes false 195 | \output_changes false 196 | \html_math_output 0 197 | 
\html_css_as_file 0 198 | \html_be_strict false 199 | \end_header 200 | 201 | \begin_body 202 | 203 | \begin_layout Standard 204 | \begin_inset CommandInset include 205 | LatexCommand input 206 | filename "core.lyx" 207 | 208 | \end_inset 209 | 210 | 211 | \end_layout 212 | 213 | \end_body 214 | \end_document 215 | -------------------------------------------------------------------------------- /short.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/amblafont/unification/7d658c077f2582d018806136dc956b35632454dd/short.pdf -------------------------------------------------------------------------------- /slides.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/amblafont/unification/7d658c077f2582d018806136dc956b35632454dd/slides.pdf -------------------------------------------------------------------------------- /unification.agda-lib: -------------------------------------------------------------------------------- 1 | name: unification 2 | depend: 3 | standard-library 4 | include: . 5 | -------------------------------------------------------------------------------- /unifier-v12.json: -------------------------------------------------------------------------------- 1 | {"graph":{"edges":[{"from":0,"id":6,"label":{"label":"d","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":4},{"from":4,"id":7,"label":{"label":"\\eta^{(\\vec{n})}","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":1},{"from":1,"id":8,"label":{"label":"o","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":2,"id":9,"label":{"label":"g'^*","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":3},{"from":0,"id":10,"label":{"label":"\\delta","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":2},{"from":0,"id":11,"label":{"label":"d","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":4},{"from":4,"id":12,"label":{"label":"g'^{(\\vec{n})}","style":{"alignment":"right","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":5},{"from":5,"id":13,"label":{"label":"o","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":3},{"from":1,"id":14,"label":{"label":"g'^{*(\\vec{n})}","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":5},{"from":5,"id":15,"label":{"label":"o","style":{"alignment":"left","bend":0,"dashed":false,"double":false,"head":"default","position":0.5,"tail":"none"}},"to":3}],"nodes":[{"id":0,"label":{"label":"ym","pos":[300,300]}},{"id":1,"label":{"label":"T(y(m+\\vec{n}))^{(\\vec{n})}","pos":[500,500]}},{"id":2,"label":{"label":"T(y(m+\\vec{n}))","pos":[700,300]}},{"id":3,"label":{"label":"T(\\Delta)","pos":[700,700]}},{"id":4,"label":{"label":"y(m+\\vec{n})^{(\\vec{n})}","pos":[300,500]}},{"id":5,"label":{"label":"T(\\Delta)^{(\\vec{n})}","pos":[300,700]}}],"sizeGrid":200},"version":3} --------------------------------------------------------------------------------