├── smol ├── test.mli ├── level.mli ├── level.ml ├── index.mli ├── dune ├── index.ml ├── HACKING.md ├── stree.mli ├── stree.ml ├── test.ml └── styper.ml ├── syntax ├── test.ml ├── clexer.mli ├── dune ├── ctree.mli ├── ctree.ml ├── clexer.ml └── cparser.mly ├── teika ├── test.mli ├── typer.mli ├── tprinter.mli ├── solve.mli ├── dune ├── terror.mli ├── terror.ml ├── ttree.mli ├── ttree.ml ├── tprinter.ml ├── solve.ml ├── test.ml └── typer.ml ├── .envrc ├── teikalsp ├── teikalsp.mli ├── dune ├── lsp_request.mli ├── lsp_text_document.mli ├── lsp_text_document.ml ├── lsp_notification.mli ├── lsp_context.mli ├── lsp_channel.mli ├── lsp_error.mli ├── lsp_error.ml ├── lsp_request.ml ├── lsp_context.ml ├── lsp_notification.ml ├── teikalsp.ml └── lsp_channel.ml ├── .ocamlformat ├── .gitignore ├── dune-project ├── jsend ├── emit.mli ├── untype.mli ├── jprinter.mli ├── var.mli ├── utree.ml ├── dune ├── jtree.ml ├── utree.mli ├── jtree.mli ├── var.ml ├── test.ml ├── jprinter.ml ├── emit.ml └── untype.ml ├── utils ├── dune ├── utils.mli └── utils.ml ├── README.md ├── nix ├── shell.nix └── default.nix ├── flake.nix ├── design ├── MODULE.md ├── GOALS.md ├── INFERENCE.md ├── SYNTAX.md └── LANGUAGE.md ├── LICENSE ├── teikavsc ├── main.ts ├── package.json └── tsconfig.json └── flake.lock /smol/test.mli: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /syntax/test.ml: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /teika/test.mli: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.envrc: -------------------------------------------------------------------------------- 1 | use_flake 2 | 
-------------------------------------------------------------------------------- /teikalsp/teikalsp.mli: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.ocamlformat: -------------------------------------------------------------------------------- 1 | version = unknown 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | _build 2 | node_modules 3 | .direnv 4 | dist 5 | -------------------------------------------------------------------------------- /dune-project: -------------------------------------------------------------------------------- 1 | (lang dune 2.9) 2 | 3 | (using menhir 2.0) 4 | -------------------------------------------------------------------------------- /jsend/emit.mli: -------------------------------------------------------------------------------- 1 | val emit_term : Utree.term -> Jtree.expression 2 | -------------------------------------------------------------------------------- /jsend/untype.mli: -------------------------------------------------------------------------------- 1 | open Teika 2 | 3 | val untype_term : Ttree.term -> Utree.term 4 | -------------------------------------------------------------------------------- /jsend/jprinter.mli: -------------------------------------------------------------------------------- 1 | val pp_expression : Format.formatter -> Jtree.expression -> unit 2 | 3 | -------------------------------------------------------------------------------- /teika/typer.mli: -------------------------------------------------------------------------------- 1 | open Ttree 2 | 3 | type value 4 | 5 | val infer_term : term -> (value, exn) result 6 | -------------------------------------------------------------------------------- /utils/dune: 
-------------------------------------------------------------------------------- 1 | (library 2 | (name utils) 3 | (preprocess 4 | (pps ppx_deriving.eq ppx_deriving.ord ppx_deriving.eq ppx_deriving.show))) 5 | -------------------------------------------------------------------------------- /teikalsp/dune: -------------------------------------------------------------------------------- 1 | (executable 2 | (name teikalsp) 3 | (libraries lsp eio eio_main) 4 | (preprocess 5 | (pps ppx_deriving.show ppx_deriving.eq ppx_deriving.ord sedlex.ppx))) 6 | -------------------------------------------------------------------------------- /teikalsp/lsp_request.mli: -------------------------------------------------------------------------------- 1 | open Lsp.Types 2 | 3 | module Server_life_cycle : sig 4 | val initialize : 5 | Lsp_context.t -> params:InitializeParams.t -> InitializeResult.t 6 | end 7 | -------------------------------------------------------------------------------- /teikalsp/lsp_text_document.mli: -------------------------------------------------------------------------------- 1 | type document 2 | type t = document 3 | 4 | val teika : version:int -> text:string -> document 5 | val with_change : version:int -> text:string -> document -> document 6 | -------------------------------------------------------------------------------- /teika/tprinter.mli: -------------------------------------------------------------------------------- 1 | open Format 2 | open Ttree 3 | open Terror 4 | 5 | val pp_term : formatter -> term -> unit 6 | val pp_pat : formatter -> pat -> unit 7 | val pp_error : Format.formatter -> error -> unit 8 | -------------------------------------------------------------------------------- /smol/level.mli: -------------------------------------------------------------------------------- 1 | type level 2 | type t = level [@@deriving show, eq] 3 | 4 | val zero : level 5 | val next : level -> level 6 | val offset : from:level -> to_:level -> Index.t 7 | val ( 
< ) : level -> level -> bool 8 | 9 | module Map : Map.S with type key = level 10 | -------------------------------------------------------------------------------- /teika/solve.mli: -------------------------------------------------------------------------------- 1 | open Syntax 2 | 3 | exception Solve_error of { loc : Location.t; exn : exn } 4 | 5 | type context 6 | 7 | (* TODO: couple all the initial contexts *) 8 | val initial : context 9 | val solve_term : context -> Ctree.term -> (Ttree.term, exn) result 10 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Teika 2 | 3 | Teika is a functional programming language. Same pronunciation as in "take a break" or if you prefer: "teika break". 4 | 5 | ## WIP 6 | 7 | This is highly in progress, so it's accepted and even expected that at any point the main branch may be broken. 8 | -------------------------------------------------------------------------------- /smol/level.ml: -------------------------------------------------------------------------------- 1 | type level = int 2 | and t = level [@@deriving show, eq] 3 | 4 | let zero = 0 5 | 6 | (* TODO: check for overflows *) 7 | let next n = n + 1 8 | let offset ~from ~to_ = Index.of_int (to_ - from) 9 | let ( < ) : level -> level -> bool = ( < ) 10 | 11 | module Map = Map.Make (Int) -------------------------------------------------------------------------------- /syntax/clexer.mli: -------------------------------------------------------------------------------- 1 | exception Lexer_error of { loc : Location.t } 2 | exception Parser_error of { loc : Location.t } 3 | 4 | val loc : Sedlexing.lexbuf -> Location.t 5 | val next : Sedlexing.lexbuf -> Cparser.token * Lexing.position * Lexing.position 6 | val term_opt_from_string : string -> Ctree.term option 7 | -------------------------------------------------------------------------------- 
/nix/shell.nix: -------------------------------------------------------------------------------- 1 | { pkgs, teika }: 2 | 3 | with pkgs; with ocamlPackages; mkShell { 4 | inputsFrom = [ teika ]; 5 | packages = [ 6 | # Make developer life easier 7 | # formatters 8 | nixfmt 9 | # ocamlformat 10 | ocamlformat 11 | # OCaml developer tooling 12 | ocaml 13 | dune_3 14 | ocaml-lsp 15 | utop 16 | ]; 17 | } 18 | -------------------------------------------------------------------------------- /teikalsp/lsp_text_document.ml: -------------------------------------------------------------------------------- 1 | (* TODO: proper types for text and version *) 2 | type document = Smol of { version : int; text : string } 3 | type t = document 4 | 5 | let teika ~version ~text = Smol { version; text } 6 | 7 | let with_change ~version ~text document = 8 | (* TODO: use the version for something? *) 9 | let (Smol { version = _; text = _ }) = document in 10 | Smol { version; text } 11 | -------------------------------------------------------------------------------- /jsend/var.mli: -------------------------------------------------------------------------------- 1 | open Utils 2 | 3 | type var 4 | type t = var [@@deriving show] 5 | 6 | val create : Name.t -> var 7 | val equal : var -> var -> bool 8 | val compare : var -> var -> int 9 | val name : var -> Name.t 10 | 11 | (* predefined *) 12 | val type_ : var 13 | val fix : var 14 | val unit : var 15 | val debug : var 16 | val curry : var 17 | val jmp : var 18 | 19 | module Map : Map.S with type key = t 20 | -------------------------------------------------------------------------------- /smol/index.mli: -------------------------------------------------------------------------------- 1 | type index 2 | type t = index [@@deriving show, eq] 3 | 4 | val zero : index 5 | val one : index 6 | val previous : index -> index option 7 | val next : index -> index 8 | 9 | (* repr *) 10 | (* TODO: this API is non ideal *) 11 | val of_int : int -> index 12 
| val repr : index -> int 13 | 14 | (* operations *) 15 | val ( < ) : index -> index -> bool 16 | val ( > ) : index -> index -> bool 17 | -------------------------------------------------------------------------------- /jsend/utree.ml: -------------------------------------------------------------------------------- 1 | type term = 2 | | UT_loc of { term : term; loc : Location.t } 3 | | UT_var of { var : Var.t } 4 | | UT_lambda of { param : Var.t; return : term } 5 | | UT_apply of { lambda : term; arg : term } 6 | | UT_let of { var : Var.t; value : term; return : term } 7 | | UT_string of { literal : string } 8 | | UT_external of { external_ : external_ } 9 | 10 | and external_ = UE_type | UE_fix | UE_unit | UE_debug 11 | -------------------------------------------------------------------------------- /teikalsp/lsp_notification.mli: -------------------------------------------------------------------------------- 1 | open Lsp.Types 2 | 3 | module Server_life_cycle : sig 4 | val initialized : Lsp_context.t -> unit 5 | end 6 | 7 | module Text_document_sync : sig 8 | val did_open : Lsp_context.t -> params:DidOpenTextDocumentParams.t -> unit 9 | val did_change : Lsp_context.t -> params:DidChangeTextDocumentParams.t -> unit 10 | val did_close : Lsp_context.t -> params:DidCloseTextDocumentParams.t -> unit 11 | end 12 | -------------------------------------------------------------------------------- /smol/dune: -------------------------------------------------------------------------------- 1 | (library 2 | (name smol) 3 | (libraries syntax) 4 | (modules 5 | (:standard \ Test)) 6 | (preprocess 7 | (pps ppx_deriving.show ppx_deriving.eq ppx_deriving.ord sedlex.ppx))) 8 | 9 | (executable 10 | (name test) 11 | (modules Test) 12 | (libraries alcotest smol) 13 | (preprocess 14 | (pps ppx_deriving.show))) 15 | 16 | (rule 17 | (alias runtest) 18 | (deps 19 | (:exe ./test.exe)) 20 | (action 21 | (run %{exe}))) 22 | 
-------------------------------------------------------------------------------- /jsend/dune: -------------------------------------------------------------------------------- 1 | (library 2 | (name jsend) 3 | (libraries teika) 4 | (modules 5 | (:standard \ Test)) 6 | (preprocess 7 | (pps ppx_deriving.show ppx_deriving.eq ppx_deriving.ord))) 8 | 9 | (executable 10 | (name test) 11 | (modules Test) 12 | (libraries alcotest jsend) 13 | (preprocess 14 | (pps ppx_deriving.show ppx_deriving.eq ppx_deriving.ord))) 15 | 16 | (rule 17 | (alias runtest) 18 | (deps 19 | (:exe ./test.exe)) 20 | (action 21 | (run %{exe}))) 22 | -------------------------------------------------------------------------------- /jsend/jtree.ml: -------------------------------------------------------------------------------- 1 | type expression = 2 | | JE_loc of { expression : expression; loc : Location.t } 3 | | JE_var of { var : Var.t } 4 | | JE_generator of { params : Var.t list; block : block } 5 | | JE_new of { constructor : expression } 6 | | JE_call of { lambda : expression; args : expression list } 7 | | JE_yield of { expression : expression } 8 | | JE_string of { literal : string } 9 | 10 | and block = 11 | | JBlock of { consts : (Var.t * expression) list; return : expression } 12 | -------------------------------------------------------------------------------- /smol/index.ml: -------------------------------------------------------------------------------- 1 | type index = int 2 | and t = index [@@deriving show, eq] 3 | 4 | let zero = 0 5 | let one = 1 6 | let previous x = match x > 0 with true -> Some (x - 1) | false -> None 7 | (* TODO: overflow detection *) 8 | 9 | let next x = x + 1 10 | 11 | let of_int x = 12 | match x >= 0 with 13 | | true -> x 14 | | false -> raise (Invalid_argument "index must be bigger than zero") 15 | 16 | let repr x = x 17 | let ( < ) (a : index) (b : index) = a < b 18 | let ( > ) (a : index) (b : index) = a > b 19 | 
-------------------------------------------------------------------------------- /jsend/utree.mli: -------------------------------------------------------------------------------- 1 | type term = 2 | (* TODO: why is loc a term? *) 3 | | UT_loc of { term : term; loc : Location.t } 4 | | UT_var of { var : Var.t } 5 | (* TODO: patterns in the Itree? *) 6 | | UT_lambda of { param : Var.t; return : term } 7 | | UT_apply of { lambda : term; arg : term } 8 | | UT_let of { var : Var.t; value : term; return : term } 9 | | UT_string of { literal : string } 10 | | UT_external of { external_ : external_ } 11 | 12 | and external_ = UE_type | UE_fix | UE_unit | UE_debug 13 | -------------------------------------------------------------------------------- /jsend/jtree.mli: -------------------------------------------------------------------------------- 1 | type expression = 2 | | JE_loc of { expression : expression; loc : Location.t } 3 | | JE_var of { var : Var.t } 4 | | JE_generator of { params : Var.t list; block : block } 5 | (* TODO: not really a lambda and arg *) 6 | | JE_new of { constructor : expression } 7 | | JE_call of { lambda : expression; args : expression list } 8 | | JE_yield of { expression : expression } 9 | | JE_string of { literal : string } 10 | 11 | and block = 12 | | JBlock of { consts : (Var.t * expression) list; return : expression } 13 | -------------------------------------------------------------------------------- /teika/dune: -------------------------------------------------------------------------------- 1 | (library 2 | (name teika) 3 | (libraries syntax compiler-libs.common) 4 | (modules 5 | (:standard \ Test)) 6 | (preprocess 7 | (pps 8 | ppx_deriving.show 9 | ppx_sexp_conv 10 | ppx_deriving.eq 11 | ppx_deriving.ord 12 | sedlex.ppx))) 13 | 14 | (executable 15 | (name test) 16 | (modules Test) 17 | (libraries alcotest teika) 18 | (preprocess 19 | (pps ppx_deriving.show ppx_deriving.eq ppx_deriving.ord))) 20 | 21 | (rule 22 | (alias runtest) 23 | 
(deps 24 | (:exe ./test.exe)) 25 | (action 26 | (run %{exe}))) 27 | -------------------------------------------------------------------------------- /smol/HACKING.md: -------------------------------------------------------------------------------- 1 | # Smol Frontend 2 | 3 | ## Optimizations 4 | 5 | ### Explicit Substitutions 6 | 7 | This uses explicit substitutions to achieve laziness, similar to λυ. 8 | 9 | I think Smol doesn't have metavariables as no unification exists, additionally currently substituions are not used for equality. 10 | 11 | Also the failure mode is that it will reject terms not accept terms, which seems to be okay. 12 | 13 | - https://drops.dagstuhl.de/opus/volltexte/2014/4858/pdf/34.pdf 14 | - https://www.irif.fr/~kesner/papers/springer-csl07.pdf 15 | - https://hal.inria.fr/inria-00074197/document 16 | -------------------------------------------------------------------------------- /syntax/dune: -------------------------------------------------------------------------------- 1 | (library 2 | (name syntax) 3 | (libraries menhirLib compiler-libs.common utils zarith) 4 | (modules 5 | (:standard \ Test)) 6 | (preprocess 7 | (pps ppx_deriving.eq ppx_deriving.ord ppx_deriving.show sedlex.ppx))) 8 | 9 | (menhir 10 | (modules cparser) 11 | (flags --dump --explain --table)) 12 | 13 | (executable 14 | (name test) 15 | (modules Test) 16 | (libraries alcotest syntax) 17 | (preprocess 18 | (pps ppx_deriving.show ppx_deriving.eq ppx_deriving.ord))) 19 | 20 | (rule 21 | (alias runtest) 22 | (deps 23 | (:exe ./test.exe)) 24 | (action 25 | (run %{exe}))) 26 | -------------------------------------------------------------------------------- /teikalsp/lsp_context.mli: -------------------------------------------------------------------------------- 1 | open Lsp.Types 2 | 3 | type status = private Handshake | Running 4 | type context 5 | type t = context 6 | 7 | (* TODO: rollback? 
Requests and notifications should probably be atomic *) 8 | val create : unit -> context 9 | val status : context -> status 10 | val initialize : context -> unit 11 | 12 | (* documents *) 13 | val open_text_document : context -> DocumentUri.t -> Lsp_text_document.t -> unit 14 | 15 | val change_text_document : 16 | context -> 17 | DocumentUri.t -> 18 | (Lsp_text_document.t -> Lsp_text_document.t) -> 19 | unit 20 | 21 | val close_text_document : context -> DocumentUri.t -> unit 22 | -------------------------------------------------------------------------------- /teikalsp/lsp_channel.mli: -------------------------------------------------------------------------------- 1 | open Jsonrpc 2 | 3 | type channel 4 | type t = channel 5 | 6 | val notify : channel -> Lsp.Server_notification.t -> unit 7 | 8 | type on_request = { 9 | f : 10 | 'response. 11 | channel -> 12 | 'response Lsp.Client_request.t -> 13 | ('response, Response.Error.t) result; 14 | } 15 | 16 | (* TODO: request*) 17 | val listen : 18 | input:#Eio.Flow.source -> 19 | output:#Eio.Flow.sink -> 20 | on_request:on_request -> 21 | on_notification:(channel -> Lsp.Client_notification.t -> unit) -> 22 | unit 23 | 24 | (* val input_loop : input:Chan.input -> 25 | output:Chan.output -> (Jsonrpc.Packet.t -> Jsonrpc.Packet.t list) -> unit) *) 26 | -------------------------------------------------------------------------------- /utils/utils.mli: -------------------------------------------------------------------------------- 1 | module Index : sig 2 | type index = private int 3 | type t = index [@@deriving show, eq] 4 | 5 | val zero : index 6 | val next : index -> index 7 | end 8 | 9 | module Level : sig 10 | (* TODO: this private int is not ideal *) 11 | type level = private int 12 | type t = level [@@deriving show, eq] 13 | 14 | val zero : level 15 | val next : level -> level 16 | val offset : from:level -> to_:level -> Index.t option 17 | end 18 | 19 | module Name : sig 20 | type name 21 | type t = name [@@deriving 
show, eq, ord] 22 | 23 | val make : string -> name 24 | val repr : name -> string 25 | 26 | (* TODO: stop exposing this? *) 27 | module Map : Map.S with type key = name 28 | end 29 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Nix Flake"; 3 | 4 | inputs = { 5 | nixpkgs.url = "github:anmonteiro/nix-overlays"; 6 | nix-filter.url = "github:numtide/nix-filter"; 7 | flake-utils.url = "github:numtide/flake-utils"; 8 | }; 9 | 10 | outputs = { self, nixpkgs, nix-filter, flake-utils }: 11 | flake-utils.lib.eachDefaultSystem (system: 12 | let pkgs = (nixpkgs.makePkgs { 13 | inherit system; 14 | }).extend (self: super: { 15 | ocamlPackages = super.ocaml-ng.ocamlPackages_5_3; 16 | }); in 17 | let teika = pkgs.callPackage ./nix { 18 | inherit nix-filter; 19 | doCheck = true; 20 | }; in 21 | rec { 22 | packages = { inherit teika; }; 23 | devShell = import ./nix/shell.nix { inherit pkgs teika; }; 24 | }); 25 | } 26 | -------------------------------------------------------------------------------- /utils/utils.ml: -------------------------------------------------------------------------------- 1 | module Index = struct 2 | type index = int 3 | and t = index [@@deriving show, eq] 4 | 5 | let zero = 0 6 | 7 | let next n = 8 | let n = n + 1 in 9 | assert (n + 1 >= zero); 10 | n 11 | end 12 | 13 | module Level = struct 14 | type level = int 15 | and t = level [@@deriving show, eq] 16 | 17 | let zero = 0 18 | 19 | let next n = 20 | let n = n + 1 in 21 | assert (n + 1 >= zero); 22 | n 23 | 24 | let offset ~from ~to_ = 25 | match to_ > from with 26 | | true -> 27 | (* TODO: explain this -1 *) 28 | Some (to_ - from - 1) 29 | | false -> None 30 | end 31 | 32 | module Name = struct 33 | type name = string 34 | and t = name [@@deriving show, eq, ord] 35 | 36 | let make t = t 37 | let repr t = t 38 | 39 | module Map = Map.Make (String) 40 | 
end 41 | -------------------------------------------------------------------------------- /nix/default.nix: -------------------------------------------------------------------------------- 1 | { pkgs, doCheck ? true, nix-filter }: 2 | 3 | let inherit (pkgs) lib stdenv ocamlPackages; in 4 | 5 | with ocamlPackages; buildDunePackage rec { 6 | pname = "teika"; 7 | version = "0.0.0-dev"; 8 | 9 | src = with nix-filter.lib; 10 | filter { 11 | root = ./..; 12 | include = [ 13 | "dune-project" 14 | "smol" 15 | "teika" 16 | ]; 17 | exclude = [ ]; 18 | }; 19 | 20 | propagatedBuildInputs = [ 21 | menhir 22 | menhirLib 23 | sedlex 24 | ppx_deriving 25 | eio 26 | eio_main 27 | ppx_sexp_conv 28 | zarith 29 | lsp 30 | ] 31 | # checkInputs are here because when cross compiling dune needs test dependencies 32 | # but they are not available for the build phase. The issue can be seen by adding strictDeps = true;. 33 | ++ checkInputs; 34 | 35 | checkInputs = [ alcotest ]; 36 | } 37 | -------------------------------------------------------------------------------- /smol/stree.mli: -------------------------------------------------------------------------------- 1 | type ty_term = ST_typed of { term : term; type_ : term } 2 | 3 | and term = 4 | | ST_loc of { term : term; loc : Location.t } 5 | | ST_free_var of { level : Level.t } 6 | | ST_bound_var of { index : Index.t } 7 | | ST_forall of { param : ty_pat; return : term } 8 | | ST_lambda of { param : ty_pat; return : term } 9 | | ST_apply of { lambda : term; arg : term } 10 | | ST_self of { self : pat; body : term } 11 | | ST_fix of { self : ty_pat; body : term } 12 | | ST_unroll of { term : term } 13 | | ST_let of { bound : ty_pat; value : term; return : term } 14 | | ST_annot of { term : term; annot : term } 15 | 16 | and ty_pat = SP_typed of { pat : pat; type_ : term } 17 | 18 | and pat = 19 | | SP_loc of { pat : pat; loc : Location.t } 20 | | SP_var of { var : Syntax.Name.t } 21 | | SP_erasable of { pat : pat } 22 | | SP_annot of { 
pat : pat; annot : term } 23 | [@@deriving show] 24 | -------------------------------------------------------------------------------- /design/MODULE.md: -------------------------------------------------------------------------------- 1 | # Module 2 | 3 | This intends to document behavior and features of modules. 4 | 5 | ## Implicit type 6 | 7 | All structures contain an implicit type which by default is abstract both internally and externally, it can be assigned internally. 8 | 9 | This behaves similarly to object-oriented languages like Java and but the goal is to achieve similar interface design to the `Module.t` convention in OCaml. 10 | 11 | 12 | 13 | ### Implicit type alias 14 | 15 | When assigning a structure to a value, the implicit type has an internal alias so that users can have consistent type naming internally and externally. 16 | 17 | 18 | 19 | While this is not always available it will be used it can be used in most cases. 20 | 21 | Example: 22 | 23 | ```rust 24 | Amount = { 25 | // alias 26 | of_nat: Nat -> Amount; 27 | add: Amount -> Amount -> Amount; 28 | }; 29 | ``` 30 | -------------------------------------------------------------------------------- /smol/stree.ml: -------------------------------------------------------------------------------- 1 | type ty_term = ST_typed of { term : term; type_ : term } 2 | 3 | and term = 4 | | ST_loc of { term : term; loc : Location.t [@opaque] } 5 | | ST_free_var of { level : Level.t } 6 | | ST_bound_var of { index : Index.t } 7 | | ST_forall of { param : ty_pat; return : term } 8 | | ST_lambda of { param : ty_pat; return : term } 9 | | ST_apply of { lambda : term; arg : term } 10 | (* TODO: self being only pat is weird *) 11 | | ST_self of { self : pat; body : term } 12 | | ST_fix of { self : ty_pat; body : term } 13 | | ST_unroll of { term : term } 14 | | ST_let of { bound : ty_pat; value : term; return : term } 15 | | ST_annot of { term : term; annot : term } 16 | 17 | and ty_pat = SP_typed of { pat : pat; 
type_ : term } 18 | 19 | and pat = 20 | | SP_loc of { pat : pat; loc : Location.t [@opaque] } 21 | (* TODO: extract Syntax.Name *) 22 | | SP_var of { var : Syntax.Name.t } 23 | | SP_erasable of { pat : pat } 24 | (* TODO: SP_unroll *) 25 | | SP_annot of { pat : pat; annot : term } 26 | [@@deriving show] 27 | -------------------------------------------------------------------------------- /teikalsp/lsp_error.mli: -------------------------------------------------------------------------------- 1 | open Lsp.Types 2 | 3 | type error = 4 | (* channel *) 5 | | Error_request_unsupported 6 | | Error_response_unsupported 7 | | Error_invalid_request of { error : string } 8 | | Error_invalid_notification of { error : string } 9 | (* server *) 10 | | Error_unsupported_request 11 | | Error_unsupported_notification 12 | (* context *) 13 | | Error_notification_before_initialize 14 | | Error_invalid_status_during_initialize 15 | | Error_text_document_already_in_context 16 | | Error_text_document_not_in_context 17 | (* notification *) 18 | | Error_multiple_content_changes of { 19 | content_changes : TextDocumentContentChangeEvent.t list; 20 | } 21 | | Error_partial_content_change of { 22 | content_change : TextDocumentContentChangeEvent.t; 23 | } 24 | | Error_invalid_content_change of { 25 | content_change : TextDocumentContentChangeEvent.t; 26 | } 27 | | Error_unknown_language_id of { language_id : string } 28 | 29 | type t = error [@@deriving show] 30 | 31 | exception Lsp_error of { error : error } 32 | 33 | val fail : error -> 'a 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Eduardo Rafael 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including 
without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /teika/terror.mli: -------------------------------------------------------------------------------- 1 | open Utils 2 | open Ttree 3 | 4 | type error = 5 | (* metadata *) 6 | | TError_loc of { error : error; loc : Location.t [@opaque] } 7 | (* equal *) 8 | | TError_type_clash 9 | (* TODO: infer *) 10 | (* typer *) 11 | | TError_unknown_var of { name : Name.t } 12 | | TError_not_a_forall of { type_ : term } 13 | | TError_hoist_not_implemented 14 | | TError_extensions_not_implemented 15 | | TError_pairs_not_implemented 16 | (* TODO: native should not be a string *) 17 | | TError_unknown_native of { native : string } 18 | | TError_missing_annotation 19 | (* elaborate *) 20 | | TError_invalid_notation 21 | 22 | type t = error [@@deriving show] 23 | 24 | exception TError of { error : error } 25 | 26 | (* TODO: error_loc *) 27 | val error_type_clash : unit -> 'a 28 | val error_unknown_var : name:Name.t -> 'a 29 | val error_not_a_forall : type_:term -> 'a 30 | val error_hoist_not_implemented : unit -> 'a 31 | val 
error_extensions_not_implemented : unit -> 'a 32 | val error_pairs_not_implemented : unit -> 'a 33 | val error_unknown_native : native:string -> 'a 34 | val error_missing_annotation : unit -> 'a 35 | val error_invalid_notation : unit -> 'a 36 | -------------------------------------------------------------------------------- /teikalsp/lsp_error.ml: -------------------------------------------------------------------------------- 1 | open Lsp.Types 2 | 3 | type error = 4 | (* channel *) 5 | | Error_request_unsupported 6 | | Error_response_unsupported 7 | | Error_invalid_request of { error : string } 8 | | Error_invalid_notification of { error : string } 9 | (* server *) 10 | | Error_unsupported_request 11 | | Error_unsupported_notification 12 | (* context *) 13 | | Error_notification_before_initialize 14 | | Error_invalid_status_during_initialize 15 | | Error_text_document_already_in_context 16 | | Error_text_document_not_in_context 17 | (* notification *) 18 | | Error_multiple_content_changes of { 19 | content_changes : TextDocumentContentChangeEvent.t list; [@opaque] 20 | } 21 | | Error_partial_content_change of { 22 | content_change : TextDocumentContentChangeEvent.t; [@opaque] 23 | } 24 | | Error_invalid_content_change of { 25 | content_change : TextDocumentContentChangeEvent.t; [@opaque] 26 | } 27 | | Error_unknown_language_id of { language_id : string } 28 | 29 | and t = error [@@deriving show] 30 | 31 | (* TODO: what happen with errors? 
*) 32 | exception Lsp_error of { error : error } 33 | 34 | let fail error = raise (Lsp_error { error }) 35 | -------------------------------------------------------------------------------- /teikalsp/lsp_request.ml: -------------------------------------------------------------------------------- 1 | open Lsp.Types 2 | open Lsp_context 3 | open Lsp_error 4 | 5 | module Server_life_cycle = struct 6 | let initialize context ~params = 7 | let () = 8 | (* TODO: this is duplicated *) 9 | match status context with 10 | | Handshake -> () 11 | | Running -> fail Error_invalid_status_during_initialize 12 | in 13 | (* TODO: use additional data *) 14 | let InitializeParams. 15 | { 16 | workDoneToken = _; 17 | processId = _; 18 | clientInfo = _; 19 | locale = _; 20 | rootPath = _; 21 | rootUri = _; 22 | initializationOptions = _; 23 | capabilities = _; 24 | (* TODO: definitely ignore capabilities *) 25 | trace = _; 26 | (* TODO: enable logging using tgrace*) 27 | workspaceFolders = _; 28 | } = 29 | params 30 | in 31 | let () = Lsp_context.initialize context in 32 | (* TODO: better capabilities *) 33 | let capabilities = 34 | ServerCapabilities.create ~textDocumentSync:(`TextDocumentSyncKind Full) 35 | ~hoverProvider:(`Bool true) () 36 | in 37 | (* TODO: server_info *) 38 | InitializeResult.create ~capabilities () 39 | end 40 | -------------------------------------------------------------------------------- /design/GOALS.md: -------------------------------------------------------------------------------- 1 | # Goals 2 | 3 | This should document the goals of the project, the why and the tradeoff's. 4 | 5 | ## Assumptions 6 | 7 | 1. Most code will be read more times than written 8 | 2. Most code will be read by proficient developers 9 | 3. Most code will be written by proficient developers 10 | 4. Most code will be simple even if powerful features exists 11 | 5. Beginners developers are only temporarily beginners 12 | 6. 
Tooling can be made to help beginner developers 13 | 14 | ## Direct 15 | 16 | Indirection makes code more complex, a language should be direct. Functions should be known statically and data indirections should be optimized away. 17 | 18 | ## Succinct 19 | 20 | Only local information should be syntactically provided, a language should be succinct. Noise should be avoided and contextual information should be provided on demand. 21 | 22 | ## Powerful 23 | 24 | Users should be able to describe abstract and efficient code, while still being able to reason about it locally, a language should be powerful. Effects should be tracked and mutation controlled. 25 | 26 | ## Flexible 27 | 28 | Hacks were needed in the past, are needed today and will be needed in the future, users will need to hack code, a language should be flexible. Abstractions are going to restrict code; tooling should make it flexible. 29 | -------------------------------------------------------------------------------- /design/INFERENCE.md: -------------------------------------------------------------------------------- 1 | # Inference 2 | 3 | Teika intends to be an ML-like language, which means inference is a must. 4 | 5 | ## HM inference 6 | 7 | The basic kind of inference present in Teika is Hindley-Milner inference, which essentially assumes that all parameters are monomorphic (no quantification) and, when a weak variable (i.e. a variable that is not constrained) escapes its scope, an implicit forall is added. 8 | 9 | It is quite limited but relatively simple to understand, and good visualizations can be developed for it. 10 | 11 | ### Higher Kinded Types 12 | 13 | There are a couple of decisions to be made on inference for higher kinded types.
14 | 15 | ```rust 16 | // When inferring the following two types are possible 17 | f = T => (x: T Int) => x; 18 | // the easy one, here we follow the fact that Int is a type 19 | f = (T: _ -> _) => (x: T Int) => x; // T is an arrow 20 | f = (T: #(Int) -> _) => (x: T Int) => x; // T param is the type Int 21 | f = (T: #(Int) -> *) => (x: T Int) => x; // unify with x 22 | // the logical one, consider its kind 23 | f = T => (x: T (Int: *)) => x; // Int has kind * 24 | f = (T: _ -> _) => (x: T (Int: *)) => x; // T is an arrow 25 | f = (T: * -> _) => (x: T (Int: *)) => x; // T param is the kind * 26 | f = (T: * -> *) => (x: T (Int: *)) => x; // unify with x 27 | ``` 28 | 29 | I decided to go with the second one, because in the same way that when you call a function with a value it infers its type, when you call a function with a type it infers its kind. If you do `f => f 1` the type is not `{A} -> (f: 1 -> A) -> f 1`, but `Int -> Int`. 30 | -------------------------------------------------------------------------------- /syntax/ctree.mli: -------------------------------------------------------------------------------- 1 | open Utils 2 | 3 | type term = CTerm of { term : term_syntax; loc : Location.t } 4 | 5 | and term_syntax = 6 | | CT_var of { var : Name.t } 7 | | CT_extension of { extension : Name.t } 8 | | CT_forall of { param : term; body : term } 9 | | CT_lambda of { param : term; body : term } 10 | | CT_apply of { funct : term; arg : term } 11 | | CT_pair of { left : term; right : term } 12 | | CT_both of { left : term; right : term } 13 | | CT_bind of { bound : term; value : term } 14 | | CT_semi of { left : term; right : term } 15 | | CT_annot of { value : term; annot : term } 16 | | CT_string of { literal : string } 17 | | CT_number of { literal : Z.t } 18 | | CT_parens of { content : term } 19 | | CT_braces of { content : term } 20 | [@@deriving show] 21 | 22 | val ct_var : Location.t -> var:Name.t -> term 23 | val ct_extension : Location.t ->
extension:Name.t -> term 24 | val ct_forall : Location.t -> param:term -> body:term -> term 25 | val ct_lambda : Location.t -> param:term -> body:term -> term 26 | val ct_apply : Location.t -> funct:term -> arg:term -> term 27 | val ct_pair : Location.t -> left:term -> right:term -> term 28 | val ct_both : Location.t -> left:term -> right:term -> term 29 | val ct_bind : Location.t -> bound:term -> value:term -> term 30 | val ct_semi : Location.t -> left:term -> right:term -> term 31 | val ct_annot : Location.t -> value:term -> annot:term -> term 32 | val ct_string : Location.t -> literal:string -> term 33 | val ct_number : Location.t -> literal:Z.t -> term 34 | val ct_parens : Location.t -> content:term -> term 35 | val ct_braces : Location.t -> content:term -> term 36 | -------------------------------------------------------------------------------- /teika/terror.ml: -------------------------------------------------------------------------------- 1 | open Utils 2 | open Ttree 3 | 4 | (* TODO: too much work to add errors, 5 | adding here and context is bad*) 6 | type error = 7 | (* TODO: why track nested locations? 
8 | Probably because things like macros exists *) 9 | | TError_loc of { error : error; loc : Location.t [@opaque] } 10 | (* equal *) 11 | | TError_type_clash 12 | (* typer *) 13 | | TError_unknown_var of { name : Name.t } 14 | | TError_not_a_forall of { type_ : term } 15 | | TError_hoist_not_implemented 16 | | TError_extensions_not_implemented 17 | | TError_pairs_not_implemented 18 | | TError_unknown_native of { native : string } 19 | | TError_missing_annotation 20 | (* elaborate *) 21 | | TError_invalid_notation 22 | 23 | and t = error [@@deriving show { with_path = false }] 24 | 25 | exception TError of { error : error } 26 | 27 | let () = 28 | Printexc.register_printer @@ function 29 | | TError { error } -> Some (show_error error) 30 | | _ -> None 31 | 32 | let terror error = raise (TError { error }) 33 | let error_type_clash () = terror @@ TError_type_clash 34 | let error_unknown_var ~name = terror @@ TError_unknown_var { name } 35 | let error_not_a_forall ~type_ = terror @@ TError_not_a_forall { type_ } 36 | let error_hoist_not_implemented () = terror @@ TError_hoist_not_implemented 37 | 38 | let error_extensions_not_implemented () = 39 | terror @@ TError_extensions_not_implemented 40 | 41 | let error_pairs_not_implemented () = terror @@ TError_pairs_not_implemented 42 | let error_unknown_native ~native = terror @@ TError_unknown_native { native } 43 | let error_missing_annotation () = terror @@ TError_missing_annotation 44 | let error_invalid_notation () = terror @@ TError_invalid_notation 45 | -------------------------------------------------------------------------------- /jsend/var.ml: -------------------------------------------------------------------------------- 1 | open Utils 2 | 3 | module Id : sig 4 | type t [@@deriving show] 5 | 6 | val next : unit -> t 7 | val equal : t -> t -> bool 8 | val compare : t -> t -> int 9 | end = struct 10 | type t = int [@@deriving show] 11 | 12 | let acc = Atomic.make 0 13 | let next () = Atomic.fetch_and_add acc 1 14 
| let equal = Int.equal 15 | let compare = Int.compare 16 | end 17 | 18 | let _ = Id.show 19 | 20 | type var_kind = Global | Scoped 21 | type var = { id : Id.t; name : Name.t; kind : var_kind } 22 | type t = var 23 | 24 | let pp fmt var = 25 | let { id; name; kind } = var in 26 | match kind with 27 | | Global -> Format.fprintf fmt "%s" (Name.repr name) 28 | | Scoped -> Format.fprintf fmt "%s$%a" (Name.repr name) Id.pp id 29 | 30 | let show var = Format.asprintf "%a" pp var 31 | 32 | let create_any kind name = 33 | let id = Id.next () in 34 | { id; name; kind } 35 | 36 | let create name = create_any Scoped name 37 | 38 | let predef name = 39 | let name = Name.make name in 40 | create_any Global name 41 | 42 | let equal a b = 43 | let { id = a; name = _; kind = _ } = a in 44 | let { id = b; name = _; kind = _ } = b in 45 | Id.equal a b 46 | 47 | let compare a b = 48 | let { id = a; name = _; kind = _ } = a in 49 | let { id = b; name = _; kind = _ } = b in 50 | Id.compare a b 51 | 52 | let name var = 53 | let { id = _; name; kind = _ } = var in 54 | name 55 | 56 | (* TODO: those should be checked somewhere *) 57 | let type_ = predef "$type" 58 | let fix = predef "$fix" 59 | let unit = predef "$unit" 60 | let debug = predef "$debug" 61 | let curry = predef "$curry" 62 | let jmp = predef "$jmp" 63 | 64 | module Map = Map.Make (struct 65 | type t = var 66 | 67 | let compare = compare 68 | end) 69 | -------------------------------------------------------------------------------- /teika/ttree.mli: -------------------------------------------------------------------------------- 1 | open Utils 2 | 3 | (* TODO: explicit unfold for loops on terms *) 4 | type term = Term of { struct_ : term_struct; loc : Location.t [@opaque] } 5 | 6 | and term_struct = 7 | (* (M : A) *) 8 | | T_annot of { term : term; annot : term } 9 | (* \n *) 10 | | T_var of { var : Index.t } 11 | (* P = N; M *) 12 | | T_let of { bound : pat; arg : term; body : term } 13 | (* x : A; M *) 14 | | T_hoist of { 
bound : var_pat; body : term } 15 | (* x : A; ...; x = N; M *) 16 | | T_fix of { bound : var_pat; var : Index.t; arg : term; body : term } 17 | (* P => M *) 18 | | T_lambda of { bound : pat; body : term } 19 | (* M N *) 20 | | T_apply of { funct : term; arg : term } 21 | (* (P : A) -> B *) 22 | | T_forall of { bound : pat; param : term; body : term } 23 | (* (P : A) & B *) 24 | | T_self of { bound : var_pat; body : term } 25 | (* (M, ...) *) 26 | | T_tuple of { elements : term list } 27 | (* (x : A, ...) *) 28 | | T_exists of { elements : pat list } 29 | 30 | and var_pat = VPat of { struct_ : var_pat_struct; loc : Location.t } 31 | 32 | and var_pat_struct = 33 | (* (P : A) *) 34 | | VP_annot of { pat : var_pat; annot : term } 35 | (* x *) 36 | | VP_var of { var : Name.t } 37 | 38 | and pat = Pat of { struct_ : pat_struct; loc : Location.t } 39 | 40 | and pat_struct = 41 | (* (P : A) *) 42 | | P_annot of { pat : pat; annot : term } 43 | (* x *) 44 | (* TODO: drop names and uses receipts *) 45 | | P_var of { var : Name.t } 46 | (* (x, ...) 
*) 47 | | P_tuple of { elements : pat list } 48 | [@@deriving show] 49 | 50 | val t_wrap : loc:Location.t -> term_struct -> term 51 | val vp_wrap : loc:Location.t -> var_pat_struct -> var_pat 52 | val p_wrap : loc:Location.t -> pat_struct -> pat 53 | -------------------------------------------------------------------------------- /teika/ttree.ml: -------------------------------------------------------------------------------- 1 | open Utils 2 | 3 | (* TODO: explicit unfold for loops on terms *) 4 | type term = Term of { struct_ : term_struct; loc : Location.t [@opaque] } 5 | 6 | and term_struct = 7 | (* (M : A) *) 8 | | T_annot of { term : term; annot : term } 9 | (* \n *) 10 | | T_var of { var : Index.t } 11 | (* P = N; M *) 12 | | T_let of { bound : pat; arg : term; body : term } 13 | (* x : A; M *) 14 | | T_hoist of { bound : var_pat; body : term } 15 | (* x : A; ...; x = N; M *) 16 | | T_fix of { bound : var_pat; var : Index.t; arg : term; body : term } 17 | (* P => M *) 18 | | T_lambda of { bound : pat; body : term } 19 | (* M N *) 20 | | T_apply of { funct : term; arg : term } 21 | (* (P : A) -> B *) 22 | | T_forall of { bound : pat; param : term; body : term } 23 | (* (P : A) & B *) 24 | | T_self of { bound : var_pat; body : term } 25 | (* (M, ...) *) 26 | | T_tuple of { elements : term list } 27 | (* (x : A, ...) *) 28 | | T_exists of { elements : pat list } 29 | 30 | and var_pat = VPat of { struct_ : var_pat_struct; loc : Location.t [@opaque] } 31 | 32 | and var_pat_struct = 33 | (* (P : A) *) 34 | | VP_annot of { pat : var_pat; annot : term } 35 | (* x *) 36 | | VP_var of { var : Name.t } 37 | 38 | and pat = Pat of { struct_ : pat_struct; loc : Location.t [@opaque] } 39 | 40 | and pat_struct = 41 | (* (P : A) *) 42 | | P_annot of { pat : pat; annot : term } 43 | (* x *) 44 | (* TODO: drop names and uses receipts *) 45 | | P_var of { var : Name.t } 46 | (* (x, ...) 
*) 47 | | P_tuple of { elements : pat list } 48 | [@@deriving show { with_path = false }] 49 | 50 | let t_wrap ~loc struct_ = Term { struct_; loc } 51 | let vp_wrap ~loc struct_ = VPat { struct_; loc } 52 | let p_wrap ~loc struct_ = Pat { struct_; loc } 53 | -------------------------------------------------------------------------------- /syntax/ctree.ml: -------------------------------------------------------------------------------- 1 | open Utils 2 | 3 | type term = 4 | (* TODO: printer location *) 5 | | CTerm of { term : term_syntax; loc : Location.t [@opaque] } 6 | 7 | and term_syntax = 8 | | CT_var of { var : Name.t } 9 | | CT_extension of { extension : Name.t } 10 | | CT_forall of { param : term; body : term } 11 | | CT_lambda of { param : term; body : term } 12 | | CT_apply of { funct : term; arg : term } 13 | | CT_pair of { left : term; right : term } 14 | | CT_both of { left : term; right : term } 15 | | CT_bind of { bound : term; value : term } 16 | | CT_semi of { left : term; right : term } 17 | | CT_annot of { value : term; annot : term } 18 | | CT_string of { literal : string } 19 | | CT_number of { literal : Z.t [@printer Z.pp_print] } 20 | | CT_parens of { content : term } 21 | | CT_braces of { content : term } 22 | [@@deriving show { with_path = false }] 23 | 24 | let cterm loc term = CTerm { loc; term } 25 | let ct_var loc ~var = cterm loc (CT_var { var }) 26 | let ct_extension loc ~extension = cterm loc (CT_extension { extension }) 27 | let ct_forall loc ~param ~body = cterm loc (CT_forall { param; body }) 28 | let ct_lambda loc ~param ~body = cterm loc (CT_lambda { param; body }) 29 | let ct_apply loc ~funct ~arg = cterm loc (CT_apply { funct; arg }) 30 | let ct_pair loc ~left ~right = cterm loc (CT_pair { left; right }) 31 | let ct_both loc ~left ~right = cterm loc (CT_both { left; right }) 32 | let ct_bind loc ~bound ~value = cterm loc (CT_bind { bound; value }) 33 | let ct_semi loc ~left ~right = cterm loc (CT_semi { left; right }) 34 | let 
ct_annot loc ~value ~annot = cterm loc (CT_annot { value; annot }) 35 | let ct_string loc ~literal = cterm loc (CT_string { literal }) 36 | let ct_number loc ~literal = cterm loc (CT_number { literal }) 37 | let ct_parens loc ~content = cterm loc (CT_parens { content }) 38 | let ct_braces loc ~content = cterm loc (CT_braces { content }) 39 | -------------------------------------------------------------------------------- /teikalsp/lsp_context.ml: -------------------------------------------------------------------------------- 1 | open Lsp.Types 2 | open Lsp_error 3 | module Document_uri_map = Map.Make (DocumentUri) 4 | 5 | (* TODO: capabilities *) 6 | (* TODO: initialized *) 7 | type status = Handshake | Running 8 | 9 | type context = { 10 | mutable status : status; 11 | mutable text_documents : Lsp_text_document.t Document_uri_map.t; 12 | } 13 | 14 | type t = context 15 | 16 | let create () = { status = Handshake; text_documents = Document_uri_map.empty } 17 | let status context = context.status 18 | 19 | let initialize context = 20 | match context.status with 21 | | Handshake -> context.status <- Running 22 | | Running -> fail Error_invalid_status_during_initialize 23 | 24 | let update_text_documents context f = 25 | let text_documents = context.text_documents in 26 | let text_documents = f text_documents in 27 | context.text_documents <- text_documents 28 | 29 | let open_text_document context uri text_document = 30 | update_text_documents context @@ fun text_documents -> 31 | (match Document_uri_map.mem uri text_documents with 32 | | true -> fail Error_text_document_already_in_context 33 | | false -> ()); 34 | Document_uri_map.add uri text_document text_documents 35 | 36 | let change_text_document context uri cb = 37 | update_text_documents context @@ fun text_documents -> 38 | let text_document = 39 | match Document_uri_map.find_opt uri text_documents with 40 | | Some text_document -> text_document 41 | | None -> fail Error_text_document_not_in_context 42 | in 
43 | let text_document = cb text_document in 44 | (* TODO: only accept if version is newer or equal *) 45 | Document_uri_map.add uri text_document text_documents 46 | 47 | let close_text_document context uri = 48 | update_text_documents context @@ fun text_documents -> 49 | (match Document_uri_map.mem uri text_documents with 50 | | true -> () 51 | | false -> fail Error_text_document_not_in_context); 52 | Document_uri_map.remove uri text_documents 53 | -------------------------------------------------------------------------------- /teikalsp/lsp_notification.ml: -------------------------------------------------------------------------------- 1 | open Lsp.Types 2 | open Lsp_context 3 | open Lsp_error 4 | 5 | module Server_life_cycle = struct 6 | (* TODO: do something here?*) 7 | let initialized _context = () 8 | end 9 | 10 | module Text_document_sync = struct 11 | let did_open context ~params = 12 | let DidOpenTextDocumentParams.{ textDocument = text_document } = params in 13 | let TextDocumentItem.{ uri; languageId = language_id; version; text } = 14 | text_document 15 | in 16 | let document = 17 | match language_id with 18 | | "teika" -> Lsp_text_document.teika ~version ~text 19 | | language_id -> fail (Error_unknown_language_id { language_id }) 20 | in 21 | (* TODO: async typing here *) 22 | open_text_document context uri document 23 | 24 | let did_change context ~params = 25 | (* TODO: currently only full content changes are supported 26 | partial content changes could be supported *) 27 | let DidChangeTextDocumentParams. 
28 | { textDocument = { uri; version }; contentChanges = content_changes } 29 | = 30 | params 31 | in 32 | let content_change = 33 | match content_changes with 34 | | [ content_change ] -> content_change 35 | | content_changes -> 36 | fail (Error_multiple_content_changes { content_changes }) 37 | in 38 | let TextDocumentContentChangeEvent.{ range; rangeLength; text } = 39 | content_change 40 | in 41 | (match (range, rangeLength) with 42 | | None, None -> () 43 | | Some _, Some _ -> fail (Error_partial_content_change { content_change }) 44 | | Some _, None | None, Some _ -> 45 | fail (Error_invalid_content_change { content_change })); 46 | change_text_document context uri @@ fun document -> 47 | (* TODO: async typing here *) 48 | Lsp_text_document.with_change ~version ~text document 49 | 50 | let did_close context ~params = 51 | let DidCloseTextDocumentParams.{ textDocument = { uri } } = params in 52 | close_text_document context uri 53 | 54 | (* TODO: save and rename *) 55 | end 56 | -------------------------------------------------------------------------------- /teikavsc/main.ts: -------------------------------------------------------------------------------- 1 | import { workspace, ExtensionContext, commands } from "vscode"; 2 | 3 | import { 4 | LanguageClient, 5 | LanguageClientOptions, 6 | ServerOptions, 7 | TransportKind, 8 | } from "vscode-languageclient/node"; 9 | 10 | let client: LanguageClient; 11 | 12 | const restartClient = async () => { 13 | const workspaceConfiguration = workspace.getConfiguration(); 14 | const teikaServerPath = workspaceConfiguration.get( 15 | "teika.server.path", 16 | "" 17 | ); 18 | 19 | // If the extension is launched in debug mode then the debug server options are used 20 | // Otherwise the run options are used 21 | const serverOptions: ServerOptions = { 22 | run: { 23 | command: teikaServerPath, 24 | args: [], 25 | transport: TransportKind.stdio, 26 | }, 27 | debug: { 28 | command: teikaServerPath, 29 | args: [], 30 | transport: 
TransportKind.stdio, 31 | }, 32 | }; 33 | 34 | // Options to control the language client 35 | const clientOptions: LanguageClientOptions = { 36 | // Register the server for plain text documents 37 | documentSelector: [{ scheme: "file", language: "teika" }], 38 | synchronize: {}, 39 | }; 40 | 41 | if (!client) { 42 | // Create the language client and start the client. 43 | client = new LanguageClient( 44 | "teika", 45 | "Teika Server", 46 | serverOptions, 47 | clientOptions 48 | ); 49 | } 50 | 51 | if (client && client.isRunning()) { 52 | await client.restart(); 53 | } else { 54 | // Start the client. This will also launch the server 55 | await client.start(); 56 | } 57 | }; 58 | 59 | export function activate(context: ExtensionContext) { 60 | context.subscriptions.push( 61 | commands.registerCommand("teika.server.restart", () => { 62 | // TODO: handle promise below 63 | restartClient(); 64 | }) 65 | ); 66 | 67 | restartClient(); 68 | } 69 | 70 | export function deactivate(): Thenable | undefined { 71 | if (!client) { 72 | return undefined; 73 | } 74 | return client.stop(); 75 | } 76 | -------------------------------------------------------------------------------- /jsend/test.ml: -------------------------------------------------------------------------------- 1 | open Syntax 2 | open Teika 3 | open Jsend 4 | 5 | let compile code = 6 | let term = Option.get @@ Clexer.from_string Cparser.term_opt code in 7 | (* TODO: locations *) 8 | let loc = Location.none in 9 | let term = Lparser.parse_term ~loc term in 10 | let term = 11 | match Typer.Infer.infer_term term with 12 | | Ok ttree -> ttree 13 | | Error error -> 14 | Format.eprintf "%a\n%!" Terror.pp error; 15 | failwith "infer" 16 | in 17 | 18 | let term = Untype.untype_term term in 19 | let term = Emit.emit_term term in 20 | Format.printf "%a\n\n%!" 
Jprinter.pp_expression term 21 | 22 | let () = Printexc.record_backtrace true 23 | 24 | let () = 25 | compile {| 26 | ((A : Type) => (x : A) => x) String "Hello World" 27 | |} 28 | 29 | let () = 30 | compile 31 | {| 32 | noop = (u : (A : Type) -> (x : A) -> A) => u ((A : Type) -> (x : A) -> A) u; 33 | noop 34 | |} 35 | 36 | (* let () = 37 | compile 38 | {| 39 | Unit = (A : Type) -> (x : A) -> A; 40 | (noop : (u : Unit) -> Unit) = u => u Unit u; 41 | noop 42 | |} 43 | 44 | let () = 45 | compile 46 | {| 47 | Bool = (A : Type) -> (t : A) -> (f : A) -> A; 48 | (true : Bool) = A => x => y => x; 49 | (false : Bool) = A => x => y => y; 50 | f = (bool : Bool) => @native("debug")(bool String "!!true" "!!false"); 51 | f false 52 | |} 53 | 54 | let () = 55 | compile 56 | {| 57 | Nat = (A : Type) -> (z : A) -> 58 | (s : (x : A) -> A) -> (k : (x : A) -> A) -> A; 59 | (zero : Nat) = A => z => s => k => k z; 60 | (succ : (n : Nat) -> Nat) = 61 | n => A => z => s => k => n A z s (x => k (s x)); 62 | (add : (n : Nat) -> (m : Nat) -> Nat) = 63 | n => m => n Nat m succ (x => x); 64 | (mul : (n : Nat) -> (m : Nat) -> Nat) = 65 | n => m => n Nat zero (add m) (x => x); 66 | one = succ zero; 67 | two = succ one; 68 | four = mul two two; 69 | eight = mul two four; 70 | sixteen = mul two eight; 71 | byte = mul sixteen sixteen; 72 | short = mul byte byte; 73 | short String "zero" (_ => @native("debug")("hello")) (x => x) 74 | |} *) 75 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "flake-utils": { 4 | "inputs": { 5 | "systems": "systems" 6 | }, 7 | "locked": { 8 | "lastModified": 1731533236, 9 | "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", 10 | "owner": "numtide", 11 | "repo": "flake-utils", 12 | "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", 13 | "type": "github" 14 | }, 15 | "original": { 16 | "owner": 
"numtide", 17 | "repo": "flake-utils", 18 | "type": "github" 19 | } 20 | }, 21 | "nix-filter": { 22 | "locked": { 23 | "lastModified": 1731533336, 24 | "narHash": "sha256-oRam5PS1vcrr5UPgALW0eo1m/5/pls27Z/pabHNy2Ms=", 25 | "owner": "numtide", 26 | "repo": "nix-filter", 27 | "rev": "f7653272fd234696ae94229839a99b73c9ab7de0", 28 | "type": "github" 29 | }, 30 | "original": { 31 | "owner": "numtide", 32 | "repo": "nix-filter", 33 | "type": "github" 34 | } 35 | }, 36 | "nixpkgs": { 37 | "inputs": { 38 | "nixpkgs": "nixpkgs_2" 39 | }, 40 | "locked": { 41 | "lastModified": 1732830411, 42 | "narHash": "sha256-ZQwGw5DsRLg6fRwbW9YsEI1IMEYMNqUl0yRA9uuLbHg=", 43 | "owner": "anmonteiro", 44 | "repo": "nix-overlays", 45 | "rev": "d4c5d82ae569f35a77160272475b18be0936fe14", 46 | "type": "github" 47 | }, 48 | "original": { 49 | "owner": "anmonteiro", 50 | "repo": "nix-overlays", 51 | "type": "github" 52 | } 53 | }, 54 | "nixpkgs_2": { 55 | "locked": { 56 | "lastModified": 1732780316, 57 | "narHash": "sha256-NskLIz0ue4Uqbza+1+8UGHuPVr8DrUiLfZu5VS4VQxw=", 58 | "owner": "NixOS", 59 | "repo": "nixpkgs", 60 | "rev": "226216574ada4c3ecefcbbec41f39ce4655f78ef", 61 | "type": "github" 62 | }, 63 | "original": { 64 | "owner": "NixOS", 65 | "repo": "nixpkgs", 66 | "rev": "226216574ada4c3ecefcbbec41f39ce4655f78ef", 67 | "type": "github" 68 | } 69 | }, 70 | "root": { 71 | "inputs": { 72 | "flake-utils": "flake-utils", 73 | "nix-filter": "nix-filter", 74 | "nixpkgs": "nixpkgs" 75 | } 76 | }, 77 | "systems": { 78 | "locked": { 79 | "lastModified": 1681028828, 80 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 81 | "owner": "nix-systems", 82 | "repo": "default", 83 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 84 | "type": "github" 85 | }, 86 | "original": { 87 | "owner": "nix-systems", 88 | "repo": "default", 89 | "type": "github" 90 | } 91 | } 92 | }, 93 | "root": "root", 94 | "version": 7 95 | } 96 | 
-------------------------------------------------------------------------------- /jsend/jprinter.ml: -------------------------------------------------------------------------------- 1 | open Jtree 2 | open Format 3 | 4 | (* TODO: identation *) 5 | let pp_block_syntax ~pp_wrapped_expression fmt block = 6 | let (JBlock { consts; return }) = block in 7 | List.iter 8 | (fun (var, value) -> 9 | fprintf fmt "const %a = %a;" Var.pp var pp_wrapped_expression value) 10 | consts; 11 | fprintf fmt "return %a;" pp_wrapped_expression return 12 | 13 | let rec pp_expression_syntax ~pp_wrapped ~pp_call ~pp_atom ~pp_block fmt 14 | expression = 15 | let pp_expression_syntax fmt expression = 16 | pp_expression_syntax ~pp_wrapped ~pp_call ~pp_atom ~pp_block fmt expression 17 | in 18 | match expression with 19 | | JE_loc { expression; loc = _ } -> pp_expression_syntax fmt expression 20 | | JE_var { var } -> Var.pp fmt var 21 | | JE_generator { params; block } -> 22 | (* TODO: names on functions? *) 23 | let rec pp_params fmt params = 24 | match params with 25 | | [] -> () 26 | | [ param ] -> fprintf fmt "%a" Var.pp param 27 | | param :: params -> fprintf fmt "%a, %a" Var.pp param pp_params params 28 | in 29 | fprintf fmt "function* (%a) { %a }" pp_params params pp_block block 30 | (* TODO: new precedence is the same as call? 
*) 31 | | JE_new { constructor } -> fprintf fmt "new %a" pp_call constructor 32 | | JE_call { lambda; args } -> 33 | (* TODO: almost duplicated from params *) 34 | let rec pp_args fmt args = 35 | match args with 36 | | [] -> () 37 | | [ arg ] -> fprintf fmt "%a" pp_wrapped arg 38 | | arg :: args -> fprintf fmt "%a, %a" pp_wrapped arg pp_args args 39 | in 40 | fprintf fmt "%a(%a)" pp_call lambda pp_args args 41 | | JE_yield { expression } -> fprintf fmt "yield %a" pp_call expression 42 | | JE_string { literal } -> 43 | (* TODO: proper JS escaping *) 44 | fprintf fmt "%S" literal 45 | 46 | type prec = Wrapped | Call | Atom 47 | 48 | let rec pp_expression prec fmt expression = 49 | let pp_wrapped fmt term = pp_expression Wrapped fmt term in 50 | let pp_call fmt term = pp_expression Call fmt term in 51 | let pp_atom fmt term = pp_expression Atom fmt term in 52 | let pp_block fmt block = 53 | pp_block_syntax ~pp_wrapped_expression:pp_wrapped fmt block 54 | in 55 | match (expression, prec) with 56 | | JE_loc { expression; loc = _ }, prec -> pp_expression prec fmt expression 57 | | (JE_var _ | JE_string _), (Wrapped | Call | Atom) 58 | | (JE_new _ | JE_call _), (Wrapped | Call) 59 | | (JE_generator _ | JE_yield _), Wrapped -> 60 | pp_expression_syntax ~pp_wrapped ~pp_call ~pp_atom ~pp_block fmt 61 | expression 62 | | (JE_new _ | JE_call _), Atom | (JE_generator _ | JE_yield _), (Call | Atom) 63 | -> 64 | fprintf fmt "(%a)" pp_wrapped expression 65 | 66 | let pp_expression fmt expression = pp_expression Wrapped fmt expression 67 | -------------------------------------------------------------------------------- /teikalsp/teikalsp.ml: -------------------------------------------------------------------------------- 1 | open Lsp_error 2 | 3 | let on_request (type response) context _channel 4 | (request : response Lsp.Client_request.t) : response = 5 | let open Lsp_request in 6 | let open Lsp.Client_request in 7 | (* TODO: use channel? 
*) 8 | match request with 9 | | Initialize params -> Server_life_cycle.initialize context ~params 10 | | _request -> 11 | (* TODO: print which requests are not supported *) 12 | fail Error_unsupported_request 13 | 14 | let on_request_error _context _channel error = 15 | let open Jsonrpc.Response.Error in 16 | (* TODO: maybe error should show to user? *) 17 | (* TODO: better errors *) 18 | let message = 19 | match error with 20 | | Lsp_error { error } -> Lsp_error.show error 21 | | error -> Printexc.to_string error 22 | in 23 | Jsonrpc.Response.Error.make ~code:Code.InternalError ~message () 24 | 25 | let on_notification context _channel notification = 26 | let open Lsp_notification in 27 | let open Lsp.Client_notification in 28 | (* TODO: use channel? *) 29 | match notification with 30 | | Initialized -> Server_life_cycle.initialized context 31 | | TextDocumentDidOpen params -> Text_document_sync.did_open context ~params 32 | | TextDocumentDidChange params -> 33 | Text_document_sync.did_change context ~params 34 | | TextDocumentDidClose params -> Text_document_sync.did_close context ~params 35 | | _notification -> 36 | (* TODO: print which notifications are not supported *) 37 | fail Error_unsupported_notification 38 | 39 | let on_notification context channel notification = 40 | (* TODO: notification error handling *) 41 | match Lsp_context.status context with 42 | | Handshake -> 43 | (* TODO: log *) 44 | (* TODO: server can send some notifications during handshake *) 45 | fail Error_notification_before_initialize 46 | | Running -> on_notification context channel notification 47 | 48 | let on_notification_error _context channel error = 49 | let open Lsp.Types in 50 | let open Lsp.Server_notification in 51 | let message = 52 | match error with 53 | | Lsp_error { error } -> Lsp_error.show error 54 | | error -> Printexc.to_string error 55 | in 56 | (* TODO: maybe error should show to user? 
*) 57 | let message = LogMessageParams.create ~type_:Error ~message in 58 | Lsp_channel.notify channel @@ LogMessage message 59 | 60 | let main () = 61 | Eio_main.run @@ fun env -> 62 | let context = Lsp_context.create () in 63 | let on_request channel request = 64 | try Ok (on_request context channel request) 65 | with error -> Error (on_request_error context channel error) 66 | in 67 | let on_notification channel notification = 68 | try on_notification context channel notification 69 | with error -> on_notification_error context channel error 70 | in 71 | Lsp_channel.listen ~input:env#stdin ~output:env#stdout 72 | ~on_request:{ f = on_request } ~on_notification 73 | 74 | let () = main () 75 | -------------------------------------------------------------------------------- /jsend/emit.ml: -------------------------------------------------------------------------------- 1 | open Utree 2 | open Jtree 3 | 4 | let emit_curry function_ = 5 | JE_call { lambda = JE_var { var = Var.curry }; args = [ function_ ] } 6 | 7 | let rec emit_term : Utree.term -> expression = 8 | fun term -> 9 | match term with 10 | (* TODO: sourcemap *) 11 | | UT_loc { term; loc = _ } -> emit_term term 12 | | UT_var { var } -> JE_var { var } 13 | | UT_lambda _ -> 14 | (* TODO: weird to ignore UT_lambda like this *) 15 | emit_curry @@ emit_generator ~params:[] term 16 | | UT_apply _ -> 17 | (* TODO: weird to ignore UT_apply like this *) 18 | let call = emit_call ~args:[] term in 19 | (* TODO: test optimization, if instanceof before yield *) 20 | JE_yield { expression = call } 21 | | UT_let _ -> 22 | (* TODO: weird to ignore UT_let like this *) 23 | let block = emit_block ~consts:[] term in 24 | let wrapper = JE_generator { params = []; block } in 25 | let call = JE_call { lambda = wrapper; args = [] } in 26 | JE_yield { expression = call } 27 | | UT_string { literal } -> JE_string { literal } 28 | | UT_external { external_ } -> translate_external external_ 29 | 30 | and emit_generator ~params 
return = 31 | (* TODO: is this transformation desired? 32 | Does it changes performance behaviour *) 33 | (* TODO: too many params *) 34 | match return with 35 | | UT_loc { term = return; loc = _ } -> emit_generator ~params return 36 | | UT_lambda { param; return } -> 37 | let params = param :: params in 38 | emit_generator ~params return 39 | | UT_var _ | UT_apply _ | UT_let _ | UT_string _ | UT_external _ -> 40 | let params = List.rev params in 41 | let block = emit_block ~consts:[] return in 42 | JE_generator { params; block } 43 | 44 | and emit_call ~args lambda = 45 | (* TODO: too many args? *) 46 | match lambda with 47 | | UT_loc { term = lambda; loc = _ } -> emit_call ~args lambda 48 | | UT_apply { lambda; arg } -> 49 | let arg = emit_term arg in 50 | let args = arg :: args in 51 | emit_call ~args lambda 52 | | UT_var _ | UT_lambda _ | UT_let _ | UT_string _ | UT_external _ -> 53 | let lambda = emit_term lambda in 54 | JE_call { lambda; args } 55 | 56 | and emit_block ~consts return = 57 | match return with 58 | | UT_loc { term = return; loc = _ } -> emit_block ~consts return 59 | | UT_let { var; value; return } -> 60 | let value = emit_term value in 61 | let consts = (var, value) :: consts in 62 | emit_block ~consts return 63 | | UT_apply _ -> 64 | (* tco *) 65 | let return = 66 | let return = emit_call ~args:[] return in 67 | let constructor = 68 | JE_call { lambda = JE_var { var = Var.jmp }; args = [ return ] } 69 | in 70 | JE_new { constructor } 71 | in 72 | let consts = List.rev consts in 73 | JBlock { consts; return } 74 | | UT_var _ | UT_lambda _ | UT_string _ | UT_external _ -> 75 | let return = emit_term return in 76 | let consts = List.rev consts in 77 | JBlock { consts; return } 78 | 79 | and translate_external : external_ -> expression = 80 | fun external_ -> 81 | let var = 82 | match external_ with 83 | | UE_type -> Var.type_ 84 | | UE_fix -> Var.fix 85 | | UE_unit -> Var.unit 86 | | UE_debug -> Var.debug 87 | in 88 | JE_var { var } 89 | 
-------------------------------------------------------------------------------- /syntax/clexer.ml: -------------------------------------------------------------------------------- 1 | open Cparser 2 | open Sedlexing.Utf8 3 | 4 | exception Lexer_error of { loc : Location.t } 5 | exception Parser_error of { loc : Location.t } 6 | 7 | let () = 8 | Printexc.register_printer @@ function 9 | | Lexer_error { loc = _ } -> Some "lexer: syntax error" 10 | | Parser_error { loc = _ } -> Some "parser: syntax error" 11 | | _ -> None 12 | 13 | let loc buf = 14 | let loc_start, loc_end = Sedlexing.lexing_positions buf in 15 | Location.{ loc_start; loc_end; loc_ghost = false } 16 | 17 | let whitespace = [%sedlex.regexp? Plus (' ' | '\t' | '\n')] 18 | let alphabet = [%sedlex.regexp? 'a' .. 'z' | 'A' .. 'Z'] 19 | let digit = [%sedlex.regexp? '0' .. '9'] 20 | let variable = [%sedlex.regexp? (alphabet | '_'), Star (alphabet | digit | '_')] 21 | let extension = [%sedlex.regexp? '%', variable] 22 | let string = [%sedlex.regexp? '"', Star (Sub (any, '"')), '"'] 23 | let number = [%sedlex.regexp? Plus '0' .. 
'9'] 24 | 25 | let rec tokenizer buf = 26 | match%sedlex buf with 27 | | whitespace -> tokenizer buf 28 | | variable -> VAR (lexeme buf) 29 | | extension -> EXTENSION (lexeme buf) 30 | | ":" -> COLON 31 | | "->" -> ARROW 32 | | "=>" -> FAT_ARROW 33 | | "=" -> EQUAL 34 | | "," -> COMMA 35 | | "&" -> AMPERSAND 36 | | ";" -> SEMICOLON 37 | | string -> 38 | (* TODO: this should probably be somewhere else *) 39 | let literal = lexeme buf in 40 | (* TODO: this is dangerous *) 41 | let literal = String.sub literal 1 (String.length literal - 2) in 42 | STRING literal 43 | | number -> 44 | (* TODO: this should probably be somewhere else *) 45 | let literal = lexeme buf in 46 | (* TODO: this is dangerous *) 47 | let literal = Z.of_string literal in 48 | NUMBER literal 49 | | "(" -> LEFT_PARENS 50 | | ")" -> RIGHT_PARENS 51 | | "{" -> LEFT_BRACE 52 | | "}" -> RIGHT_BRACE 53 | | eof -> EOF 54 | | _ -> 55 | let loc = loc buf in 56 | raise @@ Lexer_error { loc } 57 | 58 | let next buf = 59 | let token = tokenizer buf in 60 | let start, end_ = Sedlexing.lexing_positions buf in 61 | (token, start, end_) 62 | 63 | open Cparser.MenhirInterpreter 64 | 65 | let rec loop buf state = 66 | match state with 67 | | InputNeeded _env -> 68 | (* The parser needs a token. Request one from the lexer, 69 | and offer it to the parser, which will produce a new 70 | checkpoint. Then, repeat. 
*) 71 | let token, start, end_ = next buf in 72 | let state = offer state (token, start, end_) in 73 | loop buf state 74 | | Shifting _ | AboutToReduce _ -> 75 | let state = resume state in 76 | loop buf state 77 | | HandlingError _env -> 78 | let loc = loc buf in 79 | raise (Parser_error { loc }) 80 | | Accepted value -> value 81 | | Rejected -> failwith "cdriver.loop: rejected reached" 82 | 83 | let buf_from_string string = 84 | (* TODO: from string seems to not trigger new line, likely a bug in sedlex *) 85 | let index = ref 0 in 86 | let length = String.length string in 87 | from_gen (fun () -> 88 | match !index < length with 89 | | true -> 90 | let char = String.get string !index in 91 | incr index; 92 | Some char 93 | | false -> None) 94 | 95 | let term_opt_from_string string = 96 | let buf = buf_from_string string in 97 | (* TODO: allow to change this *) 98 | let start, _end = Sedlexing.lexing_positions buf in 99 | loop buf @@ Cparser.Incremental.term_opt start 100 | -------------------------------------------------------------------------------- /jsend/untype.ml: -------------------------------------------------------------------------------- 1 | open Syntax 2 | open Teika 3 | open Ttree 4 | open Utree 5 | 6 | exception Term_subst_found 7 | exception Term_shift_found 8 | exception Invalid_variable 9 | 10 | let type_term : term = UT_external { external_ = UE_type } 11 | (* let fix_term : term = UT_external { external_ = UE_fix } 12 | let unit_term : term = UT_external { external_ = UE_unit } 13 | let debug_term : term = UT_external { external_ = UE_debug } *) 14 | 15 | let next_level ~vars = 16 | let current_level = 17 | match Level.Map.max_binding_opt vars with 18 | | Some (current_level, _) -> current_level 19 | | None -> 20 | (* TODO: this is weird *) 21 | level_type_string 22 | in 23 | Level.next current_level 24 | 25 | module Context : sig 26 | type 'a context 27 | 28 | val run : (unit -> 'a context) -> 'a 29 | val return : 'a -> 'a context 30 | val ( 
let* ) : 'a context -> ('a -> 'b context) -> 'b context 31 | val ( let+ ) : 'a context -> ('a -> 'b) -> 'b context 32 | val with_var : Name.t -> (Var.t -> 'k context) -> 'k context 33 | val lookup : Level.t -> Var.t context 34 | end = struct 35 | type 'a context = vars:Var.t Level.Map.t -> 'a 36 | 37 | let run context = 38 | let vars = 39 | let open Level.Map in 40 | let vars = empty in 41 | (* TODO: string is also a $type *) 42 | let vars = add level_type_univ Var.type_ vars in 43 | let vars = add level_type_string Var.type_ vars in 44 | vars 45 | in 46 | context () ~vars 47 | 48 | let return x ~vars:_ = x 49 | let ( let* ) context k ~vars = k (context ~vars) ~vars 50 | let ( let+ ) context k ~vars = k (context ~vars) 51 | 52 | let with_var name k ~vars = 53 | let var = Var.create name in 54 | let level = next_level ~vars in 55 | let vars = Level.Map.add level var vars in 56 | k var ~vars 57 | 58 | let lookup level ~vars = 59 | match Level.Map.find_opt level vars with 60 | | Some var -> var 61 | | None -> raise Invalid_variable 62 | end 63 | 64 | open Context 65 | 66 | let rec untype_term term = 67 | match term with 68 | | TT_with_type { term; type_ = _ } -> untype_term term 69 | | TT_with_sort { term } -> 70 | (* TODO: should also not be reachable? 
*) 71 | untype_term term 72 | | TT_subst _ -> raise Term_subst_found 73 | | TT_shift _ -> raise Term_shift_found 74 | | TT_var { var } -> 75 | let+ var = lookup var in 76 | UT_var { var } 77 | | TT_forall _ -> return type_term 78 | | TT_lambda { param; return } -> 79 | let+ param, return = 80 | erase_pat param @@ fun var -> 81 | let+ return = untype_term return in 82 | (var, return) 83 | in 84 | UT_lambda { param; return } 85 | | TT_apply { lambda; arg } -> 86 | let* lambda = untype_term lambda in 87 | let+ arg = untype_term arg in 88 | UT_apply { lambda; arg } 89 | | TT_let { bound; value; return } -> 90 | (* TODO: param first *) 91 | let* value = untype_term value in 92 | let+ var, return = 93 | erase_pat bound @@ fun var -> 94 | let+ return = untype_term return in 95 | (var, return) 96 | in 97 | UT_let { var; value; return } 98 | | TT_annot { term; annot = _ } -> untype_term term 99 | | TT_string { literal } -> return @@ UT_string { literal } 100 | 101 | and erase_pat pat k = 102 | match pat with 103 | | TP_with_type { pat; type_ = _ } -> erase_pat pat k 104 | | TP_annot { pat; annot = _ } -> erase_pat pat k 105 | | TP_var { name } -> with_var name k 106 | 107 | let untype_term term = Context.run @@ fun () -> untype_term term 108 | -------------------------------------------------------------------------------- /syntax/cparser.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | open Utils 3 | open Ctree 4 | 5 | let mk (loc_start, loc_end) = 6 | Location.{ loc_start; loc_end; loc_ghost = false } 7 | 8 | %} 9 | %token VAR (* x *) 10 | %token COLON (* : *) 11 | %token ARROW (* -> *) 12 | %token FAT_ARROW (* => *) 13 | %token EQUAL (* = *) 14 | %token COMMA (* , *) 15 | %token AMPERSAND (* & *) 16 | %token SEMICOLON (* ; *) 17 | %token STRING (* "abc" *) 18 | %token NUMBER (* 123 *) 19 | %token LEFT_PARENS (* ( *) 20 | %token RIGHT_PARENS (* ) *) 21 | %token LEFT_BRACE (* { *) 22 | %token RIGHT_BRACE (* } *) 23 | %token 
EXTENSION (* %x *) 24 | 25 | %token EOF 26 | 27 | %start term_opt 28 | %% 29 | 30 | let term_opt := 31 | | EOF; 32 | { None } 33 | | term = term; EOF; 34 | { Some term } 35 | 36 | let term := term_rec_pair 37 | 38 | let term_rec_pair := 39 | | term_semi_or_annot 40 | | term_pair(term_rec_pair, term_semi_or_annot) 41 | 42 | let term_semi_or_annot := 43 | | term_rec_annot 44 | | term_semi(term_rec_semi, term_rec_semi_bind) 45 | 46 | let term_rec_semi := 47 | | term_rec_funct 48 | | term_semi(term_rec_semi, term_rec_semi_bind) 49 | 50 | let term_rec_semi_bind := 51 | | term_rec_semi_annot 52 | | term_bind(term_rec_semi, term_rec_semi_annot) 53 | 54 | let term_rec_semi_annot := 55 | | term_rec_funct 56 | | term_annot(term_rec_semi_annot, term_rec_funct) 57 | 58 | let term_rec_annot := 59 | | term_rec_funct 60 | | term_annot(term_rec_annot, term_rec_funct) 61 | 62 | let term_rec_funct := 63 | | term_rec_apply 64 | | term_forall(term_rec_funct, term_rec_apply) 65 | | term_lambda(term_rec_funct, term_rec_apply) 66 | | term_both(term_rec_funct, term_rec_apply) 67 | 68 | let term_rec_apply := 69 | | term_atom 70 | | term_apply(term_rec_apply, term_atom) 71 | 72 | let term_atom := 73 | | term_var 74 | | term_extension 75 | | term_string 76 | | term_number 77 | | term_parens(term) 78 | | term_braces(term) 79 | 80 | let term_forall(self, lower) == 81 | | param = lower; ARROW; body = self; 82 | { ct_forall (mk $loc) ~param ~body } 83 | let term_lambda(self, lower) == 84 | | param = lower; FAT_ARROW; body = self; 85 | { ct_lambda (mk $loc) ~param ~body } 86 | let term_apply(self, lower) == 87 | | funct = self; arg = lower; 88 | { ct_apply (mk $loc) ~funct ~arg } 89 | let term_pair(self, lower) == 90 | | left = lower; COMMA; right = self; 91 | { ct_pair (mk $loc) ~left ~right } 92 | let term_both(self, lower) == 93 | | left = lower; AMPERSAND; right = self; 94 | { ct_both (mk $loc) ~left ~right } 95 | let term_bind(self, lower) == 96 | | bound = lower; EQUAL; value = lower; 97 | 
{ ct_bind (mk $loc) ~bound ~value } 98 | let term_semi(self, lower) == 99 | | left = lower; SEMICOLON; right = self; 100 | { ct_semi (mk $loc) ~left ~right } 101 | let term_annot(self, lower) == 102 | | value = lower; COLON; annot = self; 103 | { ct_annot (mk $loc) ~value ~annot } 104 | let term_var == 105 | | var = VAR; 106 | { ct_var (mk $loc) ~var:(Name.make var) } 107 | let term_extension == 108 | | extension = EXTENSION; 109 | { ct_extension (mk $loc) ~extension:(Name.make extension) } 110 | let term_string == 111 | | literal = STRING; 112 | { ct_string (mk $loc) ~literal } 113 | let term_number == 114 | | literal = NUMBER; 115 | { ct_number (mk $loc) ~literal } 116 | let term_parens(content) == 117 | | LEFT_PARENS; content = content; RIGHT_PARENS; 118 | { ct_parens (mk $loc) ~content } 119 | let term_braces(content) == 120 | | LEFT_BRACE; content = content; RIGHT_BRACE; 121 | { ct_braces (mk $loc) ~content } 122 | -------------------------------------------------------------------------------- /design/SYNTAX.md: -------------------------------------------------------------------------------- 1 | # Syntax 2 | 3 | This should document the syntax of the project and the trade-offs involved. 4 | 5 | ## Requirements 6 | 7 | 8 | 9 | The syntax needs to be able to describe four classes of terms: modules, expressions, types and patterns. 10 | 11 | The syntax should also be consistent and succinct, this requires the syntax meaning to be context dependent. 12 | 13 | The syntax should be simple so that it can easily be user manipulated, allowing tools like macros and ppx to be trivially implemented. 14 | 15 | ## Unified Representation 16 | 17 | To avoid having too much notation, a unified representation was chosen, this means that all classes of terms have an identical syntax. 18 | 19 | This is achieved by accepting a lot more code during parsing and rejecting this code later.
20 | 21 | Another problem is the need for parens in many situations such as typing a simple binding. This is avoided by allowing a couple places to omit parens in an ad-hoc manner. 22 | 23 | Pros: 24 | 25 | - AST is really small, making macros easier 26 | - Syntax error messages are much easier 27 | - Error recovery is way easier 28 | - Flexible, ppx can make use of the invalid syntax 29 | 30 | Cons: 31 | 32 | - invalid code may be parsed 33 | - not clear where parens are needed 34 | - no syntactical indication of current context 35 | 36 | ```rust 37 | Syntax = 38 | | Identifier // variable 39 | | Number // number 40 | | Syntax -> Syntax // arrow 41 | | Syntax => Syntax // lambda 42 | | Syntax Syntax // apply 43 | | Syntax = Syntax; 44 | | Syntax = Syntax; Syntax // binding 45 | | Syntax : Syntax; 46 | | Syntax : Syntax; Syntax // binding signature 47 | | Syntax Syntax[] = Syntax; 48 | | Syntax Syntax[] = Syntax; Syntax // binding + lambda 49 | | { Syntax } // structure 50 | | Syntax : Syntax // constraint 51 | | Syntax.Syntax // field 52 | | (Syntax) // parens 53 | ``` 54 | 55 | ## Arrow and Lambda 56 | 57 | The entire language can be described using lambda only, in fact arrow can only describe a subset of the language. 58 | 59 | But adding arrow allows easier inference and a simpler syntax for describing common types. 60 | 61 | ## Implicit Argument 62 | 63 | Initially `A. Syntax` was the thought way to do implicit parameters, but this led to a couple weird syntax properties, such as `A.Eq` and `A. Eq` being two different things. 64 | 65 | Also that completely prevented binding + function syntax `id x = x`, which may be desirable in the future. 66 | 67 | So for the syntax argument it is currently using `{A: M}` which was an already supported syntax. 68 | 69 | But this makes it ambiguous with record destructuring, currently this means that when destructuring on a record with a single field, the last semicolon cannot be omitted: `{A} -> A` means `forall a.
a` but `{A;} -> A` means `{ A: _ } -> A`. 70 | 71 | - https://agda.readthedocs.io/en/v2.6.1/language/implicit-arguments.html 72 | 73 | Another option for implicit arguments is using syntax from optional parameters, `?A -> A`. 74 | 75 | ## Binding Lambda 76 | 77 | A common feature in languages such OCaml and Haskell is to support a fusion syntax for lambdas and binding, in the form of `add a b = a + b`. 78 | 79 | The advantages of this is that it's way more succinct for most functions and it's a common feature in other programming languages. 80 | 81 | The disadvantage is that it's not straightforward to explain to users that `f = x -> x` is the same as `f x = x`, it also doesn't work with binding + constraint such as `x: Int -> Int = x -> x`. 82 | 83 | This was decided to be a reasonable choice due to type constructors. And as constraints + lambda should not be common. 84 | 85 | ```rust 86 | Packet {A} = { 87 | id: Nat; 88 | data: A; 89 | }; 90 | ``` 91 | -------------------------------------------------------------------------------- /teikavsc/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "teikavsc", 3 | "displayName": "Teika", 4 | "description": "Teika language extension for VSCode", 5 | "license": "MIT", 6 | "version": "0.0.1", 7 | "publisher": "teikalang", 8 | "repository": { 9 | "type": "git", 10 | "url": "https://github.com/teikalang/teika" 11 | }, 12 | "bugs": { 13 | "url": "https://github.com/teikalang/teika/issues" 14 | }, 15 | "homepage": "my home page", 16 | "main": "./dist/main.js", 17 | "engines": { 18 | "vscode": "^1.64.0" 19 | }, 20 | "categories": [ 21 | "Programming Languages" 22 | ], 23 | "activationEvents": [ 24 | "workspaceContains:**/*.te", 25 | "workspaceContains:**/*.tei" 26 | ], 27 | "_icon": "assets/logo.png", 28 | "contributes": { 29 | "viewsWelcome": [], 30 | "viewsContainers": {}, 31 | "views": {}, 32 | "commands": [ 33 | { 34 | "command": "teika.server.restart", 35 | 
"category": "Teika", 36 | "title": "Restart Language Server" 37 | } 38 | ], 39 | "keybindings": [], 40 | "menus": { 41 | "editor/context": [], 42 | "commandPalette": [], 43 | "editor/title": [], 44 | "view/title": [], 45 | "view/item/context": [] 46 | }, 47 | "configuration": { 48 | "title": "Teika", 49 | "properties": { 50 | "teika.server.path": { 51 | "type": "string", 52 | "default": null, 53 | "description": "teikalsp path" 54 | }, 55 | "teika.trace.server": { 56 | "scope": "window", 57 | "type": "string", 58 | "enum": [ 59 | "off", 60 | "messages", 61 | "verbose" 62 | ], 63 | "default": "off", 64 | "description": "Traces the communication between VS Code and the language server." 65 | } 66 | } 67 | }, 68 | "configurationDefaults": { 69 | "[teika]": { 70 | "editor.tabSize": 2 71 | } 72 | }, 73 | "problemMatchers": [], 74 | "taskDefinitions": [], 75 | "languages": [ 76 | { 77 | "id": "teika", 78 | "aliases": [ 79 | "Teika", 80 | "teika" 81 | ], 82 | "extensions": [ 83 | ".te", 84 | ".tei" 85 | ], 86 | "configuration": "./teika.language.json" 87 | } 88 | ], 89 | "grammars": [ 90 | { 91 | "language": "teika", 92 | "scopeName": "source.teika", 93 | "path": "./teika.syntax.json" 94 | }, 95 | { 96 | "scopeName": "markdown.teika.codeblock", 97 | "path": "./teika.markdown.codeblock.json", 98 | "injectTo": [ 99 | "text.html.markdown" 100 | ], 101 | "embeddedLanguages": { 102 | "meta.embedded.block.teika": "teika" 103 | } 104 | } 105 | ], 106 | "snippets": [ 107 | { 108 | "language": "teika", 109 | "path": "./teika.snippets.json" 110 | } 111 | ], 112 | "jsonValidation": [], 113 | "customEditors": [] 114 | }, 115 | "scripts": { 116 | "package": "vsce package --out vscode-teika.vsix --yarn", 117 | "deploy:vsce": "vsce publish --packagePath vscode-teika.vsix --yarn", 118 | "fmt:check": "prettier . --check", 119 | "fmt": "prettier . 
--write" 120 | }, 121 | "dependencies": { 122 | "vscode-languageclient": "*" 123 | }, 124 | "devDependencies": { 125 | "@types/vscode": "*", 126 | "@types/node": "*", 127 | "prettier": "*", 128 | "vsce": "*", 129 | "typescript": "*" 130 | }, 131 | "prettier": { 132 | "proseWrap": "always", 133 | "overrides": [] 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /design/LANGUAGE.md: -------------------------------------------------------------------------------- 1 | # Teika Language 2 | 3 | This document intends to describe the Teika Language. Which refers to all the features supported by Teika but not exactly how it will be presented to an user. 4 | 5 | ## Goals 6 | 7 | Those are the current goals in order of importance and they may change. 8 | 9 | 1. Soundness, type preservation 10 | 2. Convenience, inference and effects 11 | 3. Uniformity, calculus of construction 12 | 4. Logical consistency, predicativity 13 | 5. Decidability, subtyping 14 | 15 | ## Warning 16 | 17 | This document was written by me(@EduardoRFS) and I'm not proficient in type theory, so mistakes and changes are very likely to happen here. 18 | 19 | ## Smol 20 | 21 | This is the canonical language and should contain all the theoritical power on Teika, everything else should be describable in terms of the following features. 
22 | 23 | ```rust 24 | // Type in Type 25 | (Type : Type); 26 | 27 | // variable 28 | (A); 29 | 30 | // forall 31 | ((A : Type) -> (x : A) -> A); 32 | // lambda 33 | ((A : Type) => (x : A) => x); 34 | // apply 35 | (id Nat 1); 36 | 37 | // exists 38 | (A : Type, A); 39 | // pair 40 | (A = Nat, 1 : A); 41 | // split 42 | ((A, x) = p; x); 43 | 44 | // equal 45 | Equal : (A : Type) -> (x : A) -> (y : A) -> Type; 46 | // refl 47 | Refl : (A : Type) -> (x : A) -> Equal A x x; 48 | // subst 49 | Subst : 50 | (A : Type) -> 51 | (x : A) -> 52 | (y : A) -> 53 | (x_eq_y : Equal A x y) -> 54 | (P : (x_or_y : A) -> Type) -> 55 | (p_x : P x) -> 56 | P y; 57 | ``` 58 | 59 | ### Forall, Lambda and Apply 60 | 61 | This is abstraction and can be seen as universal quantification. Those are functions, they're your work horse, the majority of your code will likely be about declaring and calling those. 62 | 63 | #### Forall 64 | 65 | The forall is the description of a function, it describes the type of the parameter and the type of the return, the return may depend on the type such as in polymorphic functions and dependent functions. Those are values. 66 | 67 | ```rust 68 | /* forall syntax */ 69 | ((A : T) -> A); 70 | 71 | /* rule */ 72 | A : Type B : Type 73 | ------------------ 74 | (x : A) -> B 75 | ``` 76 | 77 | #### Lambda 78 | 79 | The lambda is the way to introduce a function, the type of a lambda will always be a forall. The body may depend on the parameter. Those are values. 80 | 81 | ```rust 82 | /* lambda syntax */ 83 | ((A : Type) => A); 84 | 85 | /* rule */ 86 | b : B 87 | --------------------------- 88 | (x : A) => b : (x : A) -> B 89 | ``` 90 | 91 | #### Apply 92 | 93 | This is the source of computing, the left side is anything of an arrow type and the right side must have the same type as the parameter expected by the lambda.
94 | 95 | ```rust 96 | /* apply syntax */ 97 | (lambda argument); 98 | 99 | /* rule */ 100 | l : (x : A) -> B a : A 101 | ----------------------- 102 | l a : B 103 | ``` 104 | 105 | ### Exists, Pair and Split 106 | 107 | This allows pairing and can be seen as existential quantification. Those are pairs, they're the fundamental way to pack data together, all of your modules can be represented using those. 108 | 109 | #### Exists 110 | 111 | This is the description of pairs, it describes the type of the left value and the type of the right value, the type of the right value may depend on the left value. Those are values. 112 | 113 | ```rust 114 | /* exists syntax */ 115 | (A : Type, A); 116 | 117 | /* rule */ 118 | A : Type B : Type 119 | ------------------ 120 | (x : A, B) 121 | ``` 122 | 123 | #### Pair 124 | 125 | This is how you introduce a pair, the type of a pair will always be an exists. The type of the right side may depend on the left side, but the value itself cannot depend on the left side. Those are values. 126 | 127 | ```rust 128 | /* pair syntax */ 129 | (A = Nat, 1 : A) 130 | 131 | /* rule */ 132 | l : A R : B 133 | --------------------------- 134 | (x = l, r : B) : (x : A, B) 135 | ``` 136 | 137 | #### Split 138 | 139 | This is how you destruct a pair, it is like a let, but it can extract pairs. The body may depend on the pair values. The type of the body may depend on the pair values. 140 | 141 | ```rust 142 | /* split syntax */ 143 | ((A, one) = p; A); 144 | 145 | /* rule */ 146 | p : (x : A, B) b : C 147 | --------------------- 148 | ((x, y) = p; b) : C 149 | ``` 150 | 151 | ### Equal, Refl and Subst 152 | 153 | This is Leibniz equality, it can be used as a way to do some form of dependent elimination, it hopefully only matters as a stepping stone for building libraries. 154 | 155 | #### Equal 156 | 157 | This is the type of an equality, it states that the first and second values are literally the same, allowing to replace one for the other.
158 | 159 | #### Refl 160 | 161 | This is the introduction of an equality, it is the only way to construct an equality and any equality is equivalent to this. 162 | 163 | #### Subst 164 | 165 | This is how you can eliminate an equality, it is enough to derive all the other transformations that are desirable for equalities 166 | 167 | ```rust 168 | /* sym */ 169 | Sym = 170 | (A : Type) => 171 | (B : Type) => 172 | (A_eq_B : Equal A B) => 173 | subst 174 | ((B : Type) => Equal B A) 175 | A 176 | B 177 | A_eq_B 178 | (Refl A) 179 | /* trans */ 180 | Trans = 181 | (A : Type) => 182 | (B : Type) => 183 | (C : Type) => 184 | (A_eq_B : Equal A B) => 185 | (B_eq_C : Equal B C) => 186 | subst 187 | ((C : Type) => Equal A C) 188 | B 189 | C 190 | B_eq_C 191 | A_eq_B; 192 | ``` 193 | -------------------------------------------------------------------------------- /teikalsp/lsp_channel.ml: -------------------------------------------------------------------------------- 1 | module Io : sig 2 | type 'a t 3 | 4 | val return : 'a -> 'a t 5 | val raise : exn -> 'a t 6 | val await : 'a t -> 'a 7 | val async : (sw:Eio.Switch.t -> ('a, exn) result) -> 'a t 8 | 9 | module O : sig 10 | val ( let+ ) : 'a t -> ('a -> 'b) -> 'b t 11 | val ( let* ) : 'a t -> ('a -> 'b t) -> 'b t 12 | end 13 | end = struct 14 | type 'a t = sw:Eio.Switch.t -> ('a, exn) result Eio.Promise.t 15 | 16 | let await t = Eio.Switch.run @@ fun sw -> Eio.Promise.await_exn (t ~sw) 17 | let return value ~sw:_ = Eio.Promise.create_resolved (Ok value) 18 | let error desc ~sw:_ = Eio.Promise.create_resolved (Error desc) 19 | 20 | let async f ~sw = 21 | let promise, resolver = Eio.Promise.create () in 22 | ( Eio.Fiber.fork ~sw @@ fun () -> 23 | try 24 | let result = f ~sw in 25 | Eio.Promise.resolve resolver result 26 | with exn -> Eio.Promise.resolve resolver @@ Error exn ); 27 | promise 28 | 29 | let bind t f = 30 | async @@ fun ~sw -> 31 | match Eio.Promise.await (t ~sw) with 32 | | Ok value -> Eio.Promise.await @@ f 
value ~sw 33 | | Error desc -> Error desc 34 | 35 | let raise = error 36 | 37 | module O = struct 38 | let ( let+ ) x f = bind x @@ fun value -> return @@ f value 39 | let ( let* ) = bind 40 | end 41 | end 42 | 43 | module Chan : sig 44 | type input 45 | type output 46 | 47 | (* eio *) 48 | val of_source : #Eio.Flow.source -> input 49 | val with_sink : #Eio.Flow.sink -> (output -> 'a) -> 'a 50 | 51 | (* lsp *) 52 | val read_line : input -> string option Io.t 53 | val read_exactly : input -> int -> string option Io.t 54 | val write : output -> string -> unit Io.t 55 | end = struct 56 | type input = Input of { mutex : Eio.Mutex.t; buf : Eio.Buf_read.t } 57 | type output = Output of { mutex : Eio.Mutex.t; buf : Eio.Buf_write.t } 58 | 59 | (* TODO: magic numbers *) 60 | let initial_size = 1024 61 | let max_size = 1024 * 1024 62 | 63 | let of_source source = 64 | let mutex = Eio.Mutex.create () in 65 | let buf = Eio.Buf_read.of_flow ~initial_size ~max_size source in 66 | Input { mutex; buf } 67 | 68 | let with_sink sink f = 69 | let mutex = Eio.Mutex.create () in 70 | Eio.Buf_write.with_flow ~initial_size sink @@ fun buf -> 71 | f @@ Output { mutex; buf } 72 | 73 | let read_line input = 74 | let (Input { mutex; buf }) = input in 75 | Io.async @@ fun ~sw:_ -> 76 | (* TODO: what this protect does? 
*) 77 | Eio.Mutex.use_rw ~protect:true mutex @@ fun () -> 78 | match Eio.Buf_read.eof_seen buf with 79 | | true -> Ok None 80 | | false -> Ok (Some (Eio.Buf_read.line buf)) 81 | 82 | let read_exactly input size = 83 | let (Input { mutex; buf }) = input in 84 | Io.async @@ fun ~sw:_ -> 85 | Eio.Mutex.use_rw ~protect:true mutex @@ fun () -> 86 | match Eio.Buf_read.eof_seen buf with 87 | | true -> Ok None 88 | | false -> Ok (Some (Eio.Buf_read.take size buf)) 89 | 90 | let write output str = 91 | let (Output { mutex; buf }) = output in 92 | Io.async @@ fun ~sw:_ -> 93 | Eio.Mutex.use_rw ~protect:true mutex @@ fun () -> 94 | Ok (Eio.Buf_write.string buf str) 95 | end 96 | 97 | module Lsp_io = Lsp.Io.Make (Io) (Chan) 98 | open Jsonrpc 99 | open Lsp_error 100 | 101 | (* TODO: is a mutex needed for write? *) 102 | type channel = Chan.output 103 | type t = channel 104 | 105 | let notify channel notification = 106 | (* TODO: fork here *) 107 | (* TODO: buffering and async? *) 108 | let notification = Lsp.Server_notification.to_jsonrpc notification in 109 | Io.await @@ Lsp_io.write channel @@ Notification notification 110 | 111 | let respond channel response = 112 | Io.await @@ Lsp_io.write channel @@ Response response 113 | 114 | let rec input_loop ~input ~output with_ = 115 | (* TODO: buffering and async handling *) 116 | match Io.await @@ Lsp_io.read input with 117 | | Some packet -> 118 | let () = with_ packet in 119 | input_loop ~input ~output with_ 120 | | exception exn -> (* TODO: handle this exception *) raise exn 121 | | None -> 122 | (* TODO: this means EOF right? 
*) 123 | () 124 | 125 | let request_of_jsonrpc request = 126 | match Lsp.Client_request.of_jsonrpc request with 127 | | Ok request -> request 128 | | Error error -> fail (Error_invalid_notification { error }) 129 | 130 | let notification_of_jsonrpc notification = 131 | match Lsp.Client_notification.of_jsonrpc notification with 132 | | Ok notification -> notification 133 | | Error error -> fail (Error_invalid_notification { error }) 134 | 135 | type on_request = { 136 | f : 137 | 'response. 138 | channel -> 139 | 'response Lsp.Client_request.t -> 140 | ('response, Response.Error.t) result; 141 | } 142 | 143 | let listen ~input ~output ~on_request ~on_notification = 144 | let on_request channel request = 145 | (* TODO: error handling *) 146 | let result = 147 | let (E request) = request_of_jsonrpc request in 148 | match on_request.f channel request with 149 | | Ok result -> Ok (Lsp.Client_request.yojson_of_result request result) 150 | | Error _error as error -> error 151 | in 152 | let response = Jsonrpc.Response.{ id = request.id; result } in 153 | respond channel response 154 | in 155 | let on_notification channel notification = 156 | let notification = notification_of_jsonrpc notification in 157 | on_notification channel notification 158 | in 159 | 160 | let input = Chan.of_source input in 161 | Chan.with_sink output @@ fun channel -> 162 | input_loop ~input ~output @@ fun packet -> 163 | (* TODO: make this async? *) 164 | match packet with 165 | | Notification notification -> on_notification channel notification 166 | | Request request -> on_request channel request 167 | | Batch_call calls -> 168 | (* TODO: what if one fails? It should not prevents the others *) 169 | List.iter 170 | (fun call -> 171 | match call with 172 | | `Request request -> on_request channel request 173 | | `Notification notification -> on_notification channel notification) 174 | calls 175 | (* TODO: can the server receive a response? 
176 | Yes but right now it will not be supported *) 177 | | Response _ -> fail Error_response_unsupported 178 | | Batch_response _ -> fail Error_response_unsupported 179 | -------------------------------------------------------------------------------- /teika/tprinter.ml: -------------------------------------------------------------------------------- 1 | [@@@ocaml.warning "-unused-constructor"] 2 | 3 | module Ptree = struct 4 | open Format 5 | open Utils 6 | 7 | type term = 8 | (* TODO: use PT_meta for level, subst and shift *) 9 | | PT_meta of { term : term } 10 | | PT_annot of { term : term; annot : term } 11 | | PT_var of { var : Name.t } 12 | | PT_free_var of { var : Level.t } 13 | | PT_bound_var of { var : Index.t } 14 | | PT_hoist of { bound : term; body : term } 15 | | PT_let of { bound : term; arg : term; body : term } 16 | | PT_apply of { funct : term; arg : term } 17 | | PT_lambda of { param : term; body : term } 18 | | PT_forall of { param : term; body : term } 19 | | PT_inter of { left : term; right : term } 20 | | PT_string of { literal : string } 21 | 22 | type term_prec = T_wrapped | T_let | T_funct | T_apply | T_atom 23 | 24 | let pp_term_syntax ~pp_wrapped ~pp_let ~pp_funct ~pp_apply ~pp_atom fmt term = 25 | match term with 26 | | PT_meta { term } -> fprintf fmt "#%a" pp_atom term 27 | | PT_annot { term; annot } -> 28 | fprintf fmt "%a : %a" pp_funct term pp_wrapped annot 29 | | PT_var { var } -> fprintf fmt "%s" (Name.repr var) 30 | | PT_free_var { var } -> fprintf fmt "\\+%a" Level.pp var 31 | | PT_bound_var { var } -> fprintf fmt "\\-%a" Index.pp var 32 | | PT_hoist { bound; body } -> 33 | (* TODO: is pp_wrapped correct here? 
*) 34 | fprintf fmt "%a; %a" pp_wrapped bound pp_let body 35 | | PT_let { bound; arg; body } -> 36 | fprintf fmt "%a = %a; %a" pp_atom bound pp_funct arg pp_let body 37 | | PT_lambda { param; body } -> 38 | fprintf fmt "%a => %a" pp_atom param pp_funct body 39 | | PT_apply { funct; arg } -> fprintf fmt "%a %a" pp_apply funct pp_atom arg 40 | | PT_string { literal } -> 41 | (* TODO: proper escaping *) 42 | fprintf fmt "%S" literal 43 | | PT_forall { param; body } -> 44 | fprintf fmt "(%a) -> %a" pp_wrapped param pp_funct body 45 | | PT_inter { left; right } -> 46 | fprintf fmt "(%a) & %a" pp_wrapped left pp_funct right 47 | 48 | let rec pp_term prec fmt term = 49 | let pp_wrapped fmt term = pp_term T_wrapped fmt term in 50 | let pp_let fmt term = pp_term T_let fmt term in 51 | let pp_funct fmt term = pp_term T_funct fmt term in 52 | let pp_apply fmt term = pp_term T_apply fmt term in 53 | let pp_atom fmt term = pp_term T_atom fmt term in 54 | match (term, prec) with 55 | | ( (PT_meta _ | PT_var _ | PT_free_var _ | PT_bound_var _ | PT_string _), 56 | (T_wrapped | T_let | T_funct | T_apply | T_atom) ) 57 | | PT_apply _, (T_wrapped | T_let | T_funct | T_apply) 58 | | (PT_lambda _ | PT_forall _ | PT_inter _), (T_wrapped | T_let | T_funct) 59 | | (PT_hoist _ | PT_let _), (T_wrapped | T_let) 60 | | PT_annot _, T_wrapped -> 61 | pp_term_syntax ~pp_wrapped ~pp_let ~pp_funct ~pp_apply ~pp_atom fmt term 62 | | PT_apply _, T_atom 63 | | (PT_lambda _ | PT_forall _ | PT_inter _), (T_apply | T_atom) 64 | | (PT_hoist _ | PT_let _), (T_funct | T_apply | T_atom) 65 | | PT_annot _, (T_let | T_funct | T_apply | T_atom) -> 66 | fprintf fmt "(%a)" pp_wrapped term 67 | 68 | let pp_term fmt term = pp_term T_wrapped fmt term 69 | end 70 | 71 | open Ttree 72 | open Ptree 73 | 74 | let _pt_with_type ~type_ term = 75 | PT_meta { term = PT_annot { term; annot = type_ } } 76 | 77 | (* TODO: extract substitutions *) 78 | (* TODO: rename all tt_ to term_ *) 79 | let rec tt_print term = 80 | let 
(Term { struct_ = term; loc = _ }) = term in 81 | match term with 82 | | T_annot { term; annot } -> 83 | let term = tt_print term in 84 | let annot = tt_print annot in 85 | PT_annot { term; annot } 86 | | T_var { var } -> PT_bound_var { var } 87 | | T_let { bound; arg; body } -> 88 | let bound = tp_print bound in 89 | let arg = tt_print arg in 90 | let body = tt_print body in 91 | PT_let { bound; arg; body } 92 | | T_hoist { bound; body } -> 93 | let bound = vp_print bound in 94 | let body = tt_print body in 95 | PT_hoist { bound; body } 96 | | T_fix { bound; var = _; arg; body } -> 97 | (* TODO: proper var renaming *) 98 | let bound = vp_print bound in 99 | let arg = tt_print arg in 100 | let body = tt_print body in 101 | PT_let { bound; arg; body } 102 | | T_lambda { bound; body } -> 103 | let param = tp_print bound in 104 | let body = tt_print body in 105 | PT_lambda { param; body } 106 | | T_apply { funct; arg } -> 107 | let funct = tt_print funct in 108 | let arg = tt_print arg in 109 | PT_apply { funct; arg } 110 | | T_forall { bound; param; body } -> 111 | let param = 112 | let pat = tp_print bound in 113 | let annot = tt_print param in 114 | PT_annot { term = pat; annot } 115 | in 116 | let body = tt_print body in 117 | PT_forall { param; body } 118 | | T_self { bound; body } -> 119 | let left = vp_print bound in 120 | let right = tt_print body in 121 | (* TODO: self *) 122 | PT_inter { left; right } 123 | | T_tuple _ | T_exists _ -> failwith "not implemented" 124 | 125 | and vp_print pat = 126 | let (VPat { struct_ = pat; loc = _ }) = pat in 127 | match pat with 128 | | VP_annot { pat; annot } -> 129 | let pat = vp_print pat in 130 | let annot = tt_print annot in 131 | PT_annot { term = pat; annot } 132 | | VP_var { var } -> PT_var { var } 133 | 134 | and tp_print pat = 135 | let (Pat { struct_ = pat; loc = _ }) = pat in 136 | match pat with 137 | | P_annot { pat; annot } -> 138 | let pat = tp_print pat in 139 | let annot = tt_print annot in 140 | PT_annot 
{ term = pat; annot } 141 | | P_var { var } -> PT_var { var } 142 | | P_tuple _ -> failwith "not implemented" 143 | 144 | let pp_term fmt term = 145 | let term = tt_print term in 146 | Ptree.pp_term fmt term 147 | 148 | let pp_pat fmt pat = 149 | let pat = tp_print pat in 150 | Ptree.pp_term fmt pat 151 | 152 | module Perror = struct 153 | open Format 154 | open Utils 155 | open Ptree 156 | 157 | type error = 158 | | PE_loc of { loc : Location.t; error : error } 159 | | PE_type_clash 160 | | PE_unknown_var of { name : Name.t } 161 | | PE_not_a_forall of { type_ : term } 162 | | PE_hoist_not_implemented 163 | | PE_extensions_not_implemented 164 | | PE_pairs_not_implemented 165 | | PE_unknown_native of { native : string } 166 | | PE_missing_annotation 167 | | PE_invalid_notation 168 | 169 | let pp_pos fmt pos = 170 | let Lexing.{ pos_fname; pos_lnum; pos_bol; pos_cnum = _ } = pos in 171 | (* TODO: print only file by default? *) 172 | fprintf fmt "%s:%d:%d" pos_fname pos_lnum pos_bol 173 | 174 | let pp_loc fmt loc = 175 | let Location.{ loc_start; loc_end; loc_ghost = _ } = loc in 176 | match Location.is_none loc with 177 | | true -> fprintf fmt "[__NONE__]" 178 | | false -> fprintf fmt "[%a .. 
%a]" pp_pos loc_start pp_pos loc_end 179 | 180 | let rec pp_error fmt error = 181 | match error with 182 | | PE_loc { loc; error } -> fprintf fmt "%a\n%a" pp_loc loc pp_error error 183 | | PE_type_clash -> fprintf fmt "type clash" 184 | | PE_unknown_var { name } -> fprintf fmt "unknown variable %a" Name.pp name 185 | | PE_not_a_forall { type_ } -> 186 | fprintf fmt "expected forall\nreceived : %a" pp_term type_ 187 | | PE_hoist_not_implemented -> fprintf fmt "hoist not implemented" 188 | | PE_extensions_not_implemented -> fprintf fmt "extensions not implemented" 189 | | PE_pairs_not_implemented -> fprintf fmt "pairs not implemented" 190 | | PE_unknown_native { native } -> fprintf fmt "unknown native : %S" native 191 | (* TODO: rename missing annotation *) 192 | | PE_missing_annotation -> fprintf fmt "not enough annotations" 193 | | PE_invalid_notation -> fprintf fmt "invalid notation" 194 | end 195 | 196 | let rec te_print error = 197 | let open Terror in 198 | let open Perror in 199 | match error with 200 | | TError_loc { error; loc } -> 201 | let rec loop loc error = 202 | match error with 203 | | TError_loc { error; loc = loc' } -> 204 | let loc = 205 | (* ignore none locations *) 206 | match Location.is_none loc' with 207 | | true -> loc 208 | | false -> loc' 209 | in 210 | loop loc error 211 | | error -> 212 | let error = te_print error in 213 | PE_loc { loc; error } 214 | in 215 | loop loc error 216 | | TError_type_clash -> PE_type_clash 217 | | TError_unknown_var { name } -> PE_unknown_var { name } 218 | | TError_not_a_forall { type_ } -> 219 | let type_ = tt_print type_ in 220 | PE_not_a_forall { type_ } 221 | | TError_extensions_not_implemented -> PE_extensions_not_implemented 222 | | TError_hoist_not_implemented -> PE_hoist_not_implemented 223 | | TError_pairs_not_implemented -> 224 | PE_pairs_not_implemented (* TODO: print payload *) 225 | | TError_unknown_native { native } -> PE_unknown_native { native } 226 | | TError_missing_annotation -> 
PE_missing_annotation 227 | | TError_invalid_notation -> PE_invalid_notation 228 | 229 | let pp_error fmt error = 230 | let error = te_print error in 231 | Perror.pp_error fmt error 232 | -------------------------------------------------------------------------------- /smol/test.ml: -------------------------------------------------------------------------------- 1 | open Syntax 2 | open Smol 3 | 4 | type test = { name : string; term : string } 5 | 6 | let type_term name term = { name; term } 7 | 8 | (* TODO: used variable still usable on type level *) 9 | let id = 10 | type_term "id" 11 | {|((A : Type $ 0) => (x : A) => x 12 | :(A : Type $ 0) -> (x : A) -> A)|} 13 | 14 | let id_propagate = 15 | type_term "id_propagate" 16 | {|((A $ 0) => x => x : (A : Type $ 0) -> (x : A) -> A)|} 17 | 18 | let apply_erasable = 19 | type_term "apply_erasable" 20 | {| 21 | (A : Type $ 0) => ((A : Type $ 0) => (x : A) => x) A 22 | |} 23 | 24 | let sequence = 25 | type_term "sequence" 26 | {|((A : Type) => (x : A) => (B : Type) => (y : B) => y 27 | :(A : Type) -> (x : A) -> (B : Type) -> (y : B) -> B)|} 28 | 29 | let bool = 30 | type_term "bool" 31 | {|((A : Type) => (x : A) => (y : A) => x 32 | :(A : Type) -> (x : A) -> (y : A) -> A)|} 33 | 34 | let sequence_propagate = 35 | type_term "sequence_propagate" 36 | {|(A => x => B => y => y 37 | :(A : Type) -> (x : A) -> (B : Type) -> (y : B) -> B)|} 38 | 39 | let true_ = 40 | type_term "true" 41 | {|((A : Type) => (x : A) => (y : A) => x 42 | :(A : Type) -> (x : A) -> (y : A) -> A)|} 43 | 44 | let true_propagate = 45 | type_term "true_propagate" 46 | {|(A => x => y => x 47 | :(A : Type) -> (x : A) -> (y : A) -> A)|} 48 | 49 | let false_ = 50 | type_term "false" 51 | {|((A : Type) => (x : A) => (y : A) => y 52 | :(A : Type) -> (x : A) -> (y : A) -> A)|} 53 | 54 | let ind_False = 55 | let b_false = {|f @-> (P : (f : @False I_False) -> Type) -> @I_False P f|} in 56 | let i_false_t = 57 | Format.sprintf 58 | {|I_False @-> (P : (f : @False 
I_False) -> Type) -> (f : %s) -> Type|} 59 | b_false 60 | in 61 | let code = 62 | Format.sprintf 63 | {| 64 | (FalseT : Type) === False @-> (I_False : %s) -> Type; 65 | (False : FalseT) @=> (I_False : %s) => %s 66 | |} 67 | i_false_t i_false_t b_false 68 | in 69 | type_term "ind_False" code 70 | 71 | let ind_Unit = 72 | let b_Unit = 73 | {| 74 | u @-> (P : (x : @Unit I_Unit unit I_unit) -> Type) -> 75 | (x : @I_Unit unit I_unit P (@unit I_unit)) -> @I_Unit unit I_unit P u 76 | |} 77 | in 78 | let b_unit = 79 | {| 80 | (u : u @-> 81 | (P : (x : @Unit I_Unit unit I_unit) -> Type) -> 82 | (a : @I_Unit unit I_unit P (@unit I_unit)) -> @I_Unit unit I_unit P u 83 | ) @=> (P : (x : @Unit I_Unit unit I_unit) -> Type) => 84 | (b : @I_Unit unit I_unit P (@unit I_unit)) => @I_unit P b 85 | |} 86 | in 87 | let t_i_unit = 88 | Format.sprintf 89 | {| 90 | I_unit @-> (P : (c : @Unit I_Unit unit I_unit) -> Type) -> 91 | (d : @I_Unit unit I_unit P (@unit I_unit)) -> 92 | @I_Unit unit I_unit P (%s) 93 | |} 94 | b_unit 95 | in 96 | let t_unit = 97 | Format.sprintf {| 98 | unit @-> (I_unit : %s) -> %s 99 | |} t_i_unit b_Unit 100 | in 101 | let t_i_Unit = 102 | Format.sprintf 103 | {| 104 | I_Unit @-> (unit : %s) -> (I_unit : %s) -> 105 | (P : (e : @Unit I_Unit unit I_unit) -> Type) -> 106 | (f : %s) -> Type 107 | |} 108 | t_unit t_i_unit b_Unit 109 | in 110 | (* 111 | {| 112 | (UnitT : Type) === 113 | Unit @-> (I_Unit : %s) -> (unit : %s) -> (I_unit : %s) -> Type; 114 | (Unit : UnitT) === (Unit : UnitT) @=> 115 | (I_Unit : %s) => (unit : %s) => (I_unit : %s) => %s; 116 | (UnitR : Type) === (I_Unit : %s) -> (unit : %s) -> (I_unit : %s) -> Type; 117 | (UnitEq : (P : (x : UnitR) -> Type) -> (x : P @Unit) -> 118 | P ((I_Unit : %s) => (unit : %s) => (I_unit : %s) => %s) 119 | ) === (P : (x : UnitR) -> Type) => (x : P @Unit) => %%expand x; 120 | (I_Unit : %s) === (I_Unit : %s) @=> (unit : %s) => (I_unit : %s) => 121 | (P : (f : @Unit I_Unit unit I_unit) -> Type) => 122 | UnitEq ((Unit : 
UnitR) => (f : Unit I_Unit unit I_unit) -> Type) P; 123 | (unit : %s) === (unit : %s) @=> (I_unit : %s) => %s; 124 | (I_unitT : Type) === %s; 125 | (unitR : Type) === (I_unit : I_unitT) -> %s; 126 | (unitEq : (P : (x : unitR) -> Type) -> (x : P @unit) -> 127 | P ((I_unit : I_unitT) => %s)) === 128 | (P : (x : unitR) -> Type) => (x : P @unit) => %%expand P; 129 | unitEq 130 | |} 131 | *) 132 | let _code = 133 | Format.sprintf 134 | {| 135 | (UnitT : Type) === 136 | Unit @-> (I_Unit : %s) -> (unit : %s) -> (I_unit : %s) -> Type; 137 | (I_UnitT : (Unit : UnitT) -> Type) === 138 | (Unit : UnitT) => %s; 139 | (unitT : (Unit : UnitT) -> (I_Unit : I_UnitT Unit) -> Type) === 140 | (Unit : UnitT) => (I_Unit : I_UnitT Unit) => %s; 141 | (I_unitT : (Unit : UnitT) -> (I_Unit : I_UnitT Unit) -> 142 | (unit : unitT Unit I_Unit) -> Type) === 143 | (Unit : UnitT) => (I_Unit : I_UnitT Unit) => 144 | (unit : unitT Unit I_Unit) => %s; 145 | (Unit : UnitT) === (Unit : UnitT) @=> 146 | (I_Unit : I_UnitT Unit) => (unit : unitT Unit I_Unit) => 147 | (I_unit : I_unitT Unit I_Unit unit) => %s; 148 | (I_UnitT : Type) === I_UnitT Unit; 149 | (unitT : (I_Unit : I_UnitT) -> Type) === unitT Unit; 150 | (I_unitT : (I_Unit : I_UnitT) -> 151 | (unit : unitT I_Unit) -> Type) === I_unitT Unit; 152 | (UnitR : Type) === (I_Unit : I_UnitT) -> (unit : unitT I_Unit) -> 153 | (I_unit : I_unitT I_Unit unit) -> Type; 154 | (UnitEq : (P : (x : UnitR) -> Type) -> (x : P @Unit) -> 155 | P ( 156 | (I_Unit : I_UnitT) => (unit : unitT I_Unit) => 157 | (I_unit : I_unitT I_Unit unit) => %s) 158 | ) === (P : (x : UnitR) -> Type) => (x : P @Unit) => %%expand x; 159 | (I_Unit : I_UnitT) === (I_Unit : I_UnitT) @=> 160 | (unit : unitT I_Unit) => (I_unit : I_unitT I_Unit unit) => 161 | (P : (f : @Unit I_Unit unit I_unit) -> Type) => 162 | UnitEq ((Unit : UnitR) => (f : Unit I_Unit unit I_unit) -> Type) P; 163 | (unitT : Type) === unitT I_Unit; 164 | (I_unitT : (unit : unitT) -> Type) === I_unitT I_Unit; 165 | (unit : 
unitT) === (unit : unitT) @=> 166 | (I_unit : I_unitT unit) => %s; 167 | (I_unitT : Type) === I_unitT unit; 168 | (unitR : Type) === (I_unit : I_unitT) -> %s; 169 | (unitEq : (P : (x : unitR) -> Type) -> (x : P @unit) -> 170 | P ((I_unit : I_unitT) => %s)) === 171 | (P : (x : unitR) -> Type) => (x : P @unit) => %%expand x; 172 | (I_unit : I_unitT) === (I_unit : I_unitT) @=> 173 | (P : (c : @Unit I_Unit unit I_unit) -> Type) => 174 | (d : @I_Unit unit I_unit P (@unit I_unit)) => 175 | unitEq ((at_unit : unitR) => @I_Unit unit I_unit P (at_unit I_unit)) 176 | d; 177 | (UnitEq : (P : (_ : Type) -> Type) -> 178 | (x : P (@Unit I_Unit unit I_unit)) -> P (%s)) === 179 | (P : (_ : Type) -> Type) => 180 | (x : P (@Unit I_Unit unit I_unit)) => %%expand x; 181 | (RevUnitEq : (P : (_ : Type) -> Type) -> 182 | (x : P (%s)) -> P (@Unit I_Unit unit I_unit)) === 183 | (P : (_ : Type) -> Type) => 184 | UnitEq ((T : Type) => (x : P T) -> P (@Unit I_Unit unit I_unit)) 185 | ((x : P (@Unit I_Unit unit I_unit)) => x); 186 | 187 | (unitK : @Unit I_Unit unit I_unit) === RevUnitEq ((X : Type) => X) (@unit I_unit); 188 | @(%%expand unitK) 189 | |} 190 | t_i_Unit t_unit t_i_unit t_i_Unit t_unit t_i_unit b_Unit b_Unit b_unit 191 | b_Unit b_unit b_Unit b_Unit 192 | in 193 | let code = 194 | Format.sprintf 195 | {| 196 | (UnitT : Type) === 197 | Unit @-> (I_Unit : %s) -> (unit : %s) -> (I_unit : %s) -> Type; 198 | (I_UnitT : (Unit : UnitT) -> Type) === 199 | (Unit : UnitT) => %s; 200 | (unitT : (Unit : UnitT) -> (I_Unit : I_UnitT Unit) -> Type) === 201 | (Unit : UnitT) => (I_Unit : I_UnitT Unit) => %s; 202 | (I_unitT : (Unit : UnitT) -> (I_Unit : I_UnitT Unit) -> 203 | (unit : unitT Unit I_Unit) -> Type) === 204 | (Unit : UnitT) => (I_Unit : I_UnitT Unit) => 205 | (unit : unitT Unit I_Unit) => %s; 206 | (Unit : UnitT) === (Unit : UnitT) @=> 207 | (I_Unit : I_UnitT Unit) => (unit : unitT Unit I_Unit) => 208 | (I_unit : I_unitT Unit I_Unit unit) => %s; 209 | (I_UnitT : Type) === I_UnitT Unit; 
210 | (unitT : (I_Unit : I_UnitT) -> Type) === unitT Unit; 211 | (I_unitT : (I_Unit : I_UnitT) -> 212 | (unit : unitT I_Unit) -> Type) === I_unitT Unit; 213 | (UnitR : Type) === (I_Unit : I_UnitT) -> (unit : unitT I_Unit) -> 214 | (I_unit : I_unitT I_Unit unit) -> Type; 215 | (UnitEq : (P : (x : UnitR) -> Type) -> (x : P @Unit) -> 216 | P ( 217 | (I_Unit : I_UnitT) => (unit : unitT I_Unit) => 218 | (I_unit : I_unitT I_Unit unit) => %s) 219 | ) === (P : (x : UnitR) -> Type) => (x : P @Unit) => %%expand x; 220 | (I_Unit : I_UnitT) === (I_Unit : I_UnitT) @=> 221 | (unit : unitT I_Unit) => (I_unit : I_unitT I_Unit unit) => 222 | (P : (f : @Unit I_Unit unit I_unit) -> Type) => 223 | UnitEq ((Unit : UnitR) => (f : Unit I_Unit unit I_unit) -> Type) P; 224 | I_Unit 225 | |} 226 | t_i_Unit t_unit t_i_unit t_i_Unit t_unit t_i_unit b_Unit b_Unit 227 | in 228 | type_term "ind_Unit" code 229 | 230 | let _tests = 231 | [ 232 | id; 233 | id_propagate; 234 | sequence; 235 | sequence_propagate; 236 | bool; 237 | true_; 238 | true_propagate; 239 | false_; 240 | ind_False; 241 | ind_Unit; 242 | ] 243 | 244 | let tests = [ id; id_propagate; apply_erasable ] 245 | 246 | let type_term term = 247 | let term = Clexer.from_string Cparser.term_opt term in 248 | let term = Option.get term in 249 | let term = 250 | let loc = Location.none in 251 | Lparser.parse_term ~loc term 252 | in 253 | Styper.(Context.run @@ fun () -> infer_term term) 254 | 255 | let test { name; term } = 256 | let check () = 257 | let _type_ = type_term term in 258 | (* Format.eprintf "type_ : %a\n%!" 
Tprinter.pp_term type_; *) 259 | () 260 | in 261 | Alcotest.test_case name `Quick check 262 | 263 | let tests = ("tests", List.map test tests) 264 | let () = Alcotest.run "Typer" [ tests ] 265 | -------------------------------------------------------------------------------- /teikavsc/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig to read more about this file */ 4 | /* Projects */ 5 | // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ 6 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 7 | // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ 8 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ 9 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ 10 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ 11 | /* Language and Environment */ 12 | "target": "es2020", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ 13 | "lib": [ 14 | "es2020" 15 | ], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ 16 | // "jsx": "preserve", /* Specify what JSX code is generated. */ 17 | // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ 18 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ 19 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. 
*/ 20 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 21 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ 22 | // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ 23 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 24 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 25 | // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ 26 | /* Modules */ 27 | "module": "commonjs", /* Specify what module code is generated. */ 28 | // "rootDir": "./", /* Specify the root folder within your source files. */ 29 | "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ 30 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ 31 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 32 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ 33 | // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ 34 | // "types": [], /* Specify type package names to be included without being referenced in a source file. */ 35 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 36 | // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ 37 | // "resolveJsonModule": true, /* Enable importing .json files. */ 38 | // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. 
*/ 39 | /* JavaScript Support */ 40 | // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ 41 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ 42 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ 43 | /* Emit */ 44 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 45 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 46 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 47 | "sourceMap": false, /* Create source map files for emitted JavaScript files. */ 48 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ 49 | "outDir": "./dist", /* Specify an output folder for all emitted files. */ 50 | // "removeComments": true, /* Disable emitting comments. */ 51 | // "noEmit": true, /* Disable emitting files from a compilation. */ 52 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ 53 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ 54 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 55 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ 56 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 57 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ 58 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. 
*/ 59 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 60 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 61 | // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ 62 | // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ 63 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ 64 | // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ 65 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 66 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ 67 | /* Interop Constraints */ 68 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ 69 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ 70 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ 71 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ 72 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ 73 | /* Type Checking */ 74 | "strict": true, /* Enable all strict type-checking options. */ 75 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ 76 | // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ 77 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. 
*/ 78 | // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ 79 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 80 | // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ 81 | // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ 82 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 83 | // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ 84 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ 85 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 86 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ 87 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ 88 | // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ 89 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ 90 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ 91 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 92 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 93 | /* Completeness */ 94 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 95 | "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /teika/solve.ml: -------------------------------------------------------------------------------- 1 | open Utils 2 | open Syntax 3 | open Ctree 4 | open Ttree 5 | open Terror 6 | 7 | exception Solve_error of { loc : Location.t; exn : exn } 8 | 9 | (* TODO: context vs env *) 10 | type context = 11 | | Context of { names : (bool * Level.t) Name.Map.t; next : Level.t } 12 | 13 | let with_loc ~loc f = 14 | try f () with 15 | | Solve_error { loc; exn } -> 16 | (* TODO: reraise *) 17 | raise @@ Solve_error { loc; exn } 18 | | exn -> raise @@ Solve_error { loc; exn } 19 | 20 | let () = 21 | Printexc.register_printer @@ function 22 | | Solve_error { loc = _; exn } -> Some (Printexc.to_string exn) 23 | | _ -> None 24 | 25 | let split_pat_annot pat = 26 | let (Pat { struct_ = pat; loc = _ }) = pat in 27 | match pat with 28 | | P_annot { pat; annot } -> (pat, annot) 29 | (* TODO: support tuple here *) 30 | | P_var _ | P_tuple _ -> error_missing_annotation () 31 | 32 | let rec enter ctx pat = 33 | let (Pat { struct_ = pat; loc = _ }) = pat in 34 | (* TODO: use this location? 
*) 35 | match pat with 36 | | P_annot { pat; annot = _ } -> enter ctx pat 37 | | P_var { var = name } -> 38 | let (Context { names; next }) = ctx in 39 | Format.eprintf "hi\n%!"; 40 | let names = Name.Map.add name (false, next) names in 41 | let next = Level.next next in 42 | Context { names; next } 43 | | P_tuple { elements } -> 44 | List.fold_left (fun ctx el -> enter ctx el) ctx elements 45 | 46 | let rec name_of_var_pat pat = 47 | let (VPat { struct_ = pat; loc = _ }) = pat in 48 | match pat with 49 | | VP_annot { pat; annot = _ } -> name_of_var_pat pat 50 | | VP_var { var } -> var 51 | 52 | let open_hoist ctx pat = 53 | (* TODO: ensure that somehow all the hoists are closed *) 54 | let name = name_of_var_pat pat in 55 | let (Context { names; next }) = ctx in 56 | Format.eprintf "hi\n%!"; 57 | let names = Name.Map.add name (true, next) names in 58 | let next = Level.next next in 59 | Context { names; next } 60 | 61 | let close_hoist ctx pat = 62 | (* TODO: this is a bad API *) 63 | let name = name_of_var_pat pat in 64 | let (Context { names; next }) = ctx in 65 | let names = 66 | match Name.Map.find_opt name names with 67 | | Some (true, from) -> Name.Map.add name (false, from) names 68 | | Some (false, _from) -> failwith "compiler bug invalid name" 69 | | None -> failwith "close_hoist: compiler bug invalid name" 70 | in 71 | Context { names; next } 72 | 73 | let lookup ctx name = 74 | let (Context { names; next }) = ctx in 75 | match Name.Map.find_opt name names with 76 | | Some (is_open_hoist, from) -> ( 77 | match Level.offset ~from ~to_:next with 78 | | Some var -> (`hoist is_open_hoist, var) 79 | | None -> failwith "compiler bug invalid var") 80 | | None -> error_unknown_var ~name 81 | 82 | let is_hoist ctx name = 83 | let (Context { names; next }) = ctx in 84 | match Name.Map.find_opt name names with 85 | | Some (is_open_hoist, from) -> ( 86 | match Level.offset ~from ~to_:next with 87 | | Some var -> ( 88 | match is_open_hoist with true -> Some var | 
false -> None) 89 | | None -> None) 90 | | None -> None 91 | 92 | type meta_pat = 93 | | MP_simple of var_pat 94 | | MP_fancy of pat 95 | | MP_fix of Index.t * var_pat 96 | 97 | let rec pat_of_var_pat var_pat = 98 | let (VPat { struct_ = var_pat; loc }) = var_pat in 99 | match var_pat with 100 | | VP_annot { pat; annot } -> 101 | let pat = pat_of_var_pat pat in 102 | p_wrap ~loc @@ P_annot { pat; annot } 103 | | VP_var { var } -> p_wrap ~loc @@ P_var { var } 104 | 105 | let pat_not_fix meta_pat = 106 | match meta_pat with 107 | | MP_simple pat -> pat_of_var_pat pat 108 | | MP_fancy pat -> pat 109 | | MP_fix (_var, _pat) -> 110 | (* TODO: proper error here *) 111 | failwith "a variable with the same name is open on a hoist" 112 | 113 | let self_pat_simple meta_pat = 114 | match meta_pat with 115 | | MP_simple pat -> pat 116 | | MP_fancy _pat -> failwith "fancy patterns are not supported on self" 117 | | MP_fix (_var, _pat) -> 118 | failwith "a variable with the same name is open on a hoist" 119 | 120 | let rec solve_term ctx term = 121 | let (CTerm { term; loc }) = term in 122 | with_loc ~loc @@ fun () -> 123 | match term with 124 | | CT_parens { content = term } -> solve_term ctx term 125 | | CT_annot { value = term; annot } -> 126 | let annot = solve_term ctx annot in 127 | let term = solve_term ctx term in 128 | t_wrap ~loc @@ T_annot { term; annot } 129 | | CT_var { var = name } -> 130 | (* TODO: this could be treated as forward *) 131 | let `hoist _, var = lookup ctx name in 132 | t_wrap ~loc @@ T_var { var } 133 | | CT_semi { left; right } -> solve_semi ctx ~loc ~left ~right 134 | | CT_extension _ -> error_extensions_not_implemented () 135 | | CT_apply { funct; arg } -> 136 | let funct = solve_term ctx funct in 137 | let arg = solve_term ctx arg in 138 | t_wrap ~loc @@ T_apply { funct; arg } 139 | | CT_lambda { param; body } -> 140 | let bound = solve_pat ctx param in 141 | let bound = pat_not_fix bound in 142 | let body = 143 | let ctx = enter ctx bound in 
144 | solve_term ctx body 145 | in 146 | t_wrap ~loc @@ T_lambda { bound; body } 147 | | CT_forall { param; body } -> 148 | let bound = solve_pat ctx param in 149 | let bound = pat_not_fix bound in 150 | let bound, param = split_pat_annot bound in 151 | let body = 152 | let ctx = enter ctx bound in 153 | solve_term ctx body 154 | in 155 | t_wrap ~loc @@ T_forall { bound; param; body } 156 | | CT_pair { left; right } -> 157 | let left = solve_term ctx left in 158 | let acc = [ left ] in 159 | let elements = solve_term_tuple ctx ~acc ~right in 160 | t_wrap ~loc @@ T_tuple { elements } 161 | | CT_both { left; right } -> 162 | let bound = solve_pat ctx left in 163 | let bound = self_pat_simple bound in 164 | let body = 165 | (* TODO: this is hackish *) 166 | let bound = pat_of_var_pat bound in 167 | let ctx = enter ctx bound in 168 | solve_term ctx right 169 | in 170 | t_wrap ~loc @@ T_self { bound; body } 171 | | CT_bind _ | CT_number _ | CT_braces _ | CT_string _ -> 172 | error_invalid_notation () 173 | 174 | and solve_term_tuple ctx ~acc ~right = 175 | match 176 | let (CTerm { term = right; loc = _ }) = right in 177 | right 178 | with 179 | | CT_pair { left; right } -> 180 | let left = solve_term ctx left in 181 | let acc = left :: acc in 182 | solve_term_tuple ctx ~acc ~right 183 | | CT_var _ | CT_extension _ | CT_forall _ | CT_lambda _ | CT_apply _ 184 | | CT_both _ | CT_bind _ | CT_semi _ | CT_annot _ | CT_string _ | CT_number _ 185 | | CT_parens _ | CT_braces _ -> 186 | let right = solve_term ctx right in 187 | List.rev (right :: acc) 188 | 189 | and solve_semi ctx ~loc ~left ~right = 190 | let (CTerm { term = left_desc; loc = _ }) = left in 191 | match left_desc with 192 | | CT_bind { bound; value } -> ( 193 | let bound = solve_pat ctx bound in 194 | (* TODO: just clean this *) 195 | match bound with 196 | | MP_simple bound -> 197 | let bound = pat_of_var_pat bound in 198 | let arg = solve_term ctx value in 199 | let body = 200 | let ctx = enter ctx bound in 
201 | solve_term ctx right 202 | in 203 | t_wrap ~loc @@ T_let { bound; arg; body } 204 | | MP_fancy bound -> 205 | let arg = solve_term ctx value in 206 | let body = 207 | let ctx = enter ctx bound in 208 | solve_term ctx right 209 | in 210 | t_wrap ~loc @@ T_let { bound; arg; body } 211 | | MP_fix (var, bound) -> 212 | let arg = solve_term ctx value in 213 | let body = 214 | let ctx = close_hoist ctx bound in 215 | solve_term ctx right 216 | in 217 | t_wrap ~loc @@ T_fix { bound; var; arg; body }) 218 | | CT_annot { value = _; annot = _ } -> 219 | let bound = 220 | match solve_pat ctx left with 221 | | MP_simple pat -> pat 222 | | MP_fancy _pat -> failwith "fancy patterns are not supported on hoist" 223 | | MP_fix (_var, _pat) -> 224 | failwith "a variable with the same name is already open" 225 | in 226 | let body = 227 | let ctx = open_hoist ctx bound in 228 | solve_term ctx right 229 | in 230 | t_wrap ~loc @@ T_hoist { bound; body } 231 | | CT_parens _ | CT_var _ | CT_extension _ | CT_forall _ | CT_lambda _ 232 | | CT_apply _ | CT_pair _ | CT_both _ | CT_semi _ | CT_string _ | CT_number _ 233 | | CT_braces _ -> 234 | error_invalid_notation () 235 | 236 | (* TODO: this code is kind of ugly *) 237 | and solve_pat ctx pat = solve_pat_simple ctx pat 238 | 239 | and solve_pat_simple ctx pat = 240 | let (CTerm { term = pat_desc; loc }) = pat in 241 | (* TODO: a bit duplicated *) 242 | match pat_desc with 243 | | CT_parens { content = pat } -> solve_pat_simple ctx pat 244 | | CT_var { var = name } -> ( 245 | match is_hoist ctx name with 246 | | Some var -> MP_fix (var, vp_wrap ~loc @@ VP_var { var = name }) 247 | | None -> MP_simple (vp_wrap ~loc @@ VP_var { var = name })) 248 | | CT_annot { value = pat; annot } -> ( 249 | let annot = solve_term ctx annot in 250 | match solve_pat ctx pat with 251 | | MP_simple pat -> MP_simple (vp_wrap ~loc @@ VP_annot { pat; annot }) 252 | | MP_fancy pat -> MP_fancy (p_wrap ~loc @@ P_annot { pat; annot }) 253 | | MP_fix (var, pat) 
-> 254 | MP_fix (var, vp_wrap ~loc @@ VP_annot { pat; annot })) 255 | | CT_pair { left = _; right = _ } -> MP_fancy (solve_pat_fancy ctx pat) 256 | | CT_extension _ | CT_forall _ | CT_lambda _ | CT_apply _ | CT_both _ 257 | | CT_bind _ | CT_semi _ | CT_string _ | CT_number _ | CT_braces _ -> 258 | error_invalid_notation () 259 | 260 | and solve_pat_fancy ctx pat = 261 | (* TODO: no duplicated name on pattern *) 262 | (* TODO: to_ here *) 263 | let (CTerm { term = pat; loc }) = pat in 264 | match pat with 265 | | CT_parens { content = pat } -> solve_pat_fancy ctx pat 266 | | CT_var { var = name } -> ( 267 | match is_hoist ctx name with 268 | | Some _var -> failwith "hoist is not supported on fancy patterns" 269 | | None -> p_wrap ~loc @@ P_var { var = name }) 270 | | CT_annot { value = pat; annot } -> 271 | let annot = solve_term ctx annot in 272 | let pat = solve_pat_fancy ctx pat in 273 | p_wrap ~loc @@ P_annot { pat; annot } 274 | | CT_pair { left; right } -> 275 | let left = solve_pat_fancy ctx left in 276 | let acc = [ left ] in 277 | let elements = solve_pat_tuple ctx ~acc ~right in 278 | p_wrap ~loc @@ P_tuple { elements } 279 | | CT_extension _ | CT_forall _ | CT_lambda _ | CT_apply _ | CT_both _ 280 | | CT_bind _ | CT_semi _ | CT_string _ | CT_number _ | CT_braces _ -> 281 | error_invalid_notation () 282 | 283 | and solve_pat_tuple ctx ~acc ~right = 284 | match 285 | let (CTerm { term = right; loc = _ }) = right in 286 | right 287 | with 288 | | CT_pair { left; right } -> 289 | let left = solve_pat_fancy ctx left in 290 | let acc = left :: acc in 291 | solve_pat_tuple ctx ~acc ~right 292 | | CT_var _ | CT_extension _ | CT_forall _ | CT_lambda _ | CT_apply _ 293 | | CT_both _ | CT_bind _ | CT_semi _ | CT_annot _ | CT_string _ | CT_number _ 294 | | CT_parens _ | CT_braces _ -> 295 | let right = solve_pat_fancy ctx right in 296 | List.rev (right :: acc) 297 | 298 | (* external *) 299 | let solve_term ctx term = try Ok (solve_term ctx term) with exn -> Error 
exn 300 | 301 | let initial = 302 | (* TODO: duplicated from Typer *) 303 | let next = Level.(next zero) in 304 | (* TODO: predef somewhere *) 305 | (* TODO: rename Type to data *) 306 | let type_ = Name.make "Type" in 307 | let names = Name.Map.(empty |> add type_ (false, Level.zero)) in 308 | Context { names; next } 309 | -------------------------------------------------------------------------------- /teika/test.ml: -------------------------------------------------------------------------------- 1 | open Syntax 2 | 3 | module Typer = struct 4 | open Teika 5 | 6 | type test = 7 | | Check of { name : string; annotated_term : string } 8 | | Fail of { name : string; annotated_term : string } 9 | 10 | let check name annotated_term = Check { name; annotated_term } 11 | let fail name annotated_term = Fail { name; annotated_term } 12 | 13 | (* TODO: write tests for locations and names / offset *) 14 | (* TODO: write tests for escape check *) 15 | let univ_type = check "Type" {|(Type : Type)|} 16 | let string_type = check "String" {|(String : Type)|} 17 | let false_type = check "False" {|(A : Type) -> A|} 18 | 19 | let id = 20 | check "id" {|((A : Type) => (x : A) => x : (A : Type) -> (x : A) -> A)|} 21 | 22 | let id_propagate = 23 | check "id_propagate" {|((A => x => x) : (A : Type) -> (x : A) -> A)|} 24 | 25 | let id_unify = 26 | check "id_unify" {|((A => (x : A) => x) : (A : Type) -> (x : A) -> A)|} 27 | 28 | let let_id = 29 | check "let_id" 30 | {|(( 31 | id : (A : Type) -> (x : A) -> A = A => (x : A) => x; 32 | id 33 | ) : (A : Type) -> (x : A) -> A)|} 34 | 35 | let id_type = check "id_type" {|(((A : Type) => (x : A) => x) Type)|} 36 | 37 | let id_type_never = 38 | check "id_type_never" 39 | {|(((A : Type) => (x : A) => x) Type ((A : Type) -> A) 40 | : Type)|} 41 | 42 | let return_id_propagate = 43 | check "return_id_propagate" 44 | {|((((id : (A : Type) -> (x : A) -> A) => id) (A => x => x)) 45 | : (A : Type) -> (x : A) -> A)|} 46 | 47 | let sequence = 48 | check 
"sequence" 49 | {|((A => (x : A) => B => (y : B) => y) 50 | : (A : Type) -> (x : A) -> (B : Type) -> (y : B) -> B)|} 51 | 52 | let bool = 53 | check "bool" {|(((A : Type) -> (x : A) -> (y : A) -> A) 54 | : Type)|} 55 | 56 | let true_ = 57 | check "true" 58 | {|(((A : Type) => (x : A) => (y : A) => x) 59 | : (A : Type) -> (x : A) -> (y : A) -> A)|} 60 | 61 | let true_unify = 62 | check "true_unify" 63 | {|(((A : Type) => x => (y : A) => x) 64 | : (A : Type) -> (x : A) -> (y : A) -> A)|} 65 | 66 | let false_ = 67 | check "false" 68 | {|((A => (x : A) => (y : A) => y) 69 | : (A : Type) -> (x : A) -> (y : A) -> A)|} 70 | 71 | let ind_false_T = 72 | check "False_T" 73 | {| 74 | (@self(False -> (f : @self(f -> @unroll False f)) -> Type) : Type) 75 | |} 76 | 77 | let ind_false = 78 | check "False" 79 | {| 80 | (@fix(False => f => 81 | (P : (f : @self(f -> @unroll False f)) -> Type) -> P f 82 | ) : @self(False -> (f : @self(f -> @unroll False f)) -> Type)) 83 | |} 84 | 85 | let let_alias = 86 | check "let_alias" 87 | {| 88 | Id : (A : Type) -> Type = (A : Type) => A; 89 | ((A : Type) => (x : A) => (x : Id A)) 90 | |} 91 | 92 | let simple_string = check "simple_string" {|("simple string" : String)|} 93 | 94 | let rank_2_propagate = 95 | check "rank_2_propagate" 96 | {| 97 | Unit = (A : Type) -> (x : A) -> A; 98 | (u => u Unit u : (u : Unit) -> Unit) 99 | |} 100 | 101 | let rank_2_propagate_let = 102 | check "rank_2_propagate" 103 | {| 104 | Unit = (A : Type) -> (x : A) -> A; 105 | noop : (u : Unit) -> Unit = u => u Unit u; 106 | noop 107 | |} 108 | 109 | let invalid_annotation = fail "invalid_annotation" {|(String : "A")|} 110 | let simplest_escape_check = fail "simplest_escape_check" "x => A => (x : A)" 111 | 112 | let bound_var_escape_check = 113 | fail "bound_var_escape_check" 114 | {| 115 | call = f => v => f v; 116 | (never : (A : Type) -> A) => call never 117 | |} 118 | 119 | let hole_lowering_check = 120 | fail "hole_lowering_check" 121 | {| 122 | x => (A : Type) => 
y => (id => (_ = (id x); _ = id y; (y : A))) (x => x) 123 | |} 124 | 125 | let trivial_equality = 126 | check "trivial_equality" 127 | {| 128 | Eq = (A : Type) => (x : A) => (y : A) => 129 | (P : (z : A) -> Type) -> (l : P x) -> P y; 130 | refl = (A : Type) => (x : A) => 131 | (P : (z : A) -> Type) => (l : P x) => l; 132 | (refl Type Type : Eq Type Type Type) 133 | |} 134 | 135 | let split_at_a_distance = 136 | check "split_at_a_distance" 137 | {| 138 | (l : Type) => 139 | (f : (X = Type; (A : X) => (x : A) -> A) Type) => f l 140 | |} 141 | 142 | let nat_256_equality = 143 | check "nat_256_equality" 144 | {| 145 | Eq = (A : Type) => (x : A) => (y : A) => 146 | (P : (z : A) -> Type) -> (l : P x) -> P y; 147 | refl : (A : Type) -> (x : A) -> Eq A x x 148 | = (A : Type) => (x : A) => 149 | (P : (z : A) -> Type) => (l : P x) => l; 150 | 151 | Nat = (A : Type) -> (z : A) -> (s : (acc : A) -> A) -> A; 152 | zero : Nat = (A : Type) => (z : A) => (s : (acc : A) -> A) => z; 153 | succ : (pred : Nat) -> Nat = (pred : Nat) => 154 | (A : Type) => (z : A) => (s : (acc : A) -> A) => s (pred A z s); 155 | one = succ zero; 156 | 157 | add = (a : Nat) => (b : Nat) => a Nat b succ; 158 | mul = (a : Nat) => (b : Nat) => a Nat zero ((n : Nat) => add n b); 159 | 160 | two = succ one; 161 | three = succ two; 162 | four = add two two; 163 | eight = add four four; 164 | sixteen = add eight eight; 165 | n256 = mul sixteen sixteen; 166 | sixteen_is_eight_times_two : Eq Nat sixteen (mul eight two) 167 | = refl Nat sixteen; 168 | (refl Nat n256 : Eq Nat (mul (mul eight eight) four) n256) 169 | |} 170 | 171 | let simple_alpha_rename = 172 | check "simple_alpha_rename" 173 | {|( (f : (B : Type) -> B) => (f ((C : Type) -> C) : (D : Type) -> D) 174 | : (f : (E : Type) -> E) -> (F : Type) -> F)|} 175 | 176 | let _tests = 177 | [ 178 | id_propagate; 179 | id_unify; 180 | let_id; 181 | return_id_propagate; 182 | sequence; 183 | true_unify; 184 | false_; 185 | ind_false_T; 186 | ind_false; 187 | 
rank_2_propagate; 188 | rank_2_propagate_let; 189 | simplest_escape_check; 190 | ] 191 | 192 | let _tests = 193 | [ 194 | univ_type; 195 | string_type; 196 | false_type; 197 | id; 198 | (* 199 | id_propagate; 200 | id_unify; 201 | let_id; 202 | *) 203 | id_type; 204 | id_type_never; 205 | (* return_id_propagate; *) 206 | (* sequence; *) 207 | bool; 208 | true_; 209 | (* true_unify; *) 210 | (* false_; *) 211 | let_alias; 212 | simple_string; 213 | (* 214 | ind_false_T; 215 | ind_false; 216 | rank_2_propagate; 217 | rank_2_propagate_let; 218 | *) 219 | invalid_annotation; 220 | (* simplest_escape_check; *) 221 | bound_var_escape_check; 222 | hole_lowering_check; 223 | trivial_equality; 224 | split_at_a_distance; 225 | nat_256_equality; 226 | simple_alpha_rename; 227 | ] 228 | 229 | let _tests = 230 | [ 231 | check "nat_256_equality" 232 | {| 233 | Eq : (A : Type) -> (x : A) -> (y : A) -> Type 234 | = A => x => y => (P : (z : A) -> Type) -> (l : P x) -> P y; 235 | refl : (A : Type) -> (x : A) -> Eq A x x 236 | = A => x => P => l => l; 237 | 238 | Nat = (A : Type) -> (z : A) -> (s : (acc : A) -> A) -> A; 239 | zero : Nat = A => z => s => z; 240 | succ : (pred : Nat) -> Nat = pred => A => z => s => s (pred A z s); 241 | one = succ zero; 242 | 243 | add : (a : Nat) -> (b : Nat) -> Nat 244 | = a => b => a Nat b succ; 245 | mul : (a : Nat) -> (b : Nat) -> Nat 246 | = a => b => a Nat zero (n => add n b); 247 | 248 | two = succ one; 249 | three = succ two; 250 | four = add two two; 251 | eight = add four four; 252 | sixteen = add eight eight; 253 | n256 = mul sixteen sixteen; 254 | n512 = mul n256 two; 255 | (refl Nat n512 : Eq Nat (mul (mul eight eight) eight) n512) 256 | |}; 257 | ] 258 | 259 | let tests = 260 | [ 261 | check "fix" 262 | {| 263 | Never : Type; 264 | Never = (A : Type) -> A; 265 | 266 | Unit : Type; 267 | unit : Unit; 268 | 269 | Unit = (u : Unit) & (P : (u : Unit) -> Type) -> (x : P(unit)) -> P(u); 270 | unit = P => x => x; 271 | ind_unit : (u : Unit) -> 
(P : (u : Unit) -> Type) -> 272 | (x : P(unit)) -> P(u) = u => u; 273 | 274 | Bool : Type; 275 | true : Bool; 276 | false : Bool; 277 | 278 | Bool = (b : Bool) & (P : (b : Bool) -> Type) -> 279 | (x : P(true)) -> (y : P(false)) -> P(b); 280 | true = P => x => y => x; 281 | false = P => x => y => y; 282 | ind_bool : (b : Bool) -> (P : (b : Bool) -> Type) -> 283 | (x : P(true)) -> (y : P(false)) -> P(b) = b => b; 284 | 285 | Equal : (A : Type) -> (x : A) -> (y : A) -> Type; 286 | refl : (A : Type) -> (x : A) -> Equal A x x; 287 | 288 | Equal = A => x => y => (eq : Equal A x y) & 289 | (P : (z : A) -> Type) -> (v : P(x)) -> P(y); 290 | refl = A => x => P => v => v; 291 | 292 | transport : (A : Type) -> (x : A) -> (y : A) -> 293 | (H : Equal A x y) -> (P : (z : A) -> Type) -> (v : P(x)) -> P(y); 294 | transport = A => x => y => H => H; 295 | 296 | true_not_false : (H : Equal(Bool)(true)(false)) -> Never; 297 | true_not_false = H => ( 298 | P : (b : Bool) -> Type = b => ind_bool(b)(_ => Type)(Unit)(Never); 299 | transport(Bool)(true)(false)(H)(P)(unit) 300 | ); 301 | 302 | id : (A : Type) -> (x : A) -> A = ( 303 | (A : Type) => (x : A) => x 304 | ); 305 | 306 | true 307 | |}; 308 | ] 309 | 310 | (* alcotest *) 311 | let test test = 312 | let check ~name ~annotated_term = 313 | Alcotest.test_case name `Quick @@ fun () -> 314 | let ctree = 315 | match Clexer.from_string Cparser.term_opt annotated_term with 316 | | Some ctree -> ctree 317 | | None -> failwith "failed to parse" 318 | in 319 | let ttree = 320 | match Solve.(solve_term initial ctree) with 321 | | Ok ttree -> ttree 322 | | Error exn -> 323 | failwith 324 | @@ Format.asprintf "failed to infer types: %s" 325 | (Printexc.to_string exn) 326 | in 327 | Format.eprintf "ttree : %a\n%!" Ttree.pp_term ttree; 328 | match Typer.infer_term ttree with 329 | | Ok _type_ -> Format.eprintf "typed\n%!" 
330 | | Error exn -> 331 | failwith 332 | @@ Format.asprintf "failed to infer types: %s" 333 | (Printexc.to_string exn) 334 | in 335 | let fail ~name ~annotated_term = 336 | Alcotest.test_case name `Quick @@ fun () -> 337 | let ctree = 338 | match Clexer.from_string Cparser.term_opt annotated_term with 339 | | Some ctree -> ctree 340 | | None -> failwith "failed to parse" 341 | in 342 | let ttree = 343 | match Solve.(solve_term initial ctree) with 344 | | Ok ttree -> ttree 345 | | Error exn -> 346 | failwith 347 | @@ Format.asprintf "failed to infer types: %s" 348 | (Printexc.to_string exn) 349 | in 350 | match Typer.infer_term ttree with 351 | | Ok _type_ -> failwith "worked but should had failed" 352 | | Error _exn -> () 353 | in 354 | match test with 355 | | Check { name; annotated_term } -> check ~name ~annotated_term 356 | | Fail { name; annotated_term } -> fail ~name ~annotated_term 357 | 358 | let tests = ("typer", List.map test tests) 359 | end 360 | 361 | let () = Alcotest.run "Teika" [ Typer.tests ] 362 | 363 | (* TODO: (n : Nat & n >= 1, x : Nat) should be valid 364 | *) 365 | -------------------------------------------------------------------------------- /teika/typer.ml: -------------------------------------------------------------------------------- 1 | open Utils 2 | open Ttree 3 | open Terror 4 | 5 | module Value : sig 6 | type value 7 | 8 | and value_struct = 9 | | V_hole 10 | (* TODO: name on var? 
*) 11 | | V_var of { name : Name.t } 12 | | V_forward of { name : Name.t; mutable inner : value } 13 | | V_apply of { funct : value; arg : value } 14 | | V_lambda of { env : env; bound : pat; body : term } 15 | | V_univ 16 | | V_forall of { param : value; env : env; bound : pat; body : term } 17 | | V_self of { env : env; bound : var_pat; body : term } 18 | | V_thunk of { env : env; term : term } 19 | | V_link of { mutable value : value } 20 | 21 | and env [@@deriving show] 22 | 23 | (* environment *) 24 | val empty : env 25 | val access : env -> Index.t -> value 26 | val append : env -> value -> env 27 | 28 | (* constructors *) 29 | val v_null : value 30 | val v_var : at:Level.t -> name:Name.t -> value 31 | val fresh_v_hole : at:Level.t -> value 32 | val fresh_v_forward : name:Name.t -> value 33 | val v_apply : funct:value -> arg:value -> value 34 | val v_lambda : env:env -> bound:pat -> body:term -> value 35 | val v_univ : value 36 | val v_forall : param:value -> env:env -> bound:pat -> body:term -> value 37 | val v_self : env:env -> bound:var_pat -> body:term -> value 38 | val v_thunk : env:env -> term:term -> value 39 | 40 | (* utilities *) 41 | val repr : value -> value 42 | val struct_ : value -> value_struct 43 | val level : value -> Level.t 44 | val same : value -> value -> bool 45 | val assert_forward : value -> unit 46 | val init_forward : value -> to_:value -> unit 47 | val lock_forward : value -> (unit -> 'a) -> 'a 48 | val hole_lower : value -> to_:Level.t -> unit 49 | val hole_link : value -> to_:value -> unit 50 | val thunk_link : value -> to_:value -> unit 51 | end = struct 52 | type value = { mutable struct_ : value_struct; mutable at : Level.t } 53 | 54 | and value_struct = 55 | | V_hole 56 | | V_var of { name : Name.t } 57 | | V_forward of { name : Name.t; mutable inner : value [@opaque] } 58 | | V_apply of { funct : value; arg : value } 59 | | V_lambda of { env : env; [@opaque] bound : pat; body : term } 60 | (* TODO: is univ actually needed or 
useful here? *) 61 | | V_univ 62 | (* TODO: non dependent version of types and function *) 63 | | V_forall of { 64 | param : value; 65 | env : env; [@opaque] 66 | bound : pat; 67 | body : term; 68 | } 69 | | V_self of { env : env; [@opaque] bound : var_pat; body : term } 70 | | V_thunk of { env : env; [@opaque] term : term } 71 | | V_link of { mutable value : value } 72 | 73 | and env = value list [@@deriving show { with_path = false }] 74 | 75 | let v_new ~at struct_ = { struct_; at } 76 | 77 | let v_null = 78 | let name = Name.make "**null**" in 79 | v_new ~at:Level.zero @@ V_var { name } 80 | 81 | let v_var ~at ~name = v_new ~at @@ V_var { name } 82 | let fresh_v_hole ~at = v_new ~at @@ V_hole 83 | 84 | let fresh_v_forward ~name = 85 | (* TODO: proper level here *) 86 | let at = Level.zero in 87 | v_new ~at @@ V_forward { name; inner = v_null } 88 | 89 | let v_apply ~funct ~arg = 90 | let at = Level.max funct.at arg.at in 91 | v_new ~at @@ V_apply { funct; arg } 92 | 93 | let v_lambda ~env ~bound ~body = 94 | (* TODO: proper level for lambdas *) 95 | let at = Level.zero in 96 | v_new ~at @@ V_lambda { env; bound; body } 97 | 98 | let v_univ = v_new ~at:Level.zero @@ V_univ 99 | 100 | let v_forall ~param ~env ~bound ~body = 101 | (* TODO: proper level for forall *) 102 | let at = Level.zero in 103 | v_new ~at @@ V_forall { param; env; bound; body } 104 | 105 | let v_self ~env ~bound ~body = 106 | (* TODO: proper level for self *) 107 | let at = Level.zero in 108 | v_new ~at @@ V_self { env; bound; body } 109 | 110 | let v_thunk ~env ~term = 111 | (* TODO: proper level here *) 112 | let at = Level.zero in 113 | v_new ~at @@ V_thunk { env; term } 114 | 115 | let rec repr value = 116 | match value.struct_ with 117 | | V_link { value } -> repr value 118 | | V_hole | V_var _ | V_forward _ | V_apply _ | V_lambda _ | V_univ 119 | | V_forall _ | V_self _ | V_thunk _ -> 120 | value 121 | 122 | (* TODO: inline repr? 
*) 123 | let repr value = 124 | match value.struct_ with 125 | | V_link ({ value } as link) -> 126 | (* path compression *) 127 | let value = repr value in 128 | link.value <- value; 129 | value 130 | | V_hole | V_var _ | V_forward _ | V_apply _ | V_lambda _ | V_univ 131 | | V_forall _ | V_self _ | V_thunk _ -> 132 | value 133 | 134 | let struct_ value = (repr value).struct_ 135 | 136 | (* TODO: level vs at *) 137 | let level value = (repr value).at 138 | let same (left : value) (right : value) = left == right 139 | 140 | let assert_forward value = 141 | match value.struct_ with 142 | | V_forward { name = _; inner = _ } -> () 143 | | V_hole | V_var _ | V_apply _ | V_lambda _ | V_univ | V_forall _ | V_self _ 144 | | V_thunk _ | V_link _ -> 145 | failwith "assert_forward: not a forward" 146 | 147 | let init_forward value ~to_ = 148 | let value = repr value in 149 | match value.struct_ with 150 | | V_forward ({ name = _; inner } as forward) -> ( 151 | match same inner v_null with 152 | | true -> forward.inner <- to_ 153 | | false -> failwith "init_forward: already initialized") 154 | | V_hole | V_var _ | V_apply _ | V_lambda _ | V_univ | V_forall _ | V_self _ 155 | | V_thunk _ | V_link _ -> 156 | failwith "init_forward: not a forward" 157 | 158 | let lock_forward value f = 159 | match struct_ value with 160 | | V_forward ({ name = _; inner } as forward) -> 161 | forward.inner <- v_null; 162 | let finally () = forward.inner <- inner in 163 | Fun.protect ~finally f 164 | | V_hole | V_var _ | V_apply _ | V_lambda _ | V_univ | V_forall _ | V_self _ 165 | | V_thunk _ | V_link _ -> 166 | failwith "lock_forward: not a forward" 167 | 168 | let hole_lower hole ~to_ = 169 | let hole = repr hole in 170 | (match hole.struct_ with 171 | | V_hole -> () 172 | | _ -> failwith "hole_lower: not a hole"); 173 | hole.at <- Level.min hole.at to_ 174 | 175 | let hole_link hole ~to_ = 176 | let hole = repr hole in 177 | (match hole.struct_ with 178 | | V_hole -> () 179 | | _ -> failwith 
"link_hole: not a hole"); 180 | hole.struct_ <- V_link { value = to_ } 181 | 182 | let thunk_link thunk ~to_ = 183 | let thunk = repr thunk in 184 | (match thunk.struct_ with 185 | | V_thunk _ -> () 186 | | _ -> failwith "link_thunk: not a thunk"); 187 | thunk.struct_ <- V_link { value = to_ } 188 | 189 | let empty = [] 190 | 191 | let access env var = 192 | let var = (var : Index.t :> int) in 193 | match List.nth_opt env var with 194 | | Some value -> value 195 | | None -> failwith "lookup: unknown variable" 196 | 197 | let append env value = value :: env 198 | end 199 | 200 | module Eval = struct 201 | open Value 202 | 203 | let rec with_var_pat env bound ~arg = 204 | let (VPat { struct_ = bound; loc = _ }) = bound in 205 | match bound with 206 | | VP_annot { pat; annot = _ } -> with_var_pat env pat ~arg 207 | | VP_var { var = _ } -> 208 | (* TODO: name and maybe type here? *) 209 | append env arg 210 | 211 | let rec with_pat env bound ~arg = 212 | let (Pat { struct_ = bound; loc = _ }) = bound in 213 | match bound with 214 | | P_annot { pat; annot = _ } -> with_pat env pat ~arg 215 | | P_var { var = _ } -> 216 | (* TODO: name and maybe type here? 
*) 217 | append env arg 218 | | P_tuple { elements = _ } -> failwith "not implemented" 219 | 220 | let rec fresh_v_forward_of_var_pat pat = 221 | let (VPat { struct_ = pat; loc = _ }) = pat in 222 | match pat with 223 | | VP_annot { pat; annot = _ } -> fresh_v_forward_of_var_pat pat 224 | | VP_var { var = name } -> fresh_v_forward ~name 225 | 226 | let rec eval env term = 227 | let (Term { struct_ = term; loc = _ }) = term in 228 | match term with 229 | | T_annot { term; annot = _ } -> eval env term 230 | | T_var { var } -> weak_force @@ access env var 231 | | T_hoist { bound; body } -> 232 | let env = 233 | let arg = fresh_v_forward_of_var_pat bound in 234 | with_var_pat env bound ~arg 235 | in 236 | eval env body 237 | | T_fix { bound = _; var; arg; body } -> 238 | let forward = access env var in 239 | let () = assert_forward forward in 240 | let () = 241 | let arg = eval env arg in 242 | init_forward forward ~to_:arg 243 | in 244 | eval env body 245 | | T_let { bound; arg; body } -> 246 | let env = 247 | let arg = eval env arg in 248 | with_pat env bound ~arg 249 | in 250 | eval env body 251 | | T_apply { funct; arg } -> 252 | let funct = eval env funct in 253 | let arg = eval env arg in 254 | eval_apply ~funct ~arg 255 | | T_lambda { bound; body } -> v_lambda ~env ~bound ~body 256 | | T_forall { bound; param; body } -> 257 | let param = eval env param in 258 | v_forall ~param ~env ~bound ~body 259 | | T_self { bound; body } -> v_self ~env ~bound ~body 260 | | T_tuple _ | T_exists _ -> failwith "not implemented" 261 | 262 | and eval_apply ~funct ~arg = 263 | let funct = weak_force funct in 264 | match struct_ funct with 265 | | V_lambda { env; bound; body } -> 266 | let env = with_pat env bound ~arg in 267 | eval env body 268 | | V_var _ | V_forward _ | V_apply _ -> v_apply ~funct ~arg 269 | | V_hole | V_univ | V_forall _ | V_self _ -> 270 | failwith "eval_apply: type clash" 271 | | V_link _ | V_thunk _ -> failwith "eval_apply: unreacheable" 272 | 273 | and 
weak_force value = 274 | (* TODO: forcing every time removes some short circuits *) 275 | let value = repr value in 276 | match struct_ value with 277 | | V_thunk { env; term } -> 278 | (* TODO: detect recursive force? *) 279 | let final = eval env term in 280 | thunk_link value ~to_:final; 281 | final 282 | | V_hole | V_var _ | V_forward _ | V_apply _ | V_lambda _ | V_univ 283 | | V_forall _ | V_self _ | V_link _ -> 284 | value 285 | 286 | let rec strong_force value = 287 | (* TODO: forcing every time removes some short circuits *) 288 | (* TODO: path compression is bad for reasons *) 289 | let value = weak_force value in 290 | match struct_ value with 291 | | V_forward { name = _; inner } -> ( 292 | match same inner v_null with 293 | | true -> value 294 | | false -> strong_force inner) 295 | | V_apply { funct; arg } -> 296 | let funct = strong_force funct in 297 | strong_eval_apply ~funct ~arg 298 | | V_hole | V_var _ | V_lambda _ | V_univ | V_forall _ | V_self _ | V_link _ 299 | | V_thunk _ -> 300 | value 301 | 302 | and strong_eval_apply ~funct ~arg = 303 | match struct_ funct with 304 | | V_lambda { env; bound; body } -> 305 | let env = with_pat env bound ~arg in 306 | strong_force @@ eval env body 307 | | V_var _ | V_forward _ | V_apply _ -> v_apply ~funct ~arg 308 | | V_hole | V_univ | V_forall _ | V_self _ -> 309 | failwith "strong_eval_apply: type clash" 310 | | V_link _ | V_thunk _ -> failwith "strong_eval_apply: unreacheable" 311 | end 312 | 313 | module Unify = struct 314 | open Value 315 | open Eval 316 | 317 | let rec unify_check ~at ~hole in_ = 318 | (* TODO: short circuit on level *) 319 | (* TODO: color to avoid size explosion *) 320 | let in_ = weak_force in_ in 321 | match struct_ in_ with 322 | | V_hole -> ( 323 | match same hole in_ with 324 | | true -> failwith "occurs check" 325 | | false -> hole_lower in_ ~to_:at) 326 | | V_var { name = _ } -> ( 327 | (* TODO: poly comparison *) 328 | match level in_ >= at with 329 | | true -> failwith 
"escape check" 330 | | false -> ()) 331 | | V_forward { name = _; inner } -> 332 | lock_forward in_ @@ fun () -> unify_check ~at ~hole inner 333 | | V_apply { funct; arg } -> 334 | unify_check ~at ~hole funct; 335 | unify_check ~at ~hole arg 336 | | V_univ -> () 337 | | V_lambda { env; bound = _; body } -> unify_check_under ~at ~hole env body 338 | | V_forall { param; env; bound = _; body } -> 339 | unify_check ~at ~hole param; 340 | unify_check_under ~at ~hole env body 341 | | V_self { env; bound = _; body } -> unify_check_under ~at ~hole env body 342 | | V_thunk _ | V_link _ -> failwith "unify_check: unreacheable" 343 | 344 | and unify_check_under ~at ~hole env body = 345 | (* TODO: fill this *) 346 | let name = Name.make "**unify_check_under**" in 347 | let skolem = v_var ~at ~name in 348 | let at = Level.next at in 349 | let body = 350 | let env = append env skolem in 351 | eval env body 352 | in 353 | unify_check ~at ~hole body 354 | 355 | let unify_hole ~at ~hole ~to_ = 356 | match same hole to_ with 357 | | true -> () 358 | | false -> 359 | unify_check ~at ~hole to_; 360 | hole_link hole ~to_ 361 | 362 | let rec unify ~at lhs rhs = 363 | (* TODO: do repr shortcircuit first *) 364 | let lhs = weak_force lhs in 365 | let rhs = weak_force rhs in 366 | match same lhs rhs with true -> () | false -> unify_struct ~at lhs rhs 367 | 368 | and unify_struct ~at lhs rhs = 369 | match (struct_ lhs, struct_ rhs) with 370 | | V_hole, _ -> unify_hole ~at ~hole:lhs ~to_:rhs 371 | | _, V_hole -> unify_hole ~at ~hole:rhs ~to_:lhs 372 | | V_var { name = _ }, V_var { name = _ } -> failwith "var clash" 373 | | ( V_forward { name = lhs_name; inner = lhs_inner }, 374 | V_forward { name = rhs_name; inner = rhs_inner } ) -> ( 375 | match same lhs_inner v_null || same rhs_inner v_null with 376 | | true -> 377 | failwith 378 | @@ Format.asprintf "forward clash: %s == %s" (Name.repr lhs_name) 379 | (Name.repr rhs_name) 380 | | false -> 381 | (* TODO: is this a good idea? 
*) 382 | lock_forward lhs @@ fun () -> 383 | lock_forward rhs @@ fun () -> unify ~at lhs_inner rhs_inner) 384 | | ( V_apply { funct = lhs_funct; arg = lhs_arg }, 385 | V_apply { funct = rhs_funct; arg = rhs_arg } ) -> 386 | unify ~at lhs_funct rhs_funct; 387 | unify ~at lhs_arg rhs_arg 388 | | ( V_lambda { env = lhs_env; bound = _; body = lhs_body }, 389 | V_lambda { env = rhs_env; bound = _; body = rhs_body } ) -> 390 | unify_under ~at lhs_env lhs_body rhs_env rhs_body 391 | | V_univ, V_univ -> () 392 | | ( V_forall { param = lhs_param; env = lhs_env; bound = _; body = lhs_body }, 393 | V_forall 394 | { param = rhs_param; env = rhs_env; bound = _; body = rhs_body } ) -> 395 | unify ~at lhs_param rhs_param; 396 | unify_under ~at lhs_env lhs_body rhs_env rhs_body 397 | | ( V_self { env = lhs_env; bound = _; body = lhs_body }, 398 | V_self { env = rhs_env; bound = _; body = rhs_body } ) -> 399 | (* TODO: check only bound? *) 400 | unify_under ~at lhs_env lhs_body rhs_env rhs_body 401 | | ( ( V_var _ | V_forward _ | V_apply _ | V_lambda _ | V_univ | V_forall _ 402 | | V_self _ | V_thunk _ | V_link _ ), 403 | ( V_var _ | V_forward _ | V_apply _ | V_lambda _ | V_univ | V_forall _ 404 | | V_self _ | V_thunk _ | V_link _ ) ) -> 405 | error_type_clash () 406 | 407 | and unify_under ~at lhs_env lhs rhs_env rhs = 408 | (* TODO: should use pattern *) 409 | (* TODO: fill this *) 410 | let name = Name.make "**unify_check_under**" in 411 | let skolem = v_var ~at ~name in 412 | let at = Level.next at in 413 | let lhs = 414 | let lhs_env = append lhs_env skolem in 415 | eval lhs_env lhs 416 | in 417 | let rhs = 418 | let rhs_env = append rhs_env skolem in 419 | eval rhs_env rhs 420 | in 421 | unify ~at lhs rhs 422 | end 423 | 424 | module Machinery = struct 425 | open Value 426 | open Eval 427 | 428 | let rec inst_self ~self type_ = 429 | let type_ = strong_force type_ in 430 | match struct_ @@ type_ with 431 | | V_self { env; bound; body } -> 432 | let type_ = 433 | let env = 
with_var_pat env bound ~arg:self in 434 | eval env body 435 | in 436 | inst_self ~self type_ 437 | | V_hole | V_var _ | V_forward _ | V_apply _ | V_lambda _ | V_univ 438 | | V_forall _ | V_thunk _ | V_link _ -> 439 | type_ 440 | 441 | let split_forall value = 442 | let value = strong_force value in 443 | match struct_ value with 444 | | V_forall { param; env; bound; body } -> (param, env, bound, body) 445 | | V_hole -> failwith "hole is not a forall" 446 | | V_var _ | V_forward _ | V_apply _ | V_lambda _ | V_univ | V_self _ 447 | | V_thunk _ | V_link _ -> 448 | failwith "not a forall" 449 | 450 | let coerce ~at ~self lhs rhs = 451 | let lhs = inst_self ~self lhs in 452 | (* TODO: this is really bad *) 453 | let rhs = inst_self ~self rhs in 454 | Format.eprintf "%a == %a\n%!" pp_value lhs pp_value rhs; 455 | Unify.unify ~at lhs rhs 456 | end 457 | 458 | open Value 459 | open Eval 460 | open Unify 461 | open Machinery 462 | 463 | type value = Value.value 464 | 465 | (* infer *) 466 | type vars = Vars of { types : (Name.t * value) list } 467 | [@@ocaml.unboxed] [@@deriving show { with_path = false }] 468 | 469 | let rec v_skolem ~at pat = 470 | let (Pat { struct_ = pat; loc = _ }) = pat in 471 | match pat with 472 | | P_annot { pat; annot = _ } -> v_skolem ~at pat 473 | | P_var { var = name } -> v_var ~at ~name 474 | | P_tuple _ -> failwith "not implemented" 475 | 476 | let rec v_skolem_of_var_pat ~at pat = 477 | let (VPat { struct_ = pat; loc = _ }) = pat in 478 | match pat with 479 | | VP_annot { pat; annot = _ } -> v_skolem_of_var_pat ~at pat 480 | | VP_var { var = name } -> v_var ~at ~name 481 | 482 | let rec enter vars pat ~type_ = 483 | let (Pat { struct_ = pat; loc = _ }) = pat in 484 | match pat with 485 | | P_annot { pat; annot = _ } -> enter vars pat ~type_ 486 | | P_var { var = name } -> 487 | let (Vars { types }) = vars in 488 | (* TODO: why this *) 489 | let type_ = 490 | (* TODO: thunk strong force *) 491 | strong_force type_ 492 | in 493 | let types = 
(name, type_) :: types in 494 | Vars { types } 495 | | P_tuple _ -> failwith "not implemented" 496 | 497 | let rec enter_var_pat vars pat ~type_ = 498 | let (VPat { struct_ = pat; loc = _ }) = pat in 499 | match pat with 500 | | VP_annot { pat; annot = _ } -> enter_var_pat vars pat ~type_ 501 | | VP_var { var = name } -> 502 | let (Vars { types }) = vars in 503 | (* TODO: why this *) 504 | let type_ = 505 | (* TODO: thunk strong force *) 506 | strong_force type_ 507 | in 508 | let types = (name, type_) :: types in 509 | Vars { types } 510 | 511 | let solve vars env var = 512 | let rec solve types var = 513 | match (types, var) with 514 | | (_name, type_) :: _types, 0 -> type_ 515 | | (_name, _type) :: types, var -> solve types (var - 1) 516 | | [], _var -> 517 | (* TODO: this is a problem *) 518 | failwith "unexpected unbound variable" 519 | in 520 | let (Vars { types }) = vars in 521 | let self = access env var in 522 | let var = ((var : Index.t) :> int) in 523 | let type_ = solve types var in 524 | inst_self ~self type_ 525 | 526 | let rec type_of_pat env pat ~type_ = 527 | let (Pat { struct_ = pat; loc = _ }) = pat in 528 | match pat with 529 | | P_annot { pat; annot } -> 530 | let type_ = v_thunk ~env ~term:annot in 531 | type_of_pat env pat ~type_ 532 | | P_var { var = _ } -> type_ 533 | | P_tuple _ -> failwith "not implemented" 534 | 535 | let rec type_of_var_pat env pat ~type_ = 536 | let (VPat { struct_ = pat; loc = _ }) = pat in 537 | match pat with 538 | | VP_annot { pat; annot } -> 539 | let type_ = v_thunk ~env ~term:annot in 540 | type_of_var_pat env pat ~type_ 541 | | VP_var { var = _ } -> type_ 542 | 543 | (* TODO: ideally ensure that infer_term returns head normalized type *) 544 | let rec infer_term vars env ~at term = 545 | let expected_self = None in 546 | let expected = fresh_v_hole ~at in 547 | check_term vars env ~at term ~expected_self ~expected; 548 | (* TODO: is this correct or a good idea? 
*) 549 | let self = v_thunk ~env ~term in 550 | inst_self ~self expected 551 | 552 | and check_term vars env ~at term ~expected_self ~expected = 553 | (* TODO: not principled, let and annot will break this *) 554 | let (Term { struct_ = term; loc = _ }) = term in 555 | match term with 556 | | T_annot { term; annot } -> 557 | let annot = check_annot vars env ~at annot ~expected_self ~expected in 558 | check_term vars env ~at term ~expected_self ~expected:annot 559 | | T_var { var } -> 560 | (* TODO: use expected_self? *) 561 | let received = solve vars env var in 562 | let self = access env var in 563 | coerce ~at ~self received expected 564 | | T_hoist { bound; body } -> 565 | (* TODO: ensure it's eventually bound *) 566 | let type_ = infer_var_pat vars env ~at bound in 567 | let vars = enter_var_pat vars bound ~type_ in 568 | let arg = fresh_v_forward_of_var_pat bound in 569 | let env = with_var_pat env bound ~arg in 570 | check_term vars env ~at body ~expected_self ~expected 571 | | T_fix { bound; var; arg; body } -> 572 | (* TODO: ensure it's not trivially recursive? 
A = M(A) *) 573 | let forward = access env var in 574 | let () = assert_forward forward in 575 | let () = 576 | let expected = solve vars env var in 577 | check_var_pat vars env ~at bound ~expected; 578 | let self = forward in 579 | let expected_self = Some self in 580 | let expected = type_of_var_pat env bound ~type_:expected in 581 | check_term vars env ~at arg ~expected_self ~expected 582 | in 583 | let () = 584 | let arg = v_thunk ~env ~term:arg in 585 | init_forward forward ~to_:arg 586 | in 587 | let expected = 588 | (* TODO: this could unlock some reductions *) 589 | match expected_self with 590 | | Some self -> inst_self ~self expected 591 | | None -> expected 592 | in 593 | check_term vars env ~at body ~expected_self ~expected 594 | | T_let { bound; arg; body } -> 595 | let type_ = infer_pat vars env ~at bound in 596 | let () = 597 | check_term vars env ~at arg ~expected_self:None ~expected:type_ 598 | in 599 | let arg = v_thunk ~env ~term:arg in 600 | let vars = enter vars bound ~type_ in 601 | let env = with_pat env bound ~arg in 602 | check_term vars env ~at body ~expected_self ~expected 603 | | T_lambda { bound; body } -> 604 | let expected_param, expected_env, expected_bound, expected_body = 605 | split_forall expected 606 | in 607 | let () = check_pat vars env ~at bound ~expected:expected_param in 608 | let param = type_of_pat env bound ~type_:expected_param in 609 | let skolem = v_skolem ~at bound in 610 | let vars = enter vars bound ~type_:param in 611 | let env = with_pat env bound ~arg:skolem in 612 | let at = Level.next at in 613 | let expected = 614 | let env = with_pat expected_env expected_bound ~arg:skolem in 615 | eval env expected_body 616 | in 617 | let expected = strong_force expected in 618 | let expected, expected_self = 619 | match expected_self with 620 | | Some self -> 621 | let self = strong_force self in 622 | let self = eval_apply ~funct:self ~arg:skolem in 623 | let expected = inst_self ~self expected in 624 | (expected, Some 
self) 625 | | None -> (expected, None) 626 | in 627 | check_term vars env ~at body ~expected_self ~expected 628 | | T_apply { funct; arg } -> 629 | let funct_type = infer_term vars env ~at funct in 630 | let param, body_env, bound, body_type = split_forall funct_type in 631 | let () = 632 | check_term vars env ~at arg ~expected_self:None ~expected:param 633 | in 634 | let received = 635 | let arg = v_thunk ~env ~term:arg in 636 | let body_env = with_pat body_env bound ~arg in 637 | eval body_env body_type 638 | in 639 | (* TODO: coerce? *) 640 | unify ~at received expected 641 | | T_forall { bound; param; body } -> 642 | unify ~at v_univ expected; 643 | let () = 644 | check_term vars env ~at param ~expected_self:None ~expected:v_univ 645 | in 646 | let param = eval env param in 647 | check_pat vars env ~at bound ~expected:param; 648 | let skolem = v_skolem ~at bound in 649 | let at = Level.next at in 650 | let vars = enter vars bound ~type_:param in 651 | let env = append env skolem in 652 | check_term vars env ~at body ~expected_self:None ~expected:v_univ 653 | | T_self { bound; body } -> 654 | (* TODO: this is really ugly *) 655 | unify ~at v_univ expected; 656 | let expected_self = 657 | match expected_self with 658 | | Some expected_self -> expected_self 659 | | None -> failwith "self is only supported in a fixpoint" 660 | in 661 | check_var_pat vars env ~at bound ~expected:expected_self; 662 | let type_ = type_of_var_pat env bound ~type_:expected_self in 663 | let skolem = v_skolem_of_var_pat ~at bound in 664 | let at = Level.next at in 665 | let vars = enter_var_pat vars bound ~type_ in 666 | let env = with_var_pat env bound ~arg:skolem in 667 | check_term vars env ~at body ~expected_self:None ~expected:v_univ 668 | | T_tuple _ | T_exists _ -> failwith "not implemented" 669 | 670 | and check_annot vars env ~at annot ~expected_self ~expected = 671 | check_term vars env ~at annot ~expected_self:None ~expected:v_univ; 672 | let received = eval env annot in 673 | 
match expected_self with 674 | | Some self -> 675 | let received = inst_self ~self received in 676 | coerce ~at ~self received expected; 677 | received 678 | | None -> 679 | unify ~at received expected; 680 | received 681 | 682 | and infer_var_pat vars env ~at pat = 683 | let expected = fresh_v_hole ~at in 684 | check_var_pat vars env ~at pat ~expected; 685 | expected 686 | 687 | and check_var_pat vars env ~at pat ~expected = 688 | let (VPat { struct_ = pat_struct; loc = _ }) = pat in 689 | match pat_struct with 690 | | VP_annot { pat; annot } -> 691 | let annot = 692 | check_annot vars env ~at annot ~expected_self:None ~expected 693 | in 694 | check_var_pat vars env ~at pat ~expected:annot 695 | | VP_var { var = _ } -> () 696 | 697 | and infer_pat vars env ~at pat = 698 | let expected = fresh_v_hole ~at in 699 | check_pat vars env ~at pat ~expected; 700 | expected 701 | 702 | and check_pat vars env ~at pat ~expected = 703 | let (Pat { struct_ = pat_struct; loc = _ }) = pat in 704 | match pat_struct with 705 | | P_annot { pat; annot } -> 706 | let annot = 707 | check_annot vars env ~at annot ~expected_self:None ~expected 708 | in 709 | check_pat vars env ~at pat ~expected:annot 710 | | P_var { var = _ } -> () 711 | | P_tuple _ -> failwith "not implemented" 712 | 713 | (* external *) 714 | let infer_term term = 715 | let at = Level.(next zero) in 716 | let vars = 717 | let types = [ (Name.make "Type", v_univ) ] in 718 | Vars { types } 719 | in 720 | let env = append empty v_univ in 721 | try Ok (infer_term vars env ~at term) with exn -> Error exn 722 | -------------------------------------------------------------------------------- /smol/styper.ml: -------------------------------------------------------------------------------- 1 | (* TODO: remove all failwith *) 2 | 3 | module Error = struct 4 | open Syntax 5 | 6 | type error = 7 | | E_loc of { error : error; loc : Location.t } 8 | (* machinery *) 9 | | E_free_var_clash 10 | | E_bound_var_clash 11 | | E_type_clash 
12 | | E_pattern_clash 13 | (* context *) 14 | | E_unknown_var of { var : Name.t } 15 | | E_variable_used of { var : Name.t } 16 | | E_variable_unused of { var : Name.t } 17 | | E_grades_invariant_violated 18 | | E_types_invariant_violated 19 | (* typer *) 20 | | E_unsupported_extensions 21 | | E_string_not_supported 22 | | E_missing_annotations 23 | | E_unroll_pattern_not_supported 24 | | E_expected_forall 25 | | E_expected_self 26 | 27 | exception Error of { error : error } 28 | 29 | let rec pp_error fmt error = 30 | let open Format in 31 | match error with 32 | | E_loc { error; loc = _ } -> pp_error fmt error 33 | | E_free_var_clash -> fprintf fmt "free var clash" 34 | | E_bound_var_clash -> fprintf fmt "bound var clash" 35 | | E_type_clash -> fprintf fmt "type clash" 36 | | E_pattern_clash -> fprintf fmt "pattern clash" 37 | | E_unknown_var { var } -> fprintf fmt "unknown variable: %a" Name.pp var 38 | (* TODO: show all other occurrences *) 39 | | E_variable_used { var } -> 40 | fprintf fmt "duplicated variable: %a" Name.pp var 41 | (* TODO: show error on pattern *) 42 | | E_variable_unused { var } -> 43 | fprintf fmt "variable not used: %a" Name.pp var 44 | | E_grades_invariant_violated -> 45 | fprintf fmt "compiler bug, grades invariant" 46 | | E_types_invariant_violated -> fprintf fmt "compiler bug, types invariant" 47 | | E_unsupported_extensions -> fprintf fmt "extensions are not supported" 48 | | E_string_not_supported -> fprintf fmt "strings are not supported" 49 | | E_missing_annotations -> fprintf fmt "not enough annotations here" 50 | | E_unroll_pattern_not_supported -> 51 | fprintf fmt "unroll patterns are not supported" 52 | | E_expected_forall -> fprintf fmt "expected a function" 53 | | E_expected_self -> fprintf fmt "expected a fixpoint" 54 | 55 | let pp_loc fmt loc = 56 | let open Format in 57 | (* TODO: loc ghost?*) 58 | let Location.{ loc_start; loc_end; loc_ghost = _ } = loc in 59 | fprintf fmt "[%d:%d .. 
%d:%d]" loc_start.pos_lnum
      (loc_start.pos_cnum - loc_start.pos_bol)
      loc_end.pos_lnum
      (loc_end.pos_cnum - loc_end.pos_bol)

  (* Print [error] prefixed with the most precise location known.
     [loc] is the best location found so far; each E_loc wrapper may
     refine it on the way down. *)
  let rec pp_error_loc ~loc fmt error =
    let open Format in
    match error with
    | E_loc { error; loc = new_loc } -> (
        match Location.is_none new_loc with
        (* a dummy inner location must not discard a real outer one;
           previously both branches took [new_loc], losing the position *)
        | true -> pp_error_loc ~loc fmt error
        | false -> pp_error_loc ~loc:new_loc fmt error)
    | error -> (
        match Location.is_none loc with
        | true -> fprintf fmt "type error : %a" pp_error error
        | false -> fprintf fmt "type error at %a : %a" pp_loc loc pp_error error
        )

  (* Make the Error exception readable in uncaught-exception reports. *)
  let () =
    Printexc.register_printer @@ function
    | Error { error } ->
        Some (Format.asprintf "%a" (pp_error_loc ~loc:Location.none) error)
    | _ -> None

  let error error = raise (Error { error })
end

module Machinery = struct
  open Stree
  open Error

  (* Substitute [to_] for the bound variable at index [from] in [term],
     shifting [from] by one under each binder crossed. *)
  let rec open_term ~from ~to_ term =
    let open_term ~from term = open_term ~from ~to_ term in
    let open_ty_pat ~from pat = open_ty_pat ~from ~to_ pat in
    let open_pat ~from pat = open_pat ~from ~to_ pat in
    match term with
    | ST_loc { term; loc } ->
        let term = open_term ~from term in
        ST_loc { term; loc }
    | ST_free_var { level } -> ST_free_var { level }
    | ST_bound_var { index } -> (
        match Index.equal from index with
        | true -> to_
        | false -> ST_bound_var { index })
    | ST_forall { param; return } ->
        let param = open_ty_pat ~from param in
        let return =
          (* TODO: what if pairs in patterns *)
          let from = Index.next from in
          open_term ~from return
        in
        ST_forall { param; return }
    | ST_lambda { param; return } ->
        let param = open_ty_pat ~from param in
        let return =
          let from = Index.next from in
          open_term ~from return
        in
        ST_lambda { param; return }
    | ST_apply { lambda; arg } ->
        let lambda = open_term ~from
lambda in 120 | let arg = open_term ~from arg in 121 | ST_apply { lambda; arg } 122 | | ST_self { self; body } -> 123 | let self = open_pat ~from self in 124 | let body = 125 | let from = Index.next from in 126 | open_term ~from body 127 | in 128 | ST_self { self; body } 129 | | ST_fix { self; body } -> 130 | let self = open_ty_pat ~from self in 131 | let body = 132 | let from = Index.next from in 133 | open_term ~from body 134 | in 135 | ST_fix { self; body } 136 | | ST_unroll { term } -> 137 | let term = open_term ~from term in 138 | ST_unroll { term } 139 | | ST_let { bound; value; return } -> 140 | let bound = open_ty_pat ~from bound in 141 | let value = open_term ~from value in 142 | let return = 143 | let from = Index.next from in 144 | open_term ~from return 145 | in 146 | ST_let { bound; value; return } 147 | | ST_annot { term; annot } -> 148 | let term = open_term ~from term in 149 | let annot = open_term ~from annot in 150 | ST_annot { term; annot } 151 | 152 | and open_ty_pat ~from ~to_ pat = 153 | let (SP_typed { pat; type_ }) = pat in 154 | let pat = open_pat ~from ~to_ pat in 155 | let type_ = open_term ~from ~to_ type_ in 156 | SP_typed { pat; type_ } 157 | 158 | and open_pat ~from ~to_ pat = 159 | match pat with 160 | | SP_loc { pat; loc } -> 161 | let pat = open_pat ~from ~to_ pat in 162 | SP_loc { pat; loc } 163 | | SP_var { var } -> SP_var { var } 164 | | SP_erasable { pat } -> 165 | let pat = open_pat ~from ~to_ pat in 166 | SP_erasable { pat } 167 | | SP_annot { pat; annot } -> 168 | let pat = open_pat ~from ~to_ pat in 169 | let annot = open_term ~from ~to_ annot in 170 | SP_annot { pat; annot } 171 | 172 | let open_term ~to_ term = open_term ~from:Index.zero ~to_ term 173 | 174 | let rec close_term ~from ~to_ term = 175 | let close_term ~to_ term = close_term ~from ~to_ term in 176 | let close_ty_pat ~to_ pat = close_ty_pat ~from ~to_ pat in 177 | let close_pat ~to_ pat = close_pat ~from ~to_ pat in 178 | match term with 179 | | ST_loc { 
term; loc } -> 180 | let term = close_term ~to_ term in 181 | ST_loc { term; loc } 182 | | ST_free_var { level } -> ( 183 | match Level.equal from level with 184 | | true -> ST_bound_var { index = to_ } 185 | | false -> ST_free_var { level }) 186 | | ST_bound_var { index } -> ST_bound_var { index } 187 | | ST_forall { param; return } -> 188 | let param = close_ty_pat ~to_ param in 189 | let return = 190 | let to_ = Index.next to_ in 191 | close_term ~to_ return 192 | in 193 | ST_forall { param; return } 194 | | ST_lambda { param; return } -> 195 | let param = close_ty_pat ~to_ param in 196 | let return = 197 | let to_ = Index.next to_ in 198 | close_term ~to_ return 199 | in 200 | ST_lambda { param; return } 201 | | ST_apply { lambda; arg } -> 202 | let lambda = close_term ~to_ lambda in 203 | let arg = close_term ~to_ arg in 204 | ST_apply { lambda; arg } 205 | | ST_self { self; body } -> 206 | let self = close_pat ~to_ self in 207 | let body = 208 | let to_ = Index.next to_ in 209 | close_term ~to_ body 210 | in 211 | ST_self { self; body } 212 | | ST_fix { self; body } -> 213 | let self = close_ty_pat ~to_ self in 214 | let body = 215 | let to_ = Index.next to_ in 216 | close_term ~to_ body 217 | in 218 | ST_fix { self; body } 219 | | ST_unroll { term } -> 220 | let term = close_term ~to_ term in 221 | ST_unroll { term } 222 | | ST_let { bound; value; return } -> 223 | let bound = close_ty_pat ~to_ bound in 224 | let value = close_term ~to_ value in 225 | let return = 226 | let to_ = Index.next to_ in 227 | close_term ~to_ return 228 | in 229 | ST_let { bound; value; return } 230 | | ST_annot { term; annot } -> 231 | let term = close_term ~to_ term in 232 | let annot = close_term ~to_ annot in 233 | ST_annot { term; annot } 234 | 235 | and close_ty_pat ~from ~to_ pat = 236 | let (SP_typed { pat; type_ }) = pat in 237 | let pat = close_pat ~from ~to_ pat in 238 | let type_ = close_term ~from ~to_ type_ in 239 | SP_typed { pat; type_ } 240 | 241 | and close_pat 
~from ~to_ pat = 242 | match pat with 243 | | SP_loc { pat; loc } -> 244 | let pat = close_pat ~from ~to_ pat in 245 | SP_loc { pat; loc } 246 | | SP_var { var } -> SP_var { var } 247 | | SP_erasable { pat } -> 248 | let pat = close_pat ~from ~to_ pat in 249 | SP_erasable { pat } 250 | | SP_annot { pat; annot } -> 251 | let pat = close_pat ~from ~to_ pat in 252 | let annot = close_term ~from ~to_ annot in 253 | SP_annot { pat; annot } 254 | 255 | (* TODO: expansion of unroll *) 256 | let rec expand_head_term term = 257 | match term with 258 | (* TODO: use this loc during equality?*) 259 | | ST_loc { term; loc = _ } -> expand_head_term term 260 | (* TODO: equality expansion *) 261 | | ST_free_var _ as term -> term 262 | | ST_bound_var _ as term -> term 263 | | ST_forall _ as term -> term 264 | | ST_lambda _ as term -> term 265 | | ST_apply { lambda; arg } -> ( 266 | match expand_head_term lambda with 267 | (* TODO: use pattern when moving to subst *) 268 | | ST_lambda { param = _; return } -> open_term ~to_:arg return 269 | | lambda -> ST_apply { lambda; arg }) 270 | | ST_self _ as term -> term 271 | | ST_fix _ as term -> term 272 | | ST_unroll _ as term -> term 273 | (* TODO: use pattern when moving to subst *) 274 | | ST_let { bound = _; value; return } -> 275 | expand_head_term @@ open_term ~to_:value return 276 | | ST_annot { term; annot = _ } -> expand_head_term term 277 | 278 | (* TODO: document multi step equality *) 279 | let rec equal_term ~received ~expected = 280 | match received == expected with 281 | | true -> () 282 | | false -> equal_term_structural ~received ~expected 283 | 284 | and equal_term_structural ~received ~expected = 285 | let received = expand_head_term received in 286 | let expected = expand_head_term expected in 287 | match (received, expected) with 288 | (* TODO: locs? 
*) 289 | | ST_loc { term = received; loc = _ }, expected 290 | | received, ST_loc { term = expected; loc = _ } -> 291 | equal_term ~received ~expected 292 | | ST_free_var { level = received }, ST_free_var { level = expected } -> ( 293 | match Level.equal received expected with 294 | | true -> () 295 | | false -> error E_free_var_clash) 296 | | ST_bound_var { index = received }, ST_bound_var { index = expected } -> ( 297 | match Index.equal received expected with 298 | | true -> () 299 | | false -> error E_bound_var_clash) 300 | | ( ST_forall { param = received_param; return = received_return }, 301 | ST_forall { param = expected_param; return = expected_return } ) -> 302 | let () = 303 | equal_ty_pat ~received:received_param ~expected:expected_param 304 | in 305 | equal_term ~received:received_return ~expected:expected_return 306 | | ( ST_lambda { param = received_param; return = received_return }, 307 | ST_lambda { param = expected_param; return = expected_return } ) -> 308 | let () = 309 | equal_ty_pat ~received:received_param ~expected:expected_param 310 | in 311 | equal_term ~received:received_return ~expected:expected_return 312 | | ( ST_apply { lambda = received_lambda; arg = received_arg }, 313 | ST_apply { lambda = expected_lambda; arg = expected_arg } ) -> 314 | let () = 315 | equal_term ~received:received_lambda ~expected:expected_lambda 316 | in 317 | equal_term ~received:received_arg ~expected:expected_arg 318 | | ( ST_self { self = received_self; body = received_body }, 319 | ST_self { self = expected_self; body = expected_body } ) -> 320 | let () = equal_pat ~received:received_self ~expected:expected_self in 321 | equal_term ~received:received_body ~expected:expected_body 322 | | ( ST_fix { self = received_self; body = received_body }, 323 | ST_fix { self = expected_self; body = expected_body } ) -> 324 | let () = equal_ty_pat ~received:received_self ~expected:expected_self in 325 | equal_term ~received:received_body ~expected:expected_body 326 | | 
ST_unroll { term = received }, ST_unroll { term = expected } -> 327 | equal_term ~received ~expected 328 | (* TODO: document why let here *) 329 | | ( ST_let 330 | { 331 | bound = received_bound; 332 | value = received_value; 333 | return = received_return; 334 | }, 335 | ST_let 336 | { 337 | bound = expected_bound; 338 | value = expected_value; 339 | return = expected_return; 340 | } ) -> 341 | let () = 342 | equal_ty_pat ~received:received_bound ~expected:expected_bound 343 | in 344 | let () = equal_term ~received:received_value ~expected:expected_value in 345 | equal_term ~received:received_return ~expected:expected_return 346 | (* TODO: document why annot here *) 347 | (* TODO: should check also for annot equality? *) 348 | | ST_annot { term = received; annot = _ }, expected 349 | | received, ST_annot { term = expected; annot = _ } -> 350 | equal_term ~received ~expected 351 | | ( ( ST_free_var _ | ST_bound_var _ | ST_forall _ | ST_lambda _ 352 | | ST_apply _ | ST_self _ | ST_fix _ | ST_unroll _ | ST_let _ ), 353 | ( ST_free_var _ | ST_bound_var _ | ST_forall _ | ST_lambda _ 354 | | ST_apply _ | ST_self _ | ST_fix _ | ST_unroll _ | ST_let _ ) ) -> 355 | error E_type_clash 356 | 357 | and equal_ty_pat ~received ~expected = 358 | let (SP_typed { pat = received_pat; type_ = received_type }) = received in 359 | let (SP_typed { pat = expected_pat; type_ = expected_type }) = expected in 360 | let () = equal_pat ~received:received_pat ~expected:expected_pat in 361 | equal_term ~received:received_type ~expected:expected_type 362 | 363 | and equal_pat ~received ~expected = 364 | (* TODO: normalize pattern *) 365 | (* TODO: check pat? 
*) 366 | match (received, expected) with 367 | (* TODO: locs *) 368 | | SP_loc { pat = received; loc = _ }, expected 369 | | received, SP_loc { pat = expected; loc = _ } -> 370 | equal_pat ~received ~expected 371 | | SP_var { var = _ }, SP_var { var = _ } -> () 372 | | SP_erasable { pat = received }, SP_erasable { pat = expected } -> 373 | equal_pat ~received ~expected 374 | | SP_annot { pat = received; annot = _ }, expected 375 | | received, SP_annot { pat = expected; annot = _ } -> 376 | equal_pat ~received ~expected 377 | | (SP_var _ | SP_erasable _), (SP_var _ | SP_erasable _) -> 378 | error E_pattern_clash 379 | 380 | let typeof_pat pat = 381 | let (SP_typed { pat; type_ }) = pat in 382 | let rec is_erasable pat = 383 | match pat with 384 | (* TODO: weird *) 385 | | SP_loc { pat; loc = _ } -> is_erasable pat 386 | | SP_var { var = _ } -> false 387 | | SP_erasable { pat = _ } -> true 388 | | SP_annot { pat; annot = _ } -> is_erasable pat 389 | in 390 | (type_, `Erasability (is_erasable pat)) 391 | end 392 | 393 | module Assume = struct 394 | (* TODO: document assumption mode *) 395 | open Syntax 396 | open Ltree 397 | open Stree 398 | 399 | (* TODO: linearity on assume? 
*) 400 | module Context : sig 401 | type 'a context 402 | type 'a t = 'a context 403 | 404 | (* monad *) 405 | (* TODO: this should not be exposed *) 406 | val run : 407 | level:Level.t -> names:Level.t Name.Map.t -> (unit -> 'a context) -> 'a 408 | 409 | val pure : 'a -> 'a context 410 | val ( let* ) : 'a context -> ('a -> 'b context) -> 'b context 411 | 412 | (* locs *) 413 | val with_loc : Location.t -> (unit -> 'a context) -> 'a context 414 | 415 | (* vars *) 416 | val enter : Name.t -> (unit -> 'a context) -> 'a context 417 | val lookup : Name.t -> Level.t context 418 | 419 | (* machinery *) 420 | val close_term : term -> term context 421 | end = struct 422 | open Machinery 423 | open Error 424 | 425 | (* TODO: names map vs names list / stack *) 426 | type 'a context = level:Level.t -> names:Level.t Name.Map.t -> 'a 427 | type 'a t = 'a context 428 | 429 | let run ~level ~names f = f () ~level ~names 430 | let pure x ~level:_ ~names:_ = x 431 | let ( let* ) ctx f ~level ~names = f (ctx ~level ~names) ~level ~names 432 | 433 | let with_loc loc f ~level ~names = 434 | try f () ~level ~names 435 | with Error { error } -> 436 | let error = E_loc { error; loc } in 437 | raise (Error { error }) 438 | 439 | let enter name f ~level ~names = 440 | let level = Level.next level in 441 | let names = Name.Map.add name level names in 442 | f () ~level ~names 443 | 444 | let lookup name ~level:_ ~names = 445 | match Name.Map.find_opt name names with 446 | | Some level -> level 447 | | None -> error (E_unknown_var { var = name }) 448 | 449 | let close_term term ~level ~names:_ = 450 | close_term ~from:level ~to_:Index.zero term 451 | end 452 | 453 | open Context 454 | open Error 455 | 456 | let rec assume_term term = 457 | match term with 458 | | LT_loc { term; loc } -> 459 | let* term = with_loc loc @@ fun () -> assume_term term in 460 | pure @@ ST_loc { term; loc } 461 | | LT_var { var } -> 462 | let* level = lookup var in 463 | pure @@ ST_free_var { level } 464 | | 
LT_extension _ -> error E_unsupported_extensions
  | LT_forall { param; return } ->
      let* param, enter = assume_ty_pat param in
      let* return = enter @@ fun () -> assume_term return in
      pure @@ ST_forall { param; return }
  | LT_lambda { param; return } ->
      let* param, enter = assume_ty_pat param in
      let* return = enter @@ fun () -> assume_term return in
      pure @@ ST_lambda { param; return }
  | LT_apply { lambda; arg } ->
      let* lambda = assume_term lambda in
      let* arg = assume_term arg in
      pure @@ ST_apply { lambda; arg }
  | LT_self { self; body } -> assume_self ~self ~body
  | LT_fix { self; body } -> assume_fix ~self ~body
  | LT_unroll { term } ->
      let* term = assume_term term in
      pure @@ ST_unroll { term }
  | LT_let { bound; return } ->
      (* TODO: assume bind? *)
      (* TODO: use this loc *)
      let (LBind { loc = _; pat = bound; value }) = bound in
      (* TODO: should let always be typed here *)
      let* bound, enter = assume_ty_pat bound in
      let* value = assume_term value in
      let* return = enter @@ fun () -> assume_term return in
      pure @@ ST_let { bound; value; return }
  | LT_annot { term; annot } ->
      (* note: the annotation is assumed before the term *)
      let* annot = assume_term annot in
      let* term = assume_term term in
      pure @@ ST_annot { term; annot }
  | LT_string _ -> error E_string_not_supported

(* Self binders take a plain pattern: the binder needs no annotation here. *)
and assume_self ~self ~body =
  let* self, enter = assume_pat self in
  let* body = enter @@ fun () -> assume_term body in
  pure @@ ST_self { self; body }

(* Fixpoints require a typed pattern for the self binder. *)
and assume_fix ~self ~body =
  let* self, enter = assume_ty_pat self in
  let* body = enter @@ fun () -> assume_term body in
  pure @@ ST_fix { self; body }

(* Assume a pattern that must carry enough annotations to be typed;
   returns the [SP_typed] pattern together with the scope-entering
   continuation. Bare variables are rejected with E_missing_annotations. *)
and assume_ty_pat pat =
  let wrap ~enter ~type_ pat = pure @@ (SP_typed { pat; type_ }, enter) in
  match pat with
  | LP_loc { pat; loc } ->
      let* SP_typed { pat; type_ }, enter =
        with_loc loc @@ fun () -> assume_ty_pat pat
      in
      wrap ~enter ~type_ @@ SP_loc { pat; loc }
  | LP_var _ -> error E_missing_annotations
  | LP_unroll _ -> error E_unroll_pattern_not_supported
  | LP_erasable { pat } ->
      (* BUG FIX: this arm previously matched [LP_erasable _] and then
         recursed with the *outer* [pat], i.e. [assume_ty_pat] called
         itself on the same value forever. Bind the inner pattern so the
         recursion makes progress, matching the sibling [assume_pat] and
         [infer_ty_pat] arms. *)
      let* SP_typed { pat; type_ }, enter = assume_ty_pat pat in
      wrap ~enter ~type_ @@ SP_erasable { pat }
  | LP_annot { pat; annot } ->
      let* annot = assume_term annot in
      let* pat, enter = assume_pat pat in
      wrap ~enter ~type_:annot @@ SP_annot { pat; annot }

(* Assume an arbitrary (possibly untyped) pattern; returns the pattern and
   the scope-entering continuation. *)
and assume_pat pat =
  (* TODO: with should do auto close *)
  match pat with
  | LP_loc { pat; loc } ->
      with_loc loc @@ fun () ->
      let* pat, enter = assume_pat pat in
      let pat = SP_loc { pat; loc } in
      pure @@ (pat, enter)
  | LP_var { var } ->
      let enter k =
        enter var @@ fun () ->
        (* TODO: better place or name for close term *)
        let* term = k () in
        close_term term
      in
      pure @@ (SP_var { var }, enter)
  | LP_erasable { pat } ->
      let* pat, enter = assume_pat pat in
      pure @@ (SP_erasable { pat }, enter)
  | LP_unroll _ -> error E_unroll_pattern_not_supported
  | LP_annot { pat; annot } ->
      let* annot = assume_term annot in
      let* pat, enter = assume_pat pat in
      let pat = SP_annot { pat; annot } in
      pure @@ (pat, enter)
end

open Syntax
open Ltree
open Stree
open Machinery

(* TODO: this being hard coded is bad *)
(* The type of types: free variable at level zero, bound by [Context.run]. *)
let st_type = ST_free_var { level = Level.zero }

module Context : sig
  type 'a context
  type 'a t = 'a context

  (* monad *)
  val run : (unit -> 'a context) -> 'a
  val pure : 'a -> 'a context
  val ( let* ) : 'a context -> ('a -> 'b context) -> 'b context

  (* locs *)
  val with_loc : Location.t -> (unit -> 'a context) -> 'a context

  (* mode *)
  val enter_erasable_zone : (unit -> 'a context) -> 'a context
(* vars *)
  val enter :
    Name.t -> erasable:bool -> type_:term -> (unit -> 'a context) -> 'a context

  val lookup : Name.t -> ([ `Type of term ] * Level.t) context

  (* machinery *)
  val assume_self : self:Ltree.pat -> body:Ltree.term -> term context
  val assume_fix : self:Ltree.pat -> body:Ltree.term -> term context
  val subst_term : to_:term -> term -> term context
  val open_term : term -> term context
  val close_term : term -> term context
end = struct
  open Machinery
  open Error

  (* Usage state of a linear variable. *)
  type status = Var_pending | Var_used

  (* TODO: names map vs names list / stack *)
  (* TODO: vars map vs vars list / stack *)
  (* A computation threads the current level, the name -> level map, the
     level -> type map, and the usage grades; it returns the updated
     grades together with its result. *)
  type 'a context =
    level:Level.t ->
    names:Level.t Name.Map.t ->
    types:term Level.Map.t ->
    grades:status Level.Map.t ->
    status Level.Map.t * 'a

  type 'a t = 'a context

  (* Run with the initial environment: only "Type" is bound, at level
     zero, already marked as used. *)
  let run k =
    let level = Level.(next zero) in
    (* TODO: move this to Name? *)
    let names = Name.Map.(add (Name.make "Type") Level.zero empty) in
    let types = Level.Map.(add Level.zero st_type empty) in
    let grades = Level.Map.(add Level.zero Var_used empty) in
    let _grades, x = k () ~level ~names ~types ~grades in
    (* TODO: check grades here *)
    x

  let pure x ~level:_ ~names:_ ~types:_ ~grades = (grades, x)

  let ( let* ) ctx f ~level ~names ~types ~grades =
    let grades, x = ctx ~level ~names ~types ~grades in
    f x ~level ~names ~types ~grades

  (* Wrap any raised error with the location [loc]. *)
  let with_loc loc f ~level ~names ~types ~grades =
    try f () ~level ~names ~types ~grades
    with Error { error } ->
      let error = E_loc { error; loc } in
      raise (Error { error })

  (* Inside an erasable zone usage is not tracked: run with empty grades
     and restore the outer grades afterwards. *)
  let enter_erasable_zone f ~level ~names ~types ~grades =
    let _grades, x = f () ~level ~names ~types ~grades:Level.Map.empty in
    (* TODO: check grades to be empty here *)
    (grades, x)

  (* Linear binder: must be used exactly once inside [f]. *)
  let enter_linear name ~type_ f ~level ~names ~types ~grades =
    let level = Level.next level in
    let names = Name.Map.add name level names in
    let types = Level.Map.add level type_ types in
    let grades = Level.Map.add level Var_pending grades in
    let grades, x = f () ~level ~names ~types ~grades in
    match Level.Map.find_opt level grades with
    | Some Var_pending -> error (E_variable_unused { var = name })
    | Some Var_used -> (Level.Map.remove level grades, x)
    | None -> error E_grades_invariant_violated

  (* Erasable binder: entered as already used, so any number of uses is
     accepted. *)
  let enter_erasable name ~type_ f ~level ~names ~types ~grades =
    let level = Level.next level in
    let names = Name.Map.add name level names in
    let types = Level.Map.add level type_ types in
    (* TODO: explain why it enters variable as used *)
    (* TODO: Var_erasable? *)
    let grades = Level.Map.add level Var_used grades in
    let grades, x = f () ~level ~names ~types ~grades in
    (Level.Map.remove level grades, x)

  let enter name ~erasable ~type_ f ~level ~names ~types ~grades =
    match erasable with
    | true -> enter_erasable name ~type_ f ~level ~names ~types ~grades
    | false -> enter_linear name ~type_ f ~level ~names ~types ~grades

  (* Look a name up; marks a pending linear variable as used, rejects a
     second use, and permits variables whose grade entry was dropped by
     an enclosing erasable zone. *)
  let lookup name ~level:_ ~names ~types ~grades =
    match Name.Map.find_opt name names with
    | Some level -> (
        match Level.Map.find_opt level types with
        | Some type_ -> (
            match Level.Map.find_opt level grades with
            | Some Var_pending ->
                let grades = Level.Map.add level Var_used grades in
                (grades, (`Type type_, level))
            | Some Var_used -> error (E_variable_used { var = name })
            | None ->
                (* removed by erasable zone *)
                (grades, (`Type type_, level)))
        | None -> error E_types_invariant_violated)
    | None -> error (E_unknown_var { var = name })

  (* Delegate to the [Assume] pass, reusing the current level and names. *)
  let assume_self ~self ~body ~level ~names ~types:_ ~grades =
    let x =
      let open Assume in
      Context.run ~level ~names @@ fun () -> assume_self ~self ~body
    in
    (grades, x)

  let assume_fix ~self ~body ~level ~names ~types:_ ~grades =
    let x =
      let open Assume in
      Context.run ~level ~names @@ fun () -> assume_fix ~self ~body
    in
    (grades, x)

  (* Substitute [to_] for the outermost bound variable. *)
  let subst_term ~to_ term ~level:_ ~names:_ ~types:_ ~grades =
    (grades, open_term ~to_ term)

  (* Open the outermost bound variable as a free variable at the current
     level. *)
  let open_term term ~level ~names:_ ~types:_ ~grades =
    (grades, open_term ~to_:(ST_free_var { level }) term)

  (* Close the free variable at the current level back to index zero. *)
  let close_term term ~level ~names:_ ~types:_ ~grades =
    (grades, close_term ~from:level ~to_:Index.zero term)
end

open Context
open Error

(* TODO: think better about enter pat *)
let rec enter_pat ~erasable pat ~type_ k =
  match pat with
(* TODO: weird *)
  | SP_loc { pat; loc = _ } -> enter_pat pat ~erasable ~type_ k
  | SP_var { var } -> enter ~erasable var ~type_ k
  | SP_erasable { pat } -> enter_pat ~erasable:true pat ~type_ k
  | SP_annot { pat; annot = _ } -> enter_pat pat ~erasable ~type_ k

(* Enter a typed pattern using its carried type. *)
let enter_ty_pat ~erasable pat k =
  let (SP_typed { pat; type_ }) = pat in
  enter_pat pat ~erasable ~type_ k

(* TODO: this is clearly bad *)
let enter_erasable_zone_conditional ~erasable k =
  match erasable with true -> enter_erasable_zone k | false -> k ()

(* Infer a type for [term], producing an [ST_typed] node. *)
let rec infer_term term =
  let wrap ~type_ term = pure @@ ST_typed { term; type_ } in
  match term with
  | LT_loc { term; loc } ->
      let* (ST_typed { term; type_ }) =
        with_loc loc @@ fun () -> infer_term term
      in
      wrap ~type_ @@ ST_loc { term; loc }
  | LT_var { var } ->
      let* `Type type_, level = lookup var in
      wrap ~type_ @@ ST_free_var { level }
  | LT_extension _ -> error E_unsupported_extensions
  | LT_forall { param; return } ->
      let* param = infer_ty_pat param in
      let* return =
        (* the codomain is checked in an erasable zone against Type *)
        enter_erasable_zone @@ fun () ->
        check_term_with_ty_pat ~erasable:true param return ~expected:st_type
      in
      wrap ~type_:st_type @@ ST_forall { param; return }
  | LT_lambda { param; return } ->
      let* param = infer_ty_pat param in
      let* (ST_typed { term = return; type_ = return_type }) =
        infer_term_with_ty_pat ~erasable:false param return
      in
      let type_ = ST_forall { param; return = return_type } in
      wrap ~type_ @@ ST_lambda { param; return }
  | LT_apply { lambda; arg } -> (
      let* (ST_typed { term = lambda; type_ = forall }) = infer_term lambda in
      (* TODO: maybe machinery to eliminate forall *)
      match expand_head_term forall with
      | ST_forall { param; return } ->
          let* arg =
            let expected, `Erasability erasable = typeof_pat param in
            enter_erasable_zone_conditional ~erasable @@ fun () ->
            check_term arg ~expected
          in
          let* type_ = subst_term ~to_:arg return in
          wrap ~type_ @@ ST_apply { lambda; arg }
      (* TODO: expand cases *)
      | _ -> error E_expected_forall)
  | LT_self { self; body } ->
      let* assumed_self = assume_self ~self ~body in
      let* self = check_pat self ~expected:assumed_self in
      let* body =
        enter_erasable_zone @@ fun () ->
        check_term_with_pat ~erasable:true self ~type_:assumed_self body
          ~expected:st_type
      in
      let self = ST_self { self; body } in
      (* this equality is about peace of mind *)
      let () = equal_term ~received:self ~expected:assumed_self in
      wrap ~type_:st_type @@ self
  | LT_fix { self; body } ->
      let* self = infer_ty_pat self in
      let type_, `Erasability erasable = typeof_pat self in
      let* body =
        enter_erasable_zone_conditional ~erasable @@ fun () ->
        check_term_with_ty_pat ~erasable:false self body ~expected:type_
      in
      wrap ~type_ @@ ST_fix { self; body }
  | LT_unroll { term } -> (
      (* TODO: rename to fix *)
      let* (ST_typed { term; type_ = self }) = infer_term term in
      (* TODO: maybe machinery to eliminate forall *)
      match expand_head_term self with
      | ST_self { self = _; body } ->
          let* type_ = subst_term ~to_:term body in
          wrap ~type_ @@ ST_unroll { term }
      (* TODO: expand cases *)
      | _ -> error E_expected_self)
  | LT_let { bound; return } ->
      (* TODO: check bind? *)
      (* TODO: use this loc *)
      let (LBind { loc = _; pat = bound; value }) = bound in
      (* TODO: remove need for typing of let *)
      let* bound = infer_ty_pat bound in
      let* value =
        let value_type, `Erasability erasable = typeof_pat bound in
        enter_erasable_zone_conditional ~erasable @@ fun () ->
        check_term value ~expected:value_type
      in
      let* (ST_typed { term = return; type_ = return_type }) =
        infer_term_with_ty_pat ~erasable:false bound return
      in
      (* TODO: could use let at type level *)
      let* type_ = subst_term ~to_:value return_type in
      wrap ~type_ @@ ST_let { bound; value; return }
  | LT_annot { term; annot } ->
      let* annot =
        enter_erasable_zone @@ fun () -> check_term annot ~expected:st_type
      in
      let* term = check_term term ~expected:annot in
      wrap ~type_:annot @@ ST_annot { term; annot }
  | LT_string _ -> error E_string_not_supported

(* Check [term] against the (head-expanded) [expected] type, propagating
   the expected type into lambdas and fixpoints; everything else falls
   back to inference plus equality. *)
and check_term term ~expected =
  (* TODO: check term equality for nested annot ((x : A) : B)? *)
  (* TODO: propagate *)
  match (term, expand_head_term expected) with
  | LT_loc { term; loc }, expected ->
      let* term = with_loc loc @@ fun () -> check_term term ~expected in
      pure @@ ST_loc { term; loc }
  | ( LT_lambda { param; return },
      ST_forall { param = expected_param; return = expected_return } ) ->
      let* param =
        (* TODO: use this erasable?
*)
        let expected_param_type, `Erasability _erasable =
          typeof_pat expected_param
        in
        check_ty_pat param ~expected:expected_param_type
      in
      let () =
        (* TODO: loc for error message *)
        equal_ty_pat ~received:param ~expected:expected_param
      in
      let* return =
        check_term_with_ty_pat ~erasable:false param return
          ~expected:expected_return
      in
      pure @@ ST_lambda { param; return }
  | ( LT_fix { self; body },
      (ST_self { self = expected_self; body = expected_body } as expected) ) ->
      let* self = check_ty_pat self ~expected in
      let () =
        (* TODO: loc for error message *)
        let (SP_typed { pat = self; type_ = _ }) = self in
        equal_pat ~received:self ~expected:expected_self
      in
      let* body =
        check_term_with_ty_pat ~erasable:false self body ~expected:expected_body
      in
      pure @@ ST_fix { self; body }
  | term, expected ->
      (* fallback: infer then compare *)
      let* (ST_typed { term; type_ = received }) = infer_term term in
      let () = equal_term ~received ~expected in
      pure term

(* Infer a typed pattern; bare variables require an annotation. *)
and infer_ty_pat pat =
  let wrap ~type_ pat = pure @@ SP_typed { pat; type_ } in
  match pat with
  | LP_loc { pat; loc } ->
      with_loc loc @@ fun () ->
      let* (SP_typed { pat; type_ }) = infer_ty_pat pat in
      wrap ~type_ @@ SP_loc { pat; loc }
  | LP_var _ -> error E_missing_annotations
  | LP_erasable { pat } ->
      let* (SP_typed { pat; type_ }) = infer_ty_pat pat in
      wrap ~type_ @@ SP_erasable { pat }
  | LP_unroll _ -> error E_unroll_pattern_not_supported
  | LP_annot { pat; annot } ->
      let* annot =
        enter_erasable_zone @@ fun () -> check_term annot ~expected:st_type
      in
      check_ty_pat pat ~expected:annot

and check_ty_pat pat ~expected =
  let* pat = check_pat pat ~expected in
  pure @@ SP_typed { pat; type_ = expected }

(* Check a pattern against [expected]; an inner annotation must equal the
   expected type. *)
and check_pat pat ~expected =
  match pat with
  | LP_loc { pat; loc } ->
      with_loc loc @@ fun () ->
      let* pat = check_pat pat ~expected in
      pure @@ SP_loc { pat; loc }
  | LP_var { var } -> pure @@ SP_var { var }
  | LP_erasable { pat } ->
      let* pat = check_pat pat ~expected in
      pure @@ SP_erasable { pat }
  | LP_unroll _ -> error E_unroll_pattern_not_supported
  | LP_annot { pat; annot } ->
      let* annot =
        enter_erasable_zone @@ fun () -> check_term annot ~expected:st_type
      in
      let* pat = check_pat pat ~expected:annot in
      let () = equal_term ~received:annot ~expected in
      pure @@ SP_annot { pat; annot }

(* Infer under a binder; both the result term and its type are closed
   over the bound variable. *)
and infer_term_with_ty_pat ~erasable pat term =
  enter_ty_pat ~erasable pat @@ fun () ->
  let* (ST_typed { term; type_ }) = infer_term term in
  let* term = close_term term in
  let* type_ = close_term type_ in
  pure @@ ST_typed { term; type_ }

and check_term_with_ty_pat ~erasable pat term ~expected =
  enter_ty_pat ~erasable pat @@ fun () ->
  (* TODO: open and close should probably not be here *)
  let* expected = open_term expected in
  let* term = check_term term ~expected in
  close_term term

and check_term_with_pat ~erasable pat ~type_ term ~expected =
  enter_pat ~erasable pat ~type_ @@ fun () ->
  let* expected = open_term expected in
  let* term = check_term term ~expected in
  close_term term