├── .Makefile.d
├── .envrc
├── .gitignore
├── CHANGELOG.md
├── Makefile
├── Makefile.conf
├── README.md
├── Setup.hs
├── _CoqProject
├── app
└── Main.hs
├── docs
├── .gitignore
├── .nojekyll
├── README.md
├── _sidebar.md
├── confusions.md
├── contributing.md
├── index.html
├── installation.md
├── operations.md
├── package.json
├── roadmap.md
├── syntax.md
├── theory.md
└── yarn.lock
├── editor
├── code
│ ├── .gitignore
│ ├── client
│ │ ├── out
│ │ │ ├── extension.js
│ │ │ └── extension.js.map
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── src
│ │ │ └── extension.ts
│ │ └── tsconfig.json
│ ├── language-configuration.json
│ ├── out
│ │ └── client.js
│ ├── package-lock.json
│ ├── package.json
│ ├── pedant-syntax-highlighting-0.0.1.vsix
│ ├── syntaxes
│ │ └── pedant.tmLanguage.json
│ └── tsconfig.json
└── vim
│ └── ped.vim
├── examples
├── decisions
│ └── urns.ped
├── functions.ped
├── givewell
│ ├── amf.ped
│ ├── common.ped
│ ├── deworm_the_world.ped
│ ├── givedirectly.ped
│ └── newgivedirectly.ped
├── highschool_physics_example.ped
├── simple_example.ped
├── tests.ped
└── tests
│ ├── functions.ped
│ └── simpleimport.ped
├── hie.yaml
├── make-ubuntu.bash
├── package.yaml
├── pedant.cabal
├── shell.nix
├── src
├── Pedant.hs
└── Pedant
│ ├── FileResolver.hs
│ ├── InBuilt.hs
│ ├── LSP.hs
│ ├── Parser.hs
│ ├── Parser
│ └── Types.hs
│ ├── TypeCheck.hs
│ ├── TypeCheck
│ ├── Dimensions.hs
│ ├── Expressions.hs
│ ├── LambdaCalculus.hs
│ └── Types.hs
│ └── Types.hs
├── stack.yaml
├── stack.yaml.lock
├── test
├── Main.hs
└── Spec
│ └── Parser.hs
└── theories
├── .Numerics.aux
├── Collections.v
├── HindleyMilner.v
├── MyExtraction.v
├── Numerics.glob
├── Numerics.v
└── Types.v
/.Makefile.d:
--------------------------------------------------------------------------------
1 | theories/Numerics.vo theories/Numerics.glob theories/Numerics.v.beautified theories/Numerics.required_vo: theories/Numerics.v
2 | theories/Numerics.vio: theories/Numerics.v
3 | theories/Numerics.vos theories/Numerics.vok theories/Numerics.required_vos: theories/Numerics.v
4 | theories/MyExtraction.vo theories/MyExtraction.glob theories/MyExtraction.v.beautified theories/MyExtraction.required_vo: theories/MyExtraction.v
5 | theories/MyExtraction.vio: theories/MyExtraction.v
6 | theories/MyExtraction.vos theories/MyExtraction.vok theories/MyExtraction.required_vos: theories/MyExtraction.v
7 |
--------------------------------------------------------------------------------
/.envrc:
--------------------------------------------------------------------------------
1 | use nix
2 | export PATH="$PATH:/home/sam/.cabal/bin"
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .stack-work
2 | dist
3 | *.hi
4 | dist-newstyle
5 | *.o
6 | time
7 | .direnv
8 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Revision history for pedant
2 |
3 | ## 0.1.0.0 -- 2021-12-09
4 |
--------------------------------------------------------------------------------
/Makefile.conf:
--------------------------------------------------------------------------------
1 | # This configuration file was generated by running:
2 | # coq_makefile -f _CoqProject -o Makefile
3 |
4 |
5 | ###############################################################################
6 | # #
7 | # Project files. #
8 | # #
9 | ###############################################################################
10 |
11 | COQMF_VFILES = theories/Numerics.v theories/MyExtraction.v
12 | COQMF_MLIFILES =
13 | COQMF_MLFILES =
14 | COQMF_MLGFILES =
15 | COQMF_MLPACKFILES =
16 | COQMF_MLLIBFILES =
17 | COQMF_METAFILE =
18 | COQMF_CMDLINE_VFILES =
19 |
20 | ###############################################################################
21 | # #
22 | # Path directives (-I, -R, -Q). #
23 | # #
24 | ###############################################################################
25 |
26 | COQMF_OCAMLLIBS =
27 | COQMF_SRC_SUBDIRS =
28 | COQMF_COQLIBS = -R . Pedant
29 | COQMF_COQLIBS_NOML = -R . Pedant
30 | COQMF_CMDLINE_COQLIBS =
31 |
32 | ###############################################################################
33 | # #
34 | # Coq configuration. #
35 | # #
36 | ###############################################################################
37 |
38 | COQMF_COQLIB=/nix/store/8w5xjkyvip78z07r9k1his40xjmixw7f-coq-8.16.1/lib/coq/
39 | COQMF_COQCORELIB=/nix/store/8w5xjkyvip78z07r9k1his40xjmixw7f-coq-8.16.1/lib/coq/../coq-core/
40 | COQMF_DOCDIR=/nix/store/8w5xjkyvip78z07r9k1his40xjmixw7f-coq-8.16.1/share/doc/
41 | COQMF_OCAMLFIND=/nix/store/k68kbcccpjg48gfn0g2ba552v1fqqqvj-ocaml4.14.1-findlib-1.9.6/bin/ocamlfind
42 | COQMF_CAMLFLAGS=-thread -rectypes -w -a+1..3-4+5..8-9+10..26-27+28..40-41-42+43-44-45+46..47-48+49..57-58+59..66-67-68+69-70 -safe-string -strict-sequence
43 | COQMF_WARN=-warn-error +a-3
44 | COQMF_HASNATDYNLINK=true
45 | COQMF_COQ_SRC_SUBDIRS=boot config lib clib kernel library engine pretyping interp gramlib parsing proofs tactics toplevel printing ide stm vernac plugins/btauto plugins/cc plugins/derive plugins/extraction plugins/firstorder plugins/funind plugins/ltac plugins/ltac2 plugins/micromega plugins/nsatz plugins/ring plugins/rtauto plugins/ssr plugins/ssrmatching plugins/syntax
46 | COQMF_COQ_NATIVE_COMPILER_DEFAULT=ondemand
47 | COQMF_WINDRIVE=
48 |
49 | ###############################################################################
50 | # #
51 | # Native compiler. #
52 | # #
53 | ###############################################################################
54 |
55 | COQMF_COQPROJECTNATIVEFLAG =
56 |
57 | ###############################################################################
58 | # #
59 | # Extra variables. #
60 | # #
61 | ###############################################################################
62 |
63 | COQMF_OTHERFLAGS =
64 | COQMF_INSTALLCOQDOCROOT = Pedant
65 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Pedant
2 | Pedant is a minimal math DSL. It's originally designed for use in cost
3 | effectiveness analysis. However, it can be used for any important calculations.
4 |
5 | The goal of pedant is to make sure that it's difficult or impossible to make
6 | math and stats errors, and also allow for the full enumeration of the assumptions
7 | used for a calculation. Currently, its only feature is dimensional analysis, but
8 | I'm planning to add stats and constraints on stats in the future.
9 |
10 | It might be better to think of pedant as a replacement for excel rather than a
11 | programming language.
12 |
13 | # Compiling and running this project.
14 |
15 | To run this project, you will need cabal and ghc. Simply run:
16 |
17 | ```
18 | stack build
19 | stack run pedant compile filename ## e.g., stack run pedant examples/simple_example.ped
20 | ```
21 |
22 | You can also use nix. A shell.nix is provided with the project.
23 |
24 | For Ubuntu, you can also use `make-ubuntu.bash` file as a convenience wrapper.
25 |
26 | # Basic syntax
27 | The syntax of pedant is really simple to learn. It's simply a collection of
28 | math formulas. It would be easier to understand by simply looking at the examples
29 | in `examples`.
30 |
 31 | Each line can either be empty or have a declaration of a variable on it. The variable is equated to a term:
32 |
33 | ```pedant
34 | donation_size = 100000
35 | ```
36 |
37 | On the right is the value for that term.
38 |
39 | That value can make up an expression:
40 |
41 | ```pedant
42 | consumption_possible_by_funds =
43 | (1 - percent_of_transfers_invested)
44 | * size_of_transfer_per_person
45 | / duration_of_initial_consumption
46 | ```
47 |
48 | If you want, you can break expressions over multiple lines, with the condition
49 | that every line within the expression must be indented.
50 |
51 | The available operators are:
52 | - `*` multiplication
53 | - `/` division
54 | - `^` exponentiation
55 | - `+` addition
56 | - `-` subtraction
57 |
58 | There is also one inbuilt function currently available, the natural logarithm: `ln`.
59 | Functions are applied by whitespace, like functional languages such as Haskell.
60 | For instance `ln 5`.
61 |
62 | Parentheses can be used to impact the order of operations. Order of operations
63 | follow BODMAS, as you would expect.
64 |
65 | When declaring a value, only values higher up in the file can be referenced.
66 | This ensures there is no circularity in definitions.
67 |
68 | C style line and block comments are also available:
69 |
70 | ```pedant
71 | // This is a line comment
72 | /*
73 | This is a block comment
74 | */
75 | ```
76 |
77 | As of now, .ped files must end with a newline, POSIX-style.
78 |
79 | # Dimensional Analysis in Pedant
80 | A dimensional checker runs over pedant to ensure that there are no dimensional
81 | violations.
82 |
83 | If you don't want a dimensional checker, all dimensionless operations are valid.
84 | Meaning you simply need to not add units to your code.
85 |
86 | However, the basics of dimensional checking is that you can declare a unit and assign a unit to a value,
87 | such as
88 |
89 | ```pedant
90 | unit person
91 | average_household_size = 4.7 person
92 | ```
93 |
94 | The "unit person" line declares a unit "person" to be used later, which is then
95 | used in the next line
96 |
97 | This indicates that average_household_size is in units `person`.
98 |
99 | The checker is really simple, it basically follows two main rules:
100 |
101 | - When multiplying two values together, we must also multiply the units. Also you divide the units when you divide two values
102 | - You cannot add or subtract two values of different units
103 |
104 | So for instance, the following construction is valid:
105 | ```pedant
106 | unit bag apple
107 | bag_count = bag
108 | apples_per_bag = apple bag-1
109 | total_apples = apples_per_bag * bag_count
110 | ```
111 |
112 | as `total_apples` would be of units `apple`.
113 |
114 | (Also note that you can put multiple units in the same unit declaration)
115 |
116 | But this is invalid:
117 |
118 | ```pedant
119 | unit bag apple
120 | bag_count = bag
121 | apples_per_bag = apple bag-1
122 | total_apples = apples_per_bag + bag_count
123 | ```
124 |
125 | As you are adding two values of different units.
126 |
127 | This helps you ensure that the math and reasoning behind your calculations are correct.
128 |
129 | As above in `bag-1`, a dimension can have a number after it that indicates what the power of that dimension is. In this case, this means `1/bag`, or "per bag". You can also do things like `m2` to mean meters squared.
130 |
131 | # More advanced syntax
132 |
133 | ## Lists
134 |
135 | Lists in pedant are represented through standard array syntax:
136 |
137 | ```pedant
138 | one_to_five = [1, 2, 3, 4, 5]
139 | ```
140 |
141 | Empty arrays are not allowed.
142 |
143 | You can do all operations you can on normal numbers on lists. The operations
144 | work pointwise, so:
145 | ```pedant
146 | // add one to each number
147 | two_to_six = one_to_five + 1
148 | // [2, 3, 4, 5, 6]
149 |
 150 | // multiply each number by two
151 | two_to_ten = one_to_five * 2
152 | // [2, 4, 6, 8, 10]
153 | ```
154 |
155 | ## Records
 156 | A record type is a type that contains a collection of other different types
157 | associated with different keys. In pedant, they are defined in a similar way
158 | to Haskell
159 |
160 | ```pedant
161 | unit meters
162 | my_point = { x = 20 meters, y = 25 meters }
163 | my_point_x = my_point.x
164 | my_point_y = my_point.y
165 | ```
166 |
167 | ## Functions
168 | You may define your own custom functions, such as:
169 |
170 | ```pedant
171 | present_value payment discount duration =
172 | payment * (1 - discount ^ (-duration)) / ln discount
173 | ```
174 |
175 | In this example, the present_value function has arguments payment, discount and
 176 | duration. These arguments can then be referenced in the function body.
177 |
178 | Full recursion is not supported.
179 |
 180 | Functions in Pedant are all statically typed, except the types of arguments
181 | are inferred from the way that you use them. For instance:
182 |
183 | ```pedant
184 | addOne x = x + 1
185 | ```
186 |
187 | is inferred to be a function which takes a dimensionless value x and adds one
 188 | to it. It manages to work this out as follows: you are adding 1 to x, 1 is
 189 | dimensionless, and you can only add numbers that have the same units, so x must be
190 | dimensionless.
191 |
192 | If for instance, the function was:
193 |
194 | ```pedant
 195 | addOneUsd x = x + 1 usd
197 | ```
198 | Then the function can be inferred to take a variable x which is in usd and return
199 | a value in usd.
200 |
201 | To represent function type signatures, we write `usd -> usd`. Meaning a function
202 | which takes usd and returns usd.
203 |
204 | Functions can be polymorphic, say for instance:
205 |
206 | ```pedant
207 | multUsd x = x * 2 usd
208 | ```
209 |
210 | is of type `'a -> 'a usd`. Types like `'a 'b 'c` etc are polymorphic types, meaning
211 | that they could be anything that the user wants to put in them. This says that
 212 | the function adds the usd dimension to whatever dimension `'a` ends up being.
213 |
214 | ## Extension of Dimensional Analysis
215 | For my purposes, this actually wasn't a good enough definition of
216 | dimensional analysis. For instance, in standard dimensional analysis, you cannot take the log or exponent of a dimensionful quantity. However, in the field of economics, you often have a time variable in an exponent when calculating things like interest or discount rates:
217 |
218 | ```pedant
 219 | principle_plus_interest = principle * (rate ^ duration)
220 | ```
221 |
 222 | As such, I designed an extension to dimensional analysis that allows
223 | you to take powers of dimensionful quantities.
224 |
225 | A power dimension is a dimension that starts with a `^` character, such as:
226 |
227 | ```pedant
228 | unit year
229 | discount_rate = 1.04 ^year-1
230 | ```
231 |
232 | There are only two valid places you can put a variable with a power dimension, in a logarithm, or the base of an exponential. For instance:
233 |
234 | ```pedant
235 | value = start_value * (discount_rate ^ (-3 year))
236 | ```
237 |
238 | In this case, the dimension of the discount_rate and the exponent multiply together, being `year-1` and `year`, so they cancel each other out.
239 |
240 |
241 | In a logarithm:
242 | ```pedant
243 | value = ln(discount_rate)
244 | ```
245 | The power dimension simply transforms back into a normal dimension, into `year-1`.
246 |
--------------------------------------------------------------------------------
/Setup.hs:
--------------------------------------------------------------------------------
1 | module Main where
2 |
3 | import Distribution.Simple
4 | main = defaultMain
--------------------------------------------------------------------------------
/_CoqProject:
--------------------------------------------------------------------------------
1 | -R . Pedant
2 | theories/Numerics.v
3 | theories/MyExtraction.v
4 |
--------------------------------------------------------------------------------
/app/Main.hs:
--------------------------------------------------------------------------------
1 | module Main where
2 |
3 | import Pedant
4 |
5 | main :: IO ()
6 | main = pedantMain
7 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 |
--------------------------------------------------------------------------------
/docs/.nojekyll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hazelfire/pedant/02428bd1aa8a7d2ad52c6610f9dda0f75df491fe/docs/.nojekyll
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | # Pedant
2 |
3 | Pedant is a minimal math DSL. It's originally designed for use in cost
4 | effectiveness analysis. However, it can be used for any important
5 | calculations.
6 |
7 | The goal of pedant is to make sure that it's difficult or impossible to
8 | make math and stats errors, and also allow for the full enumeration of the
9 | assumptions used for a calculation. Currently, its only feature is
10 | dimensional analysis, but I'm planning to add stats and constraints on
11 | stats in the future.
12 |
13 | It might be better to think of pedant as a replacement for excel rather
14 | than a programming language.
15 |
16 | ## Quick Start
 17 | To get started, we recommend visiting our [Installation Guide](installation.md). Then looking through some [examples](https://github.com/Hazelfire/pedant/tree/main/examples). Pedant should be pretty easy to get the hang of for the basics.
18 |
--------------------------------------------------------------------------------
/docs/_sidebar.md:
--------------------------------------------------------------------------------
1 | * [Home](/)
2 | * [Installation](installation.md)
3 | * [Contributing](contributing.md)
4 | * [Syntax](syntax.md)
5 | * [Theory](theory.md)
6 | * [Roadmap](roadmap.md)
7 | * [Operations](operations.md)
8 |
--------------------------------------------------------------------------------
/docs/confusions.md:
--------------------------------------------------------------------------------
1 | # Frequently Asked Questions
2 |
3 | Here's a list of commonly confusing elements about pedant. We're working to make
 4 | the language as understandable as possible, but there are some compromises
5 | made that you may stumble across.
6 |
7 | ## Units and powers
8 |
9 | A squared unit, such as 1 second squared, is represented as `1 seconds2`.
10 |
 11 | If you want to find the area of a square paddock, you cannot do
12 |
13 | ```pedant
14 | width = 5 meters
15 | area = width^2
16 | ```
17 |
18 | You must multiply it with itself, like:
19 |
20 | ```pedant
21 | width = 5 meters
22 | area = width * width
23 | ```
24 |
25 | To understand why, and to see a discussion about this, see [issue #5](https://github.com/Hazelfire/pedant/issues/5)
26 |
27 | ## Functions
28 |
29 | Functions currently work the same way that
30 |
--------------------------------------------------------------------------------
/docs/contributing.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | If you want to contribute to this project I would recommend checking out the GitHub issues.
4 | If you aren't familiar with Haskell and want to learn more, looking for issues
5 | tag "good first issue" is a good place to start.
6 |
7 | ## Making cost effectiveness analysis
8 |
 9 | One of the best ways that you can contribute to the project is trying out the
 10 | language and giving me feedback. I would recommend trying to create a
11 | cost-effectiveness analysis yourself using the language and identifying any
 12 | struggles that you encounter and reporting back in GitHub issues.
13 |
14 | If you're looking for something to do a cost-effectiveness analysis on I would
15 | recommend trying to complete [our coverage](https://github.com/Hazelfire/pedant/tree/main/examples/givewell) of
16 | [GiveWell's cost-effectiveness analysis](https://www.givewell.org/how-we-work/our-criteria/cost-effectiveness/cost-effectiveness-models).
17 | If you want to make a new Cost Effectiveness Analysis, I would recommend
 18 | looking through institutions in the [effective institutions project](https://effectiveinstitutionsproject.org/) as well as [Nuno's
19 | shallow evaluations of longtermist organisations](https://forum.effectivealtruism.org/posts/xmmqDdGqNZq5RELer/shallow-evaluations-of-longtermist-organizations).
20 |
--------------------------------------------------------------------------------
/docs/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Pedant Documentation
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/docs/installation.md:
--------------------------------------------------------------------------------
1 | # Installing pedant
2 |
3 | So far, this project has only ever been compiled on Linux systems.
4 | However, if you are on Windows or Mac, there is no particular reason that
5 | this project would be difficult to build, as it only uses a standard Haskell stack to compile.
6 |
7 | ## Nix
8 |
9 | The main developer uses a nix based system.
10 |
11 | Provided with this project is a `shell.nix`, running `nix-shell` in the
12 | root directory of this project should give you all the tools you need to
13 | compile. All you need to do after this is to:
14 |
15 | ```
16 | cabal build
17 | cabal install
18 | ```
19 |
20 | to install pedant as a command line tool. Remember to add `~/.cabal/bin` to your `$PATH` to get access to the tool.
21 |
22 | ## Ubuntu and Debian
23 |
24 | There is a `make-ubuntu.bash` in the main directory of the project that
25 | you can use to build this project, or guide you through the steps
26 | required.
27 |
28 | ## Other systems
29 |
30 | To install this, you will need
31 | [Stack](https://docs.haskellstack.org/en/stable/README/) and
32 | [Cabal-install](https://www.haskell.org/cabal/). Following similar
33 | instructions in `make-ubuntu.bash` should help you build pedant.
34 |
--------------------------------------------------------------------------------
/docs/operations.md:
--------------------------------------------------------------------------------
1 | # Operations
2 |
3 | This is a table of what is valid and not valid in pedant, and how operations
4 | change units.
5 |
6 | A full table of operations in spreadsheet form [is available here](https://docs.google.com/spreadsheets/d/1KplvmW2t0QW4mk1WbNkQLf2_ZxG0Lvd6aq14nHqMJtI/edit?usp=sharing).
7 |
8 | ## Dimensionless quantities
9 |
10 | Dimensionless quantities can be `+`, `-`, `*`, `/`, `^` to other dimensionless
11 | quantities to make dimensionless quantities. `ln x` where x is a dimensionless
12 | quantity is also dimensionless.
13 |
14 | This has the curious effect of allowing you to, if you wish, write calculations
15 | without units. All operations with numbers without units are valid.
16 |
17 | ## Normal units
18 |
19 | A normal unit is a number such as `5 years` or `2 meters`. A normal unit can
20 | be `+` and `-` with other items of the same unit. When `*` or `/`, it will multiply
21 | and divide the units.
22 |
23 | Normal units cannot be used in the base of an `^`. Normal
24 | units can be used in the exponent of `^`. If the base is a dimensionless quantity,
25 | it becomes the power unit version of a normal unit, say ` 1 ^ (5 years)` becomes
26 | units `^years`. If the base is a power, then the power unit multiplies together,
27 | For instance `(1 ^meters)^(5 years)` is in the units `^meters years`. If the
28 | units cancel out in the exponent, say `(1 ^years-1)^(5 years)` then the resulting
29 | unit is dimensionless.
30 |
31 | Taking the `ln x` of a normal unit is technically still dimensionful according
32 | to the [theory](theory.md), however I have yet to find practical use for the construct.
33 |
34 | # Power Units
35 | Power units cannot be `+` or `-` to any unit, including itself. `*` and `/` two
36 | items of the same power unit combine them into the same power unit. Say `5 ^years-1 * 10 ^years-1`
37 | is of units `^years-1` and so is `5 ^years-1 / 10 ^years-1`. Power units can be
38 | used in the base but not the exponent of a `^`, and the behaviour is detailed
39 | in the Normal Units section.
40 |
41 | Taking the logarithm of a power unit gives the normal unit version. For instance
42 | `ln (5 ^years-1)` is in units `years-1`.
43 |
--------------------------------------------------------------------------------
/docs/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "devDependencies": {
3 | "docsify-cli": "^4.4.3"
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/docs/roadmap.md:
--------------------------------------------------------------------------------
1 | # Roadmap
2 |
 3 | The current roadmap for pedant includes three main stages:
4 |
5 | - Creating a basic language that can do dimensional analysis over calculations to check for missing assumptions
6 | - Create a language that can do Monte Carlo experiments over these calculations to account for uncertainty
7 | - Create a web interface to this language to make the calculations more accessible.
8 |
9 | The details of these stages are outlined below, as well as thoughts of possible
10 | developments in these stages.
11 |
12 | ## Stage 1, Basic Language
13 |
14 | The duration of this phase is the 25th of November to the 25th of December.
15 |
16 | This phase is designed to get down a basic workable language that can check for
17 | dimensional errors within calculations. The features that are included within this
18 | stage are:
19 |
20 | - Dimensional checker
21 | - Adding records and lists
22 | - Functions with type inference
23 | - Modules
24 | - A basic language server to check for error quickly
25 | - A testing suite
26 |
27 | Things that would be nice but we might not get time for include:
28 |
29 | - Representing all of GiveWell's work within Pedant
30 |
31 | ## Stage 2, Uncertainty
32 |
33 | The duration of this phase is from the 25th of December to the 25th of January.
34 |
35 | This phase is designed to allow creating distributions of possible input parameters,
36 | and therefore determine how uncertain the results of calculations are. The features
37 | that are to be included in this stage are:
38 |
39 | - Writing in distributions instead of numbers for calculations, possibly in a syntax
40 | similar to [Squiggle](https://squiggle-language.com/).
41 | - Checking uncertainty of the result of calculations
42 |
43 | Things that would be nice to have, but we might not get time for include:
44 |
45 | - Doing sensitivity analysis over parameters, to determine which parameters need
46 | to be made more certain.
47 | - Doing calculations over distributions, such as value of information, which could
48 | be valuable in evaluating research.
49 |
50 |
51 | ## Stage 3, Web interface
52 |
53 | The duration of this phase is from the 25th of January to the 25th of February.
54 |
55 | This phase is designed to make it easier to access cost effectiveness analysis
56 | by providing a web interface. What is to be included in this phase is:
57 |
58 | - Reading pedant files and presenting them in a web interface, like a column
59 | of excel sheets
60 | - Reading doc comments in pedant files to add to the web interface.
61 | - Including sliders to change around the values of different parameters to calculate
62 | the final result.
63 |
64 | Some things that would be nice to have, but we might not get time for include:
65 |
66 | - Being able to write pedant files in a browser.
67 |
--------------------------------------------------------------------------------
/docs/syntax.md:
--------------------------------------------------------------------------------
1 | # Syntax
2 | The syntax of pedant is really simple to learn. It's simply a collection of
3 | math formulas. It would be easier to understand by simply looking at the examples
4 | in `examples`.
5 |
 6 | Each line can either be empty or have a declaration of a variable on it. The variable is equated to a term:
7 |
8 | ```pedant
9 | donation_size = 100000
10 | ```
11 |
12 | On the right is the value for that term.
13 |
14 | That value can make up an expression:
15 |
16 | ```pedant
17 | consumption_possible_by_funds =
18 | (1 - percent_of_transfers_invested)
19 | * size_of_transfer_per_person
20 | / duration_of_initial_consumption
21 | ```
22 |
23 | If you want, you can break expressions over multiple lines, with the condition
24 | that every line within the expression must be indented.
25 |
26 | The available operators are:
27 | - `*` multiplication
28 | - `/` division
29 | - `^` exponentiation
30 | - `+` addition
31 | - `-` subtraction
32 |
33 | There is also one inbuilt function currently available, the natural logarithm: `ln`.
34 | Functions are applied by whitespace, like functional languages such as Haskell.
35 | For instance `ln 5`.
36 |
37 | Parentheses can be used to impact the order of operations. Order of operations
38 | follow BODMAS, as you would expect.
39 |
40 | When declaring a value, only values higher up in the file can be referenced.
41 | This ensures there is no circularity in definitions.
42 |
43 | C style line and block comments are also available:
44 |
45 | ```pedant
46 | // This is a line comment
47 | /*
48 | This is a block comment
49 | */
50 | ```
51 | ## Lists
52 |
53 | Lists in pedant are represented through standard array syntax:
54 |
55 | ```pedant
56 | one_to_five = [1, 2, 3, 4, 5]
57 | ```
58 |
59 | Empty arrays are not allowed.
60 |
61 | You can do all operations you can on normal numbers on lists. The operations
62 | work pointwise, so:
63 | ```pedant
64 | // add one to each number
65 | two_to_six = one_to_five + 1
66 | // [2, 3, 4, 5, 6]
67 |
 68 | // multiply each number by two
69 | two_to_ten = one_to_five * 2
70 | // [2, 4, 6, 8, 10]
71 | ```
72 |
73 | ## Records
 74 | A record type is a type that contains a collection of other different types
75 | associated with different keys. In pedant, they are defined in a similar way
76 | to Haskell
77 |
78 | ```pedant
79 | unit meters
80 | my_point = { x = 20 meters, y = 25 meters }
81 | my_point_x = my_point.x
82 | my_point_y = my_point.y
83 | ```
84 |
85 | ## Functions
86 | You may define your own custom functions, such as:
87 |
88 | ```pedant
89 | present_value payment discount duration =
90 | payment * (1 - discount ^ (-duration)) / ln discount
91 | ```
92 |
93 | In this example, the present_value function has arguments payment, discount and
 94 | duration. These arguments can then be referenced in the function body.
95 |
96 | Full recursion is not supported.
97 |
 98 | Functions in Pedant are all statically typed, except the types of arguments
99 | are inferred from the way that you use them. For instance:
100 |
101 | ```pedant
102 | addOne x = x + 1
103 | ```
104 |
105 | is inferred to be a function which takes a dimensionless value x and adds one
 106 | to it. It manages to work this out as follows: you are adding 1 to x, 1 is
 107 | dimensionless, and you can only add numbers that have the same units, so x must be
108 | dimensionless.
109 |
110 | If for instance, the function was:
111 |
112 | ```pedant
 113 | addOneUsd x = x + 1 usd
115 | ```
116 | Then the function can be inferred to take a variable x which is in usd and return
117 | a value in usd.
118 |
119 | To represent function type signatures, we write `usd -> usd`. Meaning a function
120 | which takes usd and returns usd.
121 |
122 | Functions can be polymorphic, say for instance:
123 |
124 | ```pedant
125 | multUsd x = x * 2 usd
126 | ```
127 |
128 | is of type `'a -> 'a usd`. Types like `'a 'b 'c` etc are polymorphic types, meaning
129 | that they could be anything that the user wants to put in them. This says that
 130 | the function adds the usd dimension to whatever dimension `'a` ends up being.
131 |
132 |
133 | ## Extension of Dimensional Analysis
134 | For my purposes, this actually wasn't a good enough definition of
135 | dimensional analysis. For instance, in standard dimensional analysis, you cannot take the log or exponent of a dimensionful quantity. However, in the field of economics, you often have a time variable in an exponent when calculating things like interest or discount rates:
136 |
137 | ```pedant
138 | principal_plus_interest = principal * (rate ^ duration)
139 | ```
140 |
141 | As of such, I designed an extension to dimensional analysis that allows
142 | you to take powers of dimensionful quantities.
143 |
144 | A power dimension is a dimension that starts with a `^` character, such as:
145 |
146 | ```pedant
147 | unit year
148 | discount_rate = 1.04 ^year-1
149 | ```
150 |
151 | There are only two valid places you can put a variable with a power dimension, in a logarithm, or the base of an exponential. For instance:
152 |
153 | ```pedant
154 | value = start_value * (discount_rate ^ (-3 year))
155 | ```
156 |
157 | In this case, the dimension of the discount_rate and the exponent multiply together, being `year-1` and `year`, so they cancel each other out.
158 |
159 |
160 | In a logarithm:
161 | ```pedant
162 | value = ln(discount_rate)
163 | ```
164 | The power dimension simply transforms back into a normal dimension, into `year-1`.
165 |
--------------------------------------------------------------------------------
/docs/theory.md:
--------------------------------------------------------------------------------
1 | # Dimensional Analysis Theory
2 |
3 | Dimensional analysis is a fairly simple concept with a very complicated
4 | name. The ideas come from physics, where some calculations are not
5 | physical, mainly because it would be unclear what the units would be of
6 | that calculation. For instance:
7 |
8 | You can add two things of the same units:
9 |
10 | ```pedant
11 | unit meters
12 | length1 = 2 meters
13 | length2 = 4 meters
14 | total_length = length1 + length2
15 | ```
16 |
17 | Because if you add 2 meters and 4 meters, you end up with 6 meters.
18 | However, adding meters to seconds:
19 |
20 | ```pedant
21 | unit seconds meters
22 | time = 2 seconds
23 | length = 4 meters
24 | unknown = time + length
25 | ```
26 |
27 | 2 seconds + 4 meters is 6... somethings. We wouldn't really know what unit
28 | to give this 6. As such, this is not a "physical" calculation, and so
29 | you would never find this calculation meaning anything in the real
30 | world.
31 |
32 | This is useful! Because it, in a sense, allows us to "Type Check" our
33 | calculations. Where some calculations are invalid, and others are not. This type checking allows you to find missing assumptions and errors within your calculations, as Sam Nolan has done with [GiveDirectly](https://forum.effectivealtruism.org/posts/WmQwtYEajNDuPdyZx/type-checking-givewell-s-givedirectly-cost-effective).
34 |
35 | In physics, there are basically two rules in dimensional analysis:
36 |
37 | - When you multiply or divide two numbers, you must multiply or divide the units.
38 | - You cannot add, subtract or equate two calculations that are of different units.
39 |
40 | This is usually fine for the purposes of physics. But for the purposes of
41 | economics, this leaves some important considerations out. Notably, what
42 | about powers and logarithms?
43 |
44 | ## An extension of Dimensional Analysis
45 |
46 | One very common construction in economics is that of interest, most notably:
47 |
48 | (This isn't valid pedant, it won't compile)
49 | ```pedant
50 | unit years usd
51 | principal = 1000 usd
52 | duration = 10 years
53 | interest_rate = 1.05
54 | after_interest = principal * interest_rate ^ duration
55 | ```
56 |
57 | Normally, when dealing with powers within physics, the general rule is
58 | that the base and the exponent must both be dimensionless. If either of
59 | them are dimensionful, then this is not valid.
60 |
61 | However, in this case, we have an exponent and a base that both seem to
62 | have dimensions. Most notably, the exponent, `duration`, is the units `years`, which would violate this restriction.
63 |
64 | What's even more interesting in that the `interest_rate` is that you can also think that maybe `interest_rate` should have a unit. We might say that the interest rate is "5% per year", so clearly it has some sort of unit that depends on years.
65 |
66 | The next thing to notice however, is that if `interest_rate` does have a unit, it isn't a normal unit. For instance, we can't add interest rates. a rate of 105% plus another rate of 105% getting to 210% doesn't really make any sense. It does however make sense to multiply the interest rates, for instance 105% times 105% is just the interest rate over two years.
67 |
68 | Because of this observation, pedant defines a special type of unit for this case, called a "power unit". A power unit starts with a `^`. For instance, in this case, the unit of `interest_rate` is of `^year-1`.
69 |
70 | Power units can be used in only two places, the base of an exponent and inside a logarithm. The exact details on what happens to each of them in those areas are in the [operations table](operations.md).
71 |
72 | ## The beginnings of a theory of dimensionality
73 | This begs the question, are there any more extensions to dimensional analysis? What is the definition of dimensional analysis? What defines something as physical mathematically?
74 |
75 | Our definition of a dimensional quantity is based off the concept of converting
76 | units to other equivalent units. We define a conversion function. A conversion
77 | function simply converts a number from one set of units to an equivalent set of
78 | units.
79 |
80 | For instance, if we have 6 meters, and we want to convert this into centimeters,
81 | we have 600 centimeters. The conversion function, knowing the result, its units
82 | and what units to convert it into, can validly convert from one set to the other.
83 | The conversion function for going from meters to centimeters is simply multiplying
84 | by 100.
85 |
86 | The conversion function shouldn't care about how we managed to get this 6 meters,
87 | it shouldn't need to know. For instance:
88 |
89 | - 4 meters + 2 meters = 6 meters, and 400 centimeters + 200 centimeters = 600 centimeters
90 | - 5 meters + 1 meter = 6 meters, and 500 centimeters + 100 centimeters = 600 centimeters.
91 |
92 | A valid conversion function works no matter how it was created.
93 |
94 | Now let's prove that the expression 4 meters + 2 seconds = 6 unknowns is not dimensional by
95 | this definition. We will use a proof by contradiction.
96 |
97 | Assuming the 6 unknowns does have a conversion function, we should be able to convert
98 | its meters into centimeters. Because we constructed our 6 with 4 meters + 2 seconds,
99 | this becomes 400 centimeters + 2 seconds = 402 unknowns.
100 |
101 | However, if this 6 unknowns was constructed in a different way, say 5 meters + 1 second = 6 unknowns.
102 | Then the result would be 500 centimeters + 1 second = 501 unknowns.
103 |
104 | This means that this conversion function, converting from meters to centimeters,
105 | would give two different answers (and indeed, can give an infinite number of answers)
106 | where these answers depend on information it shouldn't know. Therefore, this function
107 | is not a function, and 4 meters + 2 seconds is not dimensional.
108 |
109 | This structure of proof is used to determine whether a calculation is dimensional
110 | or not dimensional.
111 |
112 | ## A full formal specification
113 |
114 | To do this proof formally, we will have to lay down the proper definitions.
115 |
116 | A calculation is defined by:
117 |
118 | $$
119 | f(x_1,x_2, ... x_n, y_1, y_2, ... , y_m)
120 | $$
121 |
122 | Where $x_1, x_2, ..., x_n$ are the values in the function, and $y_1, y_2, ..., y_m$
123 | are the units used in the calculation. For instance, adding two values that
124 | have the same unit can be represented as:
125 |
126 | $$
127 | f(x_1, x_2, y_1) = x_1y_1 + x_2y_1
128 | $$
129 |
130 | And adding two units that are different can be represented as
131 |
132 | $$
133 | f(x_1, x_2, y_1, y_2) = x_1y_1 + x_2y_2
134 | $$
135 |
136 | Normal units are specified by simply multiplying the unit by the value. Dimensionless
137 | constants are represented as just the value with no unit multiplied to it.
138 |
139 | We define a *conversion function* $g$ with the following signature:
140 |
141 | $$
142 | g(u, y_1, y_2, ..., y_m, y_1', y_2', ..., y_m')
143 | $$
144 |
145 | This conversion function takes a value $u$, and converts the units of the value.
146 | It is given a set of old units, $y_1, y_2, ..., y_m$ and a set of new units to
147 | convert these to $y_1', y_2', ... y_m'$.
148 |
149 | The function $g$ is called a conversion function for $f$ if:
150 |
151 | $$
152 | g(f(x_1,x_2, ..., x_n, y_1, y_2, ..., y_m), y_1, y_2, ..., y_m, y_1', y_2', ..., y_m') = f(x_1, x_2, ..., x_n, y_1', y_2', ..., y_m')
153 | $$
154 |
155 | For all possible values of $x_i$, $y_i$ and $y_i'$.
156 |
157 | For example, for the following function (adding two items of the same units $y_1$).
158 |
159 | $$
160 | f(x_1, x_2, y_1) = x_1y_1 + x_2y_1
161 | $$
162 |
163 | The conversion function is:
164 |
165 | $$
166 | g(u, y_1, y_1') = \frac{uy_1'}{y_1}
167 | $$
168 |
169 | As
170 |
171 | $$
172 | g(f(x_1, x_2, y_1), y_1, y_1') = f(x_1, x_2, y_1') \\
173 | \frac{(x_1y_1 + x_2y_1)y_1'}{y_1} = x_1y_1' + x_2y_1' \\
174 | x_1y_1' + x_2y_1' = x_1y_1' + x_2y_1'
175 | $$
176 |
177 | However, for adding two items that are of different units:
178 |
179 | $$
180 | f(x_1, x_2, y_1, y_2) = x_1y_1 + x_2y_2
181 | $$
182 |
183 | We can prove the conversion function doesn't exist. Because assuming
184 | $x_1 = 4$, $x_2 = 2$, $y_1 = y_2 = y_2' = 1$ and $y_1' = 10$.
185 |
186 | $$
187 | g(f(x_1, x_2, y_1, y_2), y_1, y_2, y_1', y_2') = f(x_1, x_2, y_1',y_2') \\
188 | g(f(4, 2, 1, 1), 1, 1, 10, 1) = f(4, 2, 10, 1) \\
189 | g(6, 1, 1, 10, 1) = 42
190 | $$
191 |
192 | However, if we were to take another set of assumptions, particularly
193 | $x_1 = 5$, $x_2 = 1$, $y_1 = y_2 = y_2' = 1$ and $y_1' = 10$. Then
194 | we would have
195 |
196 | $$
197 | g(f(x_1, x_2, y_1, y_2), y_1, y_2, y_1', y_2') = f(x_1, x_2, y_1',y_2') \\
198 | g(f(5, 1, 1, 1), 1, 1, 10, 1) = f(5, 1, 10, 1) \\
199 | g(6, 1, 1, 10, 1) = 51
200 | $$
201 |
202 | Therefore we come across a contradiction, as
203 |
204 | $$
205 | g(6, 1, 1, 10, 1) = 42 = 51
206 | $$
207 |
208 | So therefore $g$ does not exist.
209 |
210 | These types of proofs can be used to show a particular calculation to be dimensionful
211 | or not dimensionful according to the theory.
212 |
213 | ## New units
214 |
215 | In [operations](operations.md), a spreadsheet is provided with all the possible
216 | operations. Each operation is considered invalid if it does not have a conversion
217 | function. This however leaves some interesting units that are not yet included
218 | in this calculation.
219 |
220 | ### Logarithmic units
221 | The first one, which is not particularly interesting or useful, is that it seems
222 | possible to have the logarithm of a dimensionful quantity:
223 |
224 | $$
225 | f(x_1, y_1) = \ln (x_1y_1)
226 | $$
227 |
228 | With conversion function
229 |
230 | $$
231 | g(u, y_1, y_1') = \ln (\frac{y_1'e^u}{y_1})
232 | $$
233 |
234 | This unit is particularly useless as the only operations that seem to be valid
235 | on this unit are:
236 |
237 | 1. `e^x`, to transform it back into a normal unit
238 | 2. Adding and subtracting it from dimensionless quantities to get back the logarithmic units
239 | 3. Adding it to another logarithmic unit to multiply the units.
240 |
241 | Because this unit has so few interesting properties, doesn't represent any
242 | useful notion, and breaks many of the assumptions that one makes when thinking
243 | about units, I have decided not to include it.
244 |
245 | ### Dimensionful constants
246 |
247 | Dimensionful constants however, are an interesting concept that I have so far
248 | seen two use cases of. They are however very confusing.
249 |
250 | The first use case is a strange problem that shows up with interest rates. When
251 | we normally talk about interest rates, we might say that the rate is `4%`. For
252 | instance, in GiveWell's Cost Effectiveness Analysis,
253 | the discount rate is `4%`, and is written down as `0.04`.
254 |
255 | However, in pedant, you'll see that it has been written down as `1.04 ^years-1`.
256 | Why? Because to write it down as `0.04`, it would have a unit that can't currently
257 | be represented in pedant.
258 |
259 | Let's take a look at the present value formula, one used pervasively in GiveWell's
260 | Cost Effectiveness Analysis:
261 |
262 | $$
263 | pv(p,r,d) = p\frac{1- (1 + r)^{-d}}{\ln (1 + r)}
264 | $$
265 |
266 | Where $p$ is the payment, $r$ is the discount rate, and $d$ is the duration.
267 | You'll notice that every time $r$ is used, 1 is added to it. This is because, in
268 | the terms of pedant, it becomes a power unit when 1 is added to it.
269 |
270 | This means that `0.04` is in a sense the unit `(^years-1) - 1`. And you must
271 | add 1 to make it a proper `^years-1`.
272 |
273 | This is defined in our theory, where the calculation is:
274 |
275 | $$
276 | f(x_1, y_1) = x_1^{\frac{1}{y_1}} - 1
277 | $$
278 |
279 | Which has the conversion function
280 |
281 | $$
282 | g(u, y_1, y_1') = (u + 1)^{\frac{y_1}{y_1'}} - 1
283 | $$
284 |
285 | A second use case for dimensionful constants came up when writing very fine units
286 | for GiveDirectly's cost effectiveness analysis.
287 |
288 | A core element of the analysis is the idea that a portion of the payments are directed
289 | towards investments, and the other half are immediately consumed. As much as this
290 | seems intuitive, it's difficult to write in pedant.
291 |
292 | Say you have much finer units than we might be used to, and the portion directed
293 | towards investments are in units `usdinvested usdreceived-1`, meaning "Amount
294 | of money invested of the amount received", then you would intuitively define
295 | Amount of money consumed to be `usdconsumed usdreceived-1`. However, the issue
296 | comes when these are related. For instance, the amount of transfers invested in
297 | the analysis is `39%`, or in pedant `0.39 usdinvested usdreceived-1`, but to
298 | calculate the amount consumed, you must write `1 - 0.39 usdinvested usdreceived-1`.
299 | This fails because you can't subtract a dimensionless quantity from a dimensionful
300 | one.
301 |
302 | One possible way to fix this issue is to add in a special type of unit aliasing,
303 | we know that `usdinvested usdreceived-1 + usdconsumed usdreceived-1 = 1`, so
304 | what if we were to define
305 |
306 | `unit alias usdconsumed usdreceived-1 = 1 - usdinvested usdreceived-1`
307 |
308 | This tells pedant that when you have 1 - `usdinvested usdreceived-1` you get
309 | `usdconsumed usdreceived-1`. However, in doing this, what shows up is yet another
310 | dimensionful constant (the 1 in the definition).
311 |
312 | I like adding this definition into the pedant file, as it explicitly represents
313 | an assumption that I would like to be picked up and tracked.
314 |
315 | I don't currently fully understand dimensionful constants, and what they mean,
316 | and if they have already been investigated.
317 |
--------------------------------------------------------------------------------
/editor/code/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
--------------------------------------------------------------------------------
/editor/code/client/out/extension.js:
--------------------------------------------------------------------------------
1 | "use strict";
2 | Object.defineProperty(exports, "__esModule", { value: true });
3 | exports.deactivate = exports.activate = void 0;
4 | const vscode = require("vscode");
5 | const node_1 = require("vscode-languageclient/node");
6 | let client;
7 | function activate(context) { // Compiled from client/src/extension.ts (see extension.js.map) — regenerate with tsc rather than editing by hand. Starts the pedant language server for .ped files.
8 |     const serverOptions = { // Run and debug are identical: spawn the `pedant` binary in LSP mode over stdio.
9 |         run: {
10 |             command: 'pedant',
11 |             args: ['lsp'],
12 |             transport: node_1.TransportKind.stdio
13 |         },
14 |         debug: {
15 |             command: 'pedant',
16 |             args: ['lsp'],
17 |             transport: node_1.TransportKind.stdio
18 |         }
19 |     };
20 |     const clientOptions = { // Attach to on-disk pedant documents; notify the server when workspace .ped files change.
21 |         documentSelector: [{ scheme: 'file', language: 'pedant' }],
22 |         synchronize: {
23 |             fileEvents: vscode.workspace.createFileSystemWatcher('**/*.ped')
24 |         }
25 |     };
26 |     client = new node_1.LanguageClient('pedantLanguageServer', 'Pedant Language Server', serverOptions, clientOptions); // Kept in the module-level `client` so deactivate() can stop it.
27 |     client.start();
28 | }
29 | exports.activate = activate;
30 | function deactivate() { // Compiled output (regenerate via tsc). Stops the language client if activate() created one; returns undefined otherwise.
31 |     if (!client) {
32 |         return undefined;
33 |     }
34 |     return client.stop();
35 | }
36 | exports.deactivate = deactivate;
37 | //# sourceMappingURL=extension.js.map
--------------------------------------------------------------------------------
/editor/code/client/out/extension.js.map:
--------------------------------------------------------------------------------
1 | {"version":3,"file":"extension.js","sourceRoot":"","sources":["../src/extension.ts"],"names":[],"mappings":";;;AAAA,iCAAiC;AACjC,qDAAiH;AAEjH,IAAI,MAAsB,CAAC;AAE3B,SAAgB,QAAQ,CAAC,OAAgC;IACrD,MAAM,aAAa,GAAkB;QACjC,GAAG,EAAE;YACD,OAAO,EAAE,QAAQ;YACjB,IAAI,EAAE,CAAC,KAAK,CAAC;YACb,SAAS,EAAE,oBAAa,CAAC,KAAK;SACjC;QACD,KAAK,EAAE;YACH,OAAO,EAAE,QAAQ;YACjB,IAAI,EAAE,CAAC,KAAK,CAAC;YACb,SAAS,EAAE,oBAAa,CAAC,KAAK;SACjC;KACJ,CAAC;IAEF,MAAM,aAAa,GAA0B;QACzC,gBAAgB,EAAE,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,CAAC;QAC1D,WAAW,EAAE;YACT,UAAU,EAAE,MAAM,CAAC,SAAS,CAAC,uBAAuB,CAAC,UAAU,CAAC;SACnE;KACJ,CAAC;IAEF,MAAM,GAAG,IAAI,qBAAc,CAAC,sBAAsB,EAAE,wBAAwB,EAAE,aAAa,EAAE,aAAa,CAAC,CAAC;IAE5G,MAAM,CAAC,KAAK,EAAE,CAAC;AACnB,CAAC;AAxBD,4BAwBC;AAED,SAAgB,UAAU;IACtB,IAAI,CAAC,MAAM,EAAE;QACT,OAAO,SAAS,CAAC;KACpB;IACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;AACzB,CAAC;AALD,gCAKC"}
--------------------------------------------------------------------------------
/editor/code/client/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "pedant-lsp-client",
3 | "version": "0.0.1",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "pedant-lsp-client",
9 | "version": "0.0.1",
10 | "license": "MIT",
11 | "dependencies": {
12 | "vscode-languageclient": "^8.1.0"
13 | },
14 | "devDependencies": {
15 | "@types/vscode": "^1.75.1",
16 | "@vscode/test-electron": "^2.2.3"
17 | },
18 | "engines": {
19 | "vscode": "^1.75.0"
20 | }
21 | },
22 | "node_modules/@tootallnate/once": {
23 | "version": "1.1.2",
24 | "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz",
25 | "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==",
26 | "dev": true,
27 | "engines": {
28 | "node": ">= 6"
29 | }
30 | },
31 | "node_modules/@types/vscode": {
32 | "version": "1.78.0",
33 | "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.78.0.tgz",
34 | "integrity": "sha512-LJZIJpPvKJ0HVQDqfOy6W4sNKUBBwyDu1Bs8chHBZOe9MNuKTJtidgZ2bqjhmmWpUb0TIIqv47BFUcVmAsgaVA==",
35 | "dev": true
36 | },
37 | "node_modules/@vscode/test-electron": {
38 | "version": "2.3.0",
39 | "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.3.0.tgz",
40 | "integrity": "sha512-fwzA9RtazH1GT/sckYlbxu6t5e4VaMXwCVtyLv4UAG0hP6NTfnMaaG25XCfWqlVwFhBMcQXHBCy5dmz2eLUnkw==",
41 | "dev": true,
42 | "dependencies": {
43 | "http-proxy-agent": "^4.0.1",
44 | "https-proxy-agent": "^5.0.0",
45 | "jszip": "^3.10.1",
46 | "semver": "^7.3.8"
47 | },
48 | "engines": {
49 | "node": ">=16"
50 | }
51 | },
52 | "node_modules/agent-base": {
53 | "version": "6.0.2",
54 | "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
55 | "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
56 | "dev": true,
57 | "dependencies": {
58 | "debug": "4"
59 | },
60 | "engines": {
61 | "node": ">= 6.0.0"
62 | }
63 | },
64 | "node_modules/balanced-match": {
65 | "version": "1.0.2",
66 | "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
67 | "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
68 | },
69 | "node_modules/brace-expansion": {
70 | "version": "2.0.1",
71 | "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
72 | "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
73 | "dependencies": {
74 | "balanced-match": "^1.0.0"
75 | }
76 | },
77 | "node_modules/core-util-is": {
78 | "version": "1.0.3",
79 | "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz",
80 | "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==",
81 | "dev": true
82 | },
83 | "node_modules/debug": {
84 | "version": "4.3.4",
85 | "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
86 | "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
87 | "dev": true,
88 | "dependencies": {
89 | "ms": "2.1.2"
90 | },
91 | "engines": {
92 | "node": ">=6.0"
93 | },
94 | "peerDependenciesMeta": {
95 | "supports-color": {
96 | "optional": true
97 | }
98 | }
99 | },
100 | "node_modules/http-proxy-agent": {
101 | "version": "4.0.1",
102 | "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz",
103 | "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==",
104 | "dev": true,
105 | "dependencies": {
106 | "@tootallnate/once": "1",
107 | "agent-base": "6",
108 | "debug": "4"
109 | },
110 | "engines": {
111 | "node": ">= 6"
112 | }
113 | },
114 | "node_modules/https-proxy-agent": {
115 | "version": "5.0.1",
116 | "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
117 | "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
118 | "dev": true,
119 | "dependencies": {
120 | "agent-base": "6",
121 | "debug": "4"
122 | },
123 | "engines": {
124 | "node": ">= 6"
125 | }
126 | },
127 | "node_modules/immediate": {
128 | "version": "3.0.6",
129 | "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz",
130 | "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==",
131 | "dev": true
132 | },
133 | "node_modules/inherits": {
134 | "version": "2.0.4",
135 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
136 | "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
137 | "dev": true
138 | },
139 | "node_modules/isarray": {
140 | "version": "1.0.0",
141 | "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
142 | "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==",
143 | "dev": true
144 | },
145 | "node_modules/jszip": {
146 | "version": "3.10.1",
147 | "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz",
148 | "integrity": "sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==",
149 | "dev": true,
150 | "dependencies": {
151 | "lie": "~3.3.0",
152 | "pako": "~1.0.2",
153 | "readable-stream": "~2.3.6",
154 | "setimmediate": "^1.0.5"
155 | }
156 | },
157 | "node_modules/lie": {
158 | "version": "3.3.0",
159 | "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz",
160 | "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==",
161 | "dev": true,
162 | "dependencies": {
163 | "immediate": "~3.0.5"
164 | }
165 | },
166 | "node_modules/lru-cache": {
167 | "version": "6.0.0",
168 | "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
169 | "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
170 | "dependencies": {
171 | "yallist": "^4.0.0"
172 | },
173 | "engines": {
174 | "node": ">=10"
175 | }
176 | },
177 | "node_modules/minimatch": {
178 | "version": "5.1.6",
179 | "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
180 | "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==",
181 | "dependencies": {
182 | "brace-expansion": "^2.0.1"
183 | },
184 | "engines": {
185 | "node": ">=10"
186 | }
187 | },
188 | "node_modules/ms": {
189 | "version": "2.1.2",
190 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
191 | "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
192 | "dev": true
193 | },
194 | "node_modules/pako": {
195 | "version": "1.0.11",
196 | "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz",
197 | "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==",
198 | "dev": true
199 | },
200 | "node_modules/process-nextick-args": {
201 | "version": "2.0.1",
202 | "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
203 | "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==",
204 | "dev": true
205 | },
206 | "node_modules/readable-stream": {
207 | "version": "2.3.8",
208 | "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz",
209 | "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==",
210 | "dev": true,
211 | "dependencies": {
212 | "core-util-is": "~1.0.0",
213 | "inherits": "~2.0.3",
214 | "isarray": "~1.0.0",
215 | "process-nextick-args": "~2.0.0",
216 | "safe-buffer": "~5.1.1",
217 | "string_decoder": "~1.1.1",
218 | "util-deprecate": "~1.0.1"
219 | }
220 | },
221 | "node_modules/safe-buffer": {
222 | "version": "5.1.2",
223 | "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
224 | "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==",
225 | "dev": true
226 | },
227 | "node_modules/semver": {
228 | "version": "7.5.0",
229 | "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.0.tgz",
230 | "integrity": "sha512-+XC0AD/R7Q2mPSRuy2Id0+CGTZ98+8f+KvwirxOKIEyid+XSx6HbC63p+O4IndTHuX5Z+JxQ0TghCkO5Cg/2HA==",
231 | "dependencies": {
232 | "lru-cache": "^6.0.0"
233 | },
234 | "bin": {
235 | "semver": "bin/semver.js"
236 | },
237 | "engines": {
238 | "node": ">=10"
239 | }
240 | },
241 | "node_modules/setimmediate": {
242 | "version": "1.0.5",
243 | "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz",
244 | "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==",
245 | "dev": true
246 | },
247 | "node_modules/string_decoder": {
248 | "version": "1.1.1",
249 | "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
250 | "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
251 | "dev": true,
252 | "dependencies": {
253 | "safe-buffer": "~5.1.0"
254 | }
255 | },
256 | "node_modules/util-deprecate": {
257 | "version": "1.0.2",
258 | "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
259 | "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
260 | "dev": true
261 | },
262 | "node_modules/vscode-jsonrpc": {
263 | "version": "8.1.0",
264 | "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.1.0.tgz",
265 | "integrity": "sha512-6TDy/abTQk+zDGYazgbIPc+4JoXdwC8NHU9Pbn4UJP1fehUyZmM4RHp5IthX7A6L5KS30PRui+j+tbbMMMafdw==",
266 | "engines": {
267 | "node": ">=14.0.0"
268 | }
269 | },
270 | "node_modules/vscode-languageclient": {
271 | "version": "8.1.0",
272 | "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.1.0.tgz",
273 | "integrity": "sha512-GL4QdbYUF/XxQlAsvYWZRV3V34kOkpRlvV60/72ghHfsYFnS/v2MANZ9P6sHmxFcZKOse8O+L9G7Czg0NUWing==",
274 | "dependencies": {
275 | "minimatch": "^5.1.0",
276 | "semver": "^7.3.7",
277 | "vscode-languageserver-protocol": "3.17.3"
278 | },
279 | "engines": {
280 | "vscode": "^1.67.0"
281 | }
282 | },
283 | "node_modules/vscode-languageserver-protocol": {
284 | "version": "3.17.3",
285 | "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.3.tgz",
286 | "integrity": "sha512-924/h0AqsMtA5yK22GgMtCYiMdCOtWTSGgUOkgEDX+wk2b0x4sAfLiO4NxBxqbiVtz7K7/1/RgVrVI0NClZwqA==",
287 | "dependencies": {
288 | "vscode-jsonrpc": "8.1.0",
289 | "vscode-languageserver-types": "3.17.3"
290 | }
291 | },
292 | "node_modules/vscode-languageserver-types": {
293 | "version": "3.17.3",
294 | "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.3.tgz",
295 | "integrity": "sha512-SYU4z1dL0PyIMd4Vj8YOqFvHu7Hz/enbWtpfnVbJHU4Nd1YNYx8u0ennumc6h48GQNeOLxmwySmnADouT/AuZA=="
296 | },
297 | "node_modules/yallist": {
298 | "version": "4.0.0",
299 | "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
300 | "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
301 | }
302 | }
303 | }
304 |
--------------------------------------------------------------------------------
/editor/code/client/package.json:
--------------------------------------------------------------------------------
1 |
2 | {
3 | "name": "pedant-lsp-client",
4 | "description": "Pedant Language Server Client",
5 | "author": "Sam Nolan",
6 | "license": "MIT",
7 | "version": "0.0.1",
8 | "engines": {
9 | "vscode": "^1.75.0"
10 | },
11 | "dependencies": {
12 | "vscode-languageclient": "^8.1.0"
13 | },
14 | "devDependencies": {
15 | "@types/vscode": "^1.75.1",
16 | "@vscode/test-electron": "^2.2.3"
17 | }
18 |
19 | }
--------------------------------------------------------------------------------
/editor/code/client/src/extension.ts:
--------------------------------------------------------------------------------
1 | import * as vscode from 'vscode';
2 | import { LanguageClient, LanguageClientOptions, ServerOptions, TransportKind } from 'vscode-languageclient/node';
3 |
4 | let client: LanguageClient;
5 |
6 | export function activate(context: vscode.ExtensionContext) {
7 |     // The same stdio invocation of the pedant binary serves both run and debug.
8 |     const pedantExecutable = {
9 |         command: 'pedant',
10 |         args: ['lsp'],
11 |         transport: TransportKind.stdio
12 |     };
13 |     const serverOptions: ServerOptions = {
14 |         run: pedantExecutable,
15 |         debug: pedantExecutable
16 |     };
17 |
18 |     const clientOptions: LanguageClientOptions = {
19 |         // Attach only to pedant documents stored on disk.
20 |         documentSelector: [{ scheme: 'file', language: 'pedant' }],
21 |         synchronize: {
22 |             // Notify the server whenever a .ped file in the workspace changes.
23 |             fileEvents: vscode.workspace.createFileSystemWatcher('**/*.ped')
24 |         }
25 |     };
26 |
27 |     // Keep the client in module scope so deactivate() can stop it later.
28 |     client = new LanguageClient('pedantLanguageServer', 'Pedant Language Server', serverOptions, clientOptions);
29 |     client.start();
30 | }
31 |
32 | // Shuts down the language client when the extension is deactivated.
33 | // Returns undefined when activate() never constructed a client, otherwise
34 | // the Thenable that resolves once the client has stopped.
35 | // Fix: bare `Thenable` is a generic type used without its type argument
36 | // (TS2314); the extension host contract is `Thenable<void> | undefined`.
37 | export function deactivate(): Thenable<void> | undefined {
38 |     if (!client) {
39 |         return undefined;
40 |     }
41 |     return client.stop();
42 | }
38 |
--------------------------------------------------------------------------------
/editor/code/client/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "target": "es2020",
5 | "lib": [
6 | "es2020"
7 | ],
8 | "outDir": "out",
9 | "rootDir": "src",
10 | "sourceMap": true
11 | },
12 | "include": [
13 | "src"
14 | ],
15 | "exclude": [
16 | "node_modules",
17 | ".vscode-test"
18 | ]
19 | }
--------------------------------------------------------------------------------
/editor/code/language-configuration.json:
--------------------------------------------------------------------------------
1 | {
2 | "comments": {
3 | "lineComment": "//"
4 | },
5 | "brackets": [
6 | ["{", "}"],
7 | ["[", "]"],
8 | ["(", ")"]
9 | ],
10 | "autoClosingPairs": [
11 | { "open": "{", "close": "}" },
12 | { "open": "[", "close": "]" },
13 | { "open": "(", "close": ")" },
14 | { "open": "\"", "close": "\"", "notIn": ["string"] }
15 | ],
16 | "surroundingPairs": [
17 | { "open": "{", "close": "}" },
18 | { "open": "[", "close": "]" },
19 | { "open": "(", "close": ")" },
20 | { "open": "\"", "close": "\"" }
21 | ]
22 | }
--------------------------------------------------------------------------------
/editor/code/out/client.js:
--------------------------------------------------------------------------------
// Generated by tsc from client/src/extension.ts — do not edit by hand.
// Regenerate with `npm run compile` in editor/code.
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.deactivate = exports.activate = void 0;
const vscode = __importStar(require("vscode"));
const node_1 = require("vscode-languageclient/node");
let client;
function activate(context) {
    const serverOptions = {
        run: {
            command: 'pedant',
            args: ['lsp'],
            transport: node_1.TransportKind.stdio
        },
        debug: {
            command: 'pedant',
            args: ['lsp'],
            transport: node_1.TransportKind.stdio
        }
    };
    const clientOptions = {
        documentSelector: [{ scheme: 'file', language: 'pedant' }],
        synchronize: {
            fileEvents: vscode.workspace.createFileSystemWatcher('**/*.ped')
        }
    };
    client = new node_1.LanguageClient('pedantLanguageServer', 'Pedant Language Server', serverOptions, clientOptions);
    client.start();
}
exports.activate = activate;
function deactivate() {
    if (!client) {
        return undefined;
    }
    return client.stop();
}
exports.deactivate = deactivate;

--------------------------------------------------------------------------------
/editor/code/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "pedant-syntax-highlighting",
3 | "displayName": "Pedant Syntax Highlighting",
4 | "description": "Syntax highlighting for Pedant files",
5 | "version": "0.0.1",
6 | "categories": [
7 | "Programming Languages"
8 | ],
9 | "engines": {
10 | "vscode": "^1.75.0"
11 | },
12 | "activationEvents": [
13 | "onLanguage:pedant"
14 | ],
15 | "contributes": {
16 | "languages": [
17 | {
18 | "id": "pedant",
19 | "aliases": [
20 | "Pedant",
21 | "pedant"
22 | ],
23 | "extensions": [
24 | ".ped"
25 | ],
26 | "configuration": "./language-configuration.json"
27 | }
28 | ],
29 | "grammars": [
30 | {
31 | "language": "pedant",
32 | "scopeName": "source.pedant",
33 | "path": "./syntaxes/pedant.tmLanguage.json"
34 | }
35 | ],
36 | "configuration": {
37 | "type": "object",
38 | "title": "Example configuration",
39 | "properties": {}
40 | }
41 | },
42 | "main": "./client/out/extension",
43 | "scripts": {
44 | "compile": "tsc -b",
    "watch": "tsc -b -watch",
46 | "postinstall": "npm run compile"
47 | },
48 | "devDependencies": {
49 | "@types/node": "^20.1.1",
50 | "typescript": "^5.0.4",
51 | "vsce": "^2.15.0",
    "@types/vscode": "^1.75.0"
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/editor/code/pedant-syntax-highlighting-0.0.1.vsix:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hazelfire/pedant/02428bd1aa8a7d2ad52c6610f9dda0f75df491fe/editor/code/pedant-syntax-highlighting-0.0.1.vsix
--------------------------------------------------------------------------------
/editor/code/syntaxes/pedant.tmLanguage.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://raw.githubusercontent.com/martinring/tmlanguage/master/tmlanguage.json",
3 | "name": "Pedant",
4 | "scopeName": "source.pedant",
5 | "fileTypes": [
6 | "ped"
7 | ],
8 | "patterns": [
9 | {
10 | "name": "comment.line.pedant",
11 | "match": "\/\/.*$",
12 | "captures": {
13 | "0": {
14 | "name": "punctuation.definition.comment.pedant"
15 | }
16 | }
17 | },
18 | {
19 | "name": "keyword.control.import.pedant",
20 | "match": "\\bimport\\b",
21 | "captures": {
22 | "0": {
23 | "name": "keyword.control.import.pedant"
24 | }
25 | }
26 | },
27 | {
28 | "name": "keyword.control.unit.pedant",
29 | "match": "\\bunit\\b",
30 | "captures": {
31 | "0": {
32 | "name": "keyword.control.unit.pedant"
33 | }
34 | }
35 | },
36 | {
37 | "name": "constant.numeric.pedant",
38 | "match": "\\b\\d+(\\.\\d+)?\\b",
39 | "captures": {
40 | "0": {
41 | "name": "constant.numeric.pedant"
42 | }
43 | }
44 | },
45 | {
46 | "name": "constant.language.unit.pedant",
47 | "match": "\\b\\w+\\b(?=\\s*-?\\d*)",
48 | "captures": {
49 | "0": {
50 | "name": "constant.language.unit.pedant"
51 | }
52 | }
53 | },
54 | {
55 | "name": "constant.language.unit-power.pedant",
56 | "match": "\\b\\w+-\\d+\\b",
57 | "captures": {
58 | "0": {
59 | "name": "constant.language.unit-power.pedant"
60 | }
61 | }
62 | },
63 | {
64 | "name": "variable.assignment.pedant",
65 | "begin": "\\b[A-Za-z_][A-Za-z0-9_]*\\b",
66 | "end": "=",
67 | "beginCaptures": {
68 | "0": {
69 | "name": "variable.assignment.pedant"
70 | }
71 | },
72 | "endCaptures": {
73 | "0": {
74 | "name": "keyword.operator.assignment.pedant"
75 | }
76 | },
77 | "patterns": [
78 | {
79 | "include": "#expression"
80 | }
81 | ]
82 | }
83 | ],
  "repository": {
    "expression": {
      "patterns": [
        {
          "include": "#number"
        },
        {
          "include": "#unit"
        },
        {
          "include": "#unit-power"
        }
      ]
    },
    "number": {
      "name": "constant.numeric.pedant",
      "match": "\\b\\d+(\\.\\d+)?\\b"
    },
    "unit": {
      "name": "constant.language.unit.pedant",
      "match": "\\b\\w+\\b(?=\\s*-?\\d*)"
    },
    "unit-power": {
      "name": "constant.language.unit-power.pedant",
      "match": "\\b\\w+-\\d+\\b"
    }
  }
99 | }
--------------------------------------------------------------------------------
/editor/code/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "target": "es2020",
5 | "lib": ["es2020"],
6 | "outDir": "out",
7 | "rootDir": "src",
8 | "sourceMap": true
9 | },
10 | "include": [
11 | "src"
12 | ],
13 | "exclude": [
14 | "node_modules",
15 | ".vscode-test"
16 | ],
  "references": [
    { "path": "./client" }
  ]
20 | }
--------------------------------------------------------------------------------
/editor/vim/ped.vim:
--------------------------------------------------------------------------------
" Vim syntax file
" Language: Pedant (dimensional-analysis language, .ped)
" Maintainer: Sam Nolan
" Latest Revision: 31 Oct 2021

if exists("b:current_syntax")
  finish
endif

" Keywords

" Numeric literal: digits with an optional decimal part, optionally
" followed by a unit annotation.
" Fix: the dot was previously unescaped ('\d\+.\?\d*'), so any single
" character between digit runs matched as a "decimal point".
syn match celNumber '\d\+\.\?\d*' nextgroup=celType skipwhite
" Unit annotation such as 'usd people-1', optionally preceded by '^'.
syn match celType '\^\?\([a-z]\+-\?\d*\s*\)\+' contained
syn keyword celFunc ln
syn keyword celUnit unit nextgroup=unitDecl
syn match unitDecl '[a-z ]\+' contained
" Arithmetic operators. '-' is placed last so the bracket expression is
" literal; the original '[*+-/]' defined the range '+'..'/' and therefore
" also matched ',' and '.'.
syn match celOperator '[*+/-]'
syn match celVariable '^[a-zA-Z_]\+'
" Line comment: '//' to end of line.
" Fix: the original '//\.\+' only matched '//' followed by literal dots.
syn match celComment '//.*'

let b:current_syntax = "ped"

hi def link celTodo Todo
hi def link celComment Comment
hi def link celBlockCmd Statement
hi def link celFunc Function
hi def link celString Constant
hi def link celDesc PreProc
hi def link celNumber Number
hi def link celType Type
hi def link celVariable Identifier
hi def link celOperator Label
hi def link celUnit Keyword
hi def link unitDecl Type
--------------------------------------------------------------------------------
/examples/decisions/urns.ped:
--------------------------------------------------------------------------------
1 | addOne = 2
2 |
--------------------------------------------------------------------------------
/examples/functions.ped:
--------------------------------------------------------------------------------
1 | // Comment
2 | /* Block comment */
3 | import common(name1, name2)
4 | unit usd
5 | addOne x = x + 1
6 | addOneUsd x = x + 1 usd
7 | multiline_declaration =
8 | 2 + 5 + addOne 6
9 | y = 2
--------------------------------------------------------------------------------
/examples/givewell/amf.ped:
--------------------------------------------------------------------------------
1 | unit usd people nets
2 | percent_of_funding_allocated = [ 0.2, 0.2, 0.2, 0.2, 0.2 ]
3 | percentage_of_total_cost_covered_by_amf = [ 0.463366436660079, 0.523185480024128, 0.493797334236766, 0.537510479303246, 0.505974655790821 ]
4 | percentage_of_total_cost_covered_by_other_philanthropy = [ 0.0308772754273122, 0, 0, 0, 0 ]
5 |
6 | donation_size_to_amf = 20000 usd
7 |
8 | total_spending_all_contributors = donation_size_to_amf / percentage_of_total_cost_covered_by_amf
9 | cost_per_llin = [ 5.54028684170494 usd nets-1, 4.49727990174396 usd nets-1, 4.85803649068832 usd nets-1, 4.29279767509372 usd nets-1, 5.35221114333387 usd nets-1 ]
10 | people_covered_from_nets = 1.8 people nets-1
11 | cost_per_person_covered = cost_per_llin / people_covered_from_nets
12 | total_number_of_people_covered = total_spending_all_contributors / cost_per_person_covered
13 |
14 | // Population
15 | percent_of_population_under_5 =
16 | [ 13629465.1519444 / 87670443.7766788
17 | , 2143394.87689442 / 12643148.9336671
18 | , 1114855.20710404 / 7921526.58517355
19 | , 7089749.81596238 / 41117856.2413241
20 | , 33521637.830718 / 214823785.7115
21 | ]
22 | percent_of_population_5_to_9 = [ 12763832.6557654/87670443.7766788, 1933404.91137462 / 12643148.9336671, 1074824.79732476 / 7921526.58517355, 6314355.69254718/41117856.2413241, 31315804.4739165 / 214823785.7115 ]
23 | percent_of_population_10_to_14 = [ 11301847.5012717 / 87670443.7766788, 1636389.58910505 / 12643148.9336671, 957955.990123928 / 7921526.58517355, 5673366.42359162 / 41117856.2413241, 28910632.0515107 / 214823785.7115 ]
24 | percent_of_population_5_to_14 = percent_of_population_5_to_9 + percent_of_population_10_to_14
25 |
26 | children_under_5_covered = total_number_of_people_covered * percent_of_population_under_5
27 | children_between_5_to_14_covered = total_number_of_people_covered * percent_of_population_5_to_14
28 |
29 | equivalent_coverage_years_for_llin = [2.11*(1-0.173704447529312), 2.11, 2.11, 2.11, 2.11 ]
30 |
31 | person_years_of_coverage_under_5 = equivalent_coverage_years_for_llin * children_under_5_covered
32 | person_years_of_coverage_5_to_14 = equivalent_coverage_years_for_llin * children_between_5_to_14_covered
33 | person_years_of_coverage_under_15 = person_years_of_coverage_under_5 + person_years_of_coverage_5_to_14
34 |
35 | risk_of_death_averted_under_5 = 0.17
36 | baseline_under_5_mortality_amf_countries = [ 24.27862218, 26.20755637, 17.7483628774233, 19.9594475483085, 30.3436759585857 ]
37 | under_5_mortality_amf_countries = [ 7.741741232, 14.600538209998, 8.27977451508132, 7.30501792233482, 15.0472190771806 ]
38 | mortality_decrease = under_5_mortality_amf_countries / baseline_under_5_mortality_amf_countries
39 | portion_attributed_to_nets = 0.25
40 |
41 | counterfactual_mortality = under_5_mortality_amf_countries + baseline_under_5_mortality_amf_countries * (1 - mortality_decrease) * portion_attributed_to_nets
42 |
43 | adjustment_for_net_coverage_years_lost_previous_distributions = [ 0.97, 0.97, 0.97, 0.97, 0.98 ]
44 |
45 | // Other Adjustments
46 | net_use_adjustment = 0.9
47 | internal_validity_adjustment = 0.95
48 | external_validity_adjustment = 0.95
49 |
50 | portion_attributed_to_malaria = [ 1.80049262480695, 1.24496223958352, 1.01165861903982, 1.52840683916445, 0.949907345167584 ]
51 |
52 | efficacy_reduction_due_to_insectiside_resistance = [ 0.0517082573721674, 0.316487728760146, 0.2791928529, 0.2482803052, 0.01466377341 ]
53 |
54 | deaths_averted_per_thousand_years = risk_of_death_averted_under_5 * counterfactual_mortality * adjustment_for_net_coverage_years_lost_previous_distributions *portion_attributed_to_malaria * net_use_adjustment * internal_validity_adjustment * external_validity_adjustment * (1 - efficacy_reduction_due_to_insectiside_resistance)
55 |
56 | total_number_under_5_deaths_averted = (person_years_of_coverage_under_5 / 1000) * deaths_averted_per_thousand_years
57 |
58 | value_assigned_to_averting_death_of_an_individual_under_5 = 117
59 | total_units_of_value_under_5 = value_assigned_to_averting_death_of_an_individual_under_5 * total_number_under_5_deaths_averted
60 |
61 | number_of_malaria_deaths_per_year = [ 57160.15, 11336.5, 5436.19, 22586.59, 191106.09 ]
62 | number_of_malaria_deaths_under_5 = [ 44077.95, 8181.1, 1880.41, 17385.77, 95636.13 ]
63 | number_of_malaria_deaths_over_5 = number_of_malaria_deaths_per_year - number_of_malaria_deaths_under_5
64 |
65 | ratio_of_over_to_under_5_malaria_deaths = number_of_malaria_deaths_over_5 / number_of_malaria_deaths_under_5
66 |
67 | relative_efficacy_for_mortality_over_5 = 0.8
68 |
69 | total_over_5_deaths_averted = relative_efficacy_for_mortality_over_5 * ratio_of_over_to_under_5_malaria_deaths * total_number_under_5_deaths_averted
70 |
71 | value_averting_death_over_5 = 83
72 | total_units_value_over_5_deaths_averted = value_averting_death_over_5 * total_over_5_deaths_averted
73 |
74 | total_deaths_averted = total_over_5_deaths_averted + total_number_under_5_deaths_averted
75 |
76 | reduction_in_malaria_under_5 = 0.45
77 | expected_reduction_in_malaria = reduction_in_malaria_under_5 * adjustment_for_net_coverage_years_lost_previous_distributions * net_use_adjustment * internal_validity_adjustment * external_validity_adjustment *portion_attributed_to_malaria *(1 - efficacy_reduction_due_to_insectiside_resistance)
78 |
// Malaria prevalence per 100,000 by age band, one entry per country.
malaria_prevalence_under_5 = [ 28309.33, 33757.61, 30426.94, 38599.14, 21795.84 ] / 100000
// NOTE(review): the last two entries (0.43, 0.24) are ~5 orders of magnitude
// smaller than the rest of this row and than the same cohorts in the adjacent
// rows — verify against the GiveWell source spreadsheet.
malaria_prevalence_5_to_9 = [ 31278.98, 37364.59, 33589.94, 0.43, 0.24 ] / 100000
malaria_prevalence_10_to_14 = [ 29161.94, 34825.88, 31313.64, 39785.54, 22203.43 ] / 100000
malaria_prevalence_5_to_14 = [ 30284.77, 36200.85, 32517.22, 41325.53, 23114.47] / 100000
83 | expected_increase_in_malaria_prevalence_in_absense_of_amf = 0.2
84 | counterfactual_malaria_prevalence_under_5 = malaria_prevalence_under_5 * (1 + expected_increase_in_malaria_prevalence_in_absense_of_amf)
85 | counterfactual_malaria_prevalence_5_to_14 = malaria_prevalence_5_to_14 * (1 + expected_increase_in_malaria_prevalence_in_absense_of_amf)
86 |
87 | percentage_point_reduction_in_probability_of_a_covered_child_infected_under_5 = expected_reduction_in_malaria * counterfactual_malaria_prevalence_under_5
88 | percentage_point_reduction_in_probability_of_a_covered_child_infected_5_to_14 = expected_reduction_in_malaria * counterfactual_malaria_prevalence_5_to_14
89 |
90 | reduction_in_people_infected_under_5 = person_years_of_coverage_under_5 * percentage_point_reduction_in_probability_of_a_covered_child_infected_under_5
91 | reduction_in_people_infected_5_to_14 = person_years_of_coverage_5_to_14 * percentage_point_reduction_in_probability_of_a_covered_child_infected_5_to_14
92 |
93 | increase_in_income_from_reducing_malaria = 0.023
94 | additional_replicability_adjustment = 0.52
95 | adjusted_increase_in_ln_income = (ln (1 + increase_in_income_from_reducing_malaria) - ln 1) * additional_replicability_adjustment
96 |
97 | average_number_of_years_between_nets_distributed_and_benefits = 10
98 | discount_rate = 1.04
99 | benefit_on_one_years_income = adjusted_increase_in_ln_income / (discount_rate) ^ average_number_of_years_between_nets_distributed_and_benefits
100 | duration_of_long_term_benefit = 40
101 | pv_of_lifetime_benefits = benefit_on_one_years_income * (1 - (discount_rate ^ (-duration_of_long_term_benefit))) / (ln discount_rate)
102 | multiplier_for_resource_sharing_within_households = 2
103 |
104 | pv_of_benefits_from_reducing_malaria = multiplier_for_resource_sharing_within_households * pv_of_lifetime_benefits
105 |
106 | total_units_of_increase_in_ln_income_under_5 = reduction_in_people_infected_under_5 * pv_of_benefits_from_reducing_malaria
107 | total_units_of_increase_in_ln_income_5_to_14 = reduction_in_people_infected_5_to_14 * pv_of_benefits_from_reducing_malaria
108 |
109 | value_per_ln_consumption = 1.44
110 | total_units_of_value_increase_consumption = value_per_ln_consumption * (total_units_of_increase_in_ln_income_under_5 + total_units_of_increase_in_ln_income_5_to_14)
111 |
112 | total_units_of_value = total_units_of_value_increase_consumption + total_units_value_over_5_deaths_averted + total_units_of_value_under_5
113 |
114 | risk_of_double_treatment = 0
115 | risk_of_ineffective_goods = 0
116 | ris_of_goods_not_reaching_recipients = 0.05
117 | total_waste_risk = risk_of_double_treatment + risk_of_ineffective_goods + ris_of_goods_not_reaching_recipients
118 |
119 | risk_misappropriation_without_monitoring = 0.02
120 | risk_false_monitoring_results = 0.02
121 | total_adjustment_monitoring = risk_misappropriation_without_monitoring + risk_false_monitoring_results
122 |
123 | total_downside_adjustment = 1 - total_adjustment_monitoring - total_waste_risk
124 |
125 | total_units_of_value_generated_after_downside = total_downside_adjustment * total_units_of_value
126 |
127 | malaria_morbidity = 0.09
128 | short_term_anemia_effects = 0.09
129 | prevent_of_diseases_other_than_malaria = 0.02
130 | preventation_of_stillbirths = 0.09
131 | investment_of_income_increases = 0.03
132 | rebound_effects = -0.04
133 | treatment_cost_from_prevention = 0.06
134 | subnational_adjustments = 0.02
135 | marginal_funding_goes_to_lower_priority = -0.05
136 | mosquito_insecticide_resistance_in_trials = 0.05
137 | differences_in_mosquito_species = -0.02
138 | adjustment_for_program_impact = [ -0.41, 0, 0, 0, -0.35 ]
139 | total_adjustment_factors_excluded = 1 + malaria_morbidity + short_term_anemia_effects + prevent_of_diseases_other_than_malaria + preventation_of_stillbirths + investment_of_income_increases + rebound_effects + treatment_cost_from_prevention + subnational_adjustments + marginal_funding_goes_to_lower_priority + mosquito_insecticide_resistance_in_trials + differences_in_mosquito_species + adjustment_for_program_impact
140 |
141 | total_units_of_value_after_excluded_adjustments = total_adjustment_factors_excluded * total_units_of_value_generated_after_downside
142 |
--------------------------------------------------------------------------------
/examples/givewell/common.ped:
--------------------------------------------------------------------------------
1 |
unit years
// Annual discount factor (4% per year), carrying a per-year dimension.
discount_rate = 1.04 ^years-1
// Present value of a constant `payment` stream discounted by `discount`
// over `duration` periods: payment * (1 - discount^-duration) / ln discount.
pv payment discount duration =
  payment * (1 - discount ^ (-duration)) / ln discount
6 |
--------------------------------------------------------------------------------
/examples/givewell/deworm_the_world.ped:
--------------------------------------------------------------------------------
// Treatment effect of deworming adjusted for trial coverage and control
// contamination.
// Fix: the formula previously referenced undefined names 'treatment_effect'
// and 'treatment_coverage'; the definitions are spelled 'treatement_*', so
// the formula now uses the names actually defined above.
treatement_effect = 0.109
treatement_coverage = 0.75
treatment_control = 0.05
estimated_treatement = treatement_effect / ((treatement_coverage - treatment_control) - treatement_effect * treatment_control)
replicability_adjustment = 0.13
adjustment_for_years_of_treatement = 0.9
additional_years_of_treatment = 2.41
estimated_annual_treatment = estimated_treatement * replicability_adjustment / additional_years_of_treatment * adjustment_for_years_of_treatement
9 |
10 | discount_rate = 1.04
11 | average_years_deworming_long_term = 8
12 | benefit_of_one_year = estimated_annual_treatment / (discount_rate ^ average_years_deworming_long_term)
13 | duration_of_long_term_benifits = 40
14 |
15 | pv_of_lifetime_benefits = benefit_of_one_year * (1 - (discount_rate ^ (-(duration_of_long_term_benifits)))) / (ln discount_rate)
16 |
17 | multiplier_for_resource_sharing_within_households = 2
18 | adjusted_long_term_benefits = pv_of_lifetime_benefits * multiplier_for_resource_sharing_within_households
19 |
20 | percent_allocated_each_country = [0.3567755983, 0.04274711779, 0.1023037306, 0.09573907075, 0.09989506006]
21 | total_costs_covered_by_dtw = [ 0.6615351534, 0.6062993536, 0.589803699, 0.6270353205, 0.6220757006 ]
22 | total_costs_covered_philanthropy = [0, 0.01847612766, 0.01217006489, 0, 0.02311365514]
23 | total_costs_covered_governments_financial = [0, 0.0003790082282, 0, 0, 0]
24 | total_costs_covered_governments_in_kind = 0.3
25 | total_costs_covered_drug_donations = [0.03846484661, 0.07484545534, 0.0980261853, 0.07296462213, 0.05481063817]
26 |
27 | donation_size = 10000
28 | donation_to_each_country = donation_size * percent_allocated_each_country
29 | total_spending_all_contributors = donation_to_each_country / total_costs_covered_by_dtw
30 | proportion_of_deworming_going_to_children = 1
31 | worm_burden_adjustment = [0.02913396089, 0.04541513563, 0.110400789, 0.178456373, 0.08326108198]
32 | pv_of_lifetime_burden_adjusted = worm_burden_adjustment * proportion_of_deworming_going_to_children * adjusted_long_term_benefits
33 | value_of_ln_consumption = 1.44 value people-1 years-1
34 | value_of_deworming = value_of_ln_consumption * pv_of_lifetime_burden_adjusted
35 | cost_per_child_dewormed = [ 0.6229606886, 0.9687729239, 0.8573351642, 1.518246103, 0.7480077174 ]
36 | total_children_dewormed = total_spending_all_contributors / cost_per_child_dewormed
37 |
38 | value_generated = total_children_dewormed * value_of_deworming
39 |
40 | risk_of_double_treatment = 0
41 | risk_of_ineffective_goods = 0.02
42 | ris_of_goods_not_reaching_recipients = 0
43 | total_waste_risk = risk_of_double_treatment + risk_of_ineffective_goods + ris_of_goods_not_reaching_recipients
44 |
45 | risk_misappropriation_without_monitoring = 0.01
46 | risk_false_monitoring_results = 0
47 | total_adjustment_monitoring = risk_misappropriation_without_monitoring + risk_false_monitoring_results
48 |
49 |
50 | risk_change_of_priorities = 0
51 | risk_non_funding_bottlenecks = 0
52 | within_org_fungibility = 0.05
53 | total_adjustment_unintented_purpose = risk_change_of_priorities * risk_non_funding_bottlenecks * within_org_fungibility
54 | total_downside_adjustment = 1 - total_adjustment_monitoring - total_waste_risk - total_adjustment_unintented_purpose
55 |
risk_adjusted_value_generated = total_downside_adjustment * value_generated

// Fix: 'total_units_of_value' is not defined anywhere in this file (the line
// appears copied from amf.ped); 'value_generated' is the corresponding
// quantity here, making this an alias of 'risk_adjusted_value_generated'.
total_units_of_value_generated_after_downside = total_downside_adjustment * value_generated
59 |
60 | // Excluded Effects
61 | direct_health_effects = 0.008
62 | hiv_reduction = 0.06
63 | possible_increases_in_diseases_of_affluence = -0.025
64 | averted_mortality = 0.035
65 | short_term_anemia_effects = 0.09
66 | investment_of_income_increases = 0.1
67 | non_income_long_run_benefits_of_deworming = 0.04
68 | drug_resistance = -0.04
69 | unprogrammed_deworming = -0.14
70 | marginal_funding_goes_to_lower_priority_areas = -0.025
71 | general_equilibrium_effects = -0.03
72 | total_adjustment_effects = 1 + direct_health_effects + hiv_reduction + possible_increases_in_diseases_of_affluence + averted_mortality + short_term_anemia_effects + investment_of_income_increases + non_income_long_run_benefits_of_deworming + drug_resistance + unprogrammed_deworming + marginal_funding_goes_to_lower_priority_areas + general_equilibrium_effects
73 |
74 | value_after_excluded_effects = total_adjustment_effects * total_units_of_value_generated_after_downside
75 |
76 | value_per_dollar = value_after_excluded_effects / donation_to_each_country
77 |
// Cost per treatment broken down by funder.
// Fix: two lines referenced undefined names 'total_costs_covered_financial'
// and 'total_costs_covered_in_kind'; the definitions above are spelled
// 'total_costs_covered_governments_*'.
cost_per_treatement_dwt = cost_per_child_dewormed * total_costs_covered_by_dtw
cost_per_treatement_philanthropy = cost_per_child_dewormed * total_costs_covered_philanthropy
cost_per_treatment_goverment_financial = cost_per_child_dewormed * total_costs_covered_governments_financial
cost_per_treatment_goverment_in_kind = cost_per_child_dewormed * total_costs_covered_governments_in_kind
cost_per_treatment_drug_donations = cost_per_child_dewormed * total_costs_covered_drug_donations

total_cost_per_treatment = cost_per_treatement_dwt + cost_per_treatement_philanthropy + cost_per_treatment_goverment_financial + cost_per_treatment_goverment_in_kind + cost_per_treatment_drug_donations
85 |
// Total spending broken down by funder.
// Fix: two lines referenced undefined names 'total_costs_covered_financial'
// and 'total_costs_covered_in_kind'; the definitions above are spelled
// 'total_costs_covered_governments_*'.
total_expenditure_dwt = total_spending_all_contributors * total_costs_covered_by_dtw
total_expenditure_philanthropy = total_spending_all_contributors * total_costs_covered_philanthropy
total_expenditure_goverment_financial = total_spending_all_contributors * total_costs_covered_governments_financial
total_expenditure_goverment_in_kind = total_spending_all_contributors * total_costs_covered_governments_in_kind
total_expenditure_drug_donations = total_spending_all_contributors * total_costs_covered_drug_donations
total_expenditure = total_expenditure_dwt + total_expenditure_philanthropy + total_expenditure_goverment_in_kind + total_expenditure_goverment_financial + total_expenditure_drug_donations
92 |
93 | expenditure_causally_downstream = total_expenditure_dwt + total_expenditure_philanthropy + total_expenditure_goverment_financial
94 |
95 | // I am very confused.... this seems like a mistake
96 | value_averting_death_individual_from_malaria = 117
97 |
// Counterfactual value per dollar of non-philanthropic costs.
// Fix: the first definition was spelled 'conterfactual_...' but every later
// use says 'counterfactual_government_financial_costs'; renamed to match.
counterfactual_government_financial_costs = value_averting_death_individual_from_malaria / 32386.43361
counterfactual_government_in_kind_costs = value_averting_death_individual_from_malaria / 32386.43361
counterfactual_donated_drug_costs = value_averting_death_individual_from_malaria / 21476.62938
101 |
102 | chance_government_costs_replace_philanthropic_costs = 0.2
103 | chance_distribution_unfunded = 0.8
104 |
105 | fraction_government_costs_replace_philanthropic_costs = 1
106 | fraction_distribution_unfunded = 0.8
107 |
108 | change_in_funding_goverment_replace_philanthropy_goverment_financial = total_expenditure_dwt + total_expenditure_philanthropy
109 | change_in_funding_goverment_replace_philanthropy_goverment_in_kind = 0
110 | change_in_funding_goverment_replace_philanthropy_donated_drugs = 0
111 |
// Funding changes in the "distribution goes unfunded" scenario.
// Fix: the drugs line was defined as '..._goverment_drugs' but all three
// later uses say 'change_in_funding_distribution_unfunded_donated_drugs';
// renamed the definition to match.
change_in_funding_distribution_unfunded_goverment_financial = 0
change_in_funding_distribution_unfunded_goverment_in_kind = - total_expenditure_goverment_in_kind
change_in_funding_distribution_unfunded_donated_drugs = -total_expenditure_drug_donations
115 |
116 | value_generated_non_philanthropy_goverment_replace_philanthropy_goverment_financial = value_per_dollar * change_in_funding_goverment_replace_philanthropy_goverment_financial
117 | value_generated_non_philanthropy_goverment_replace_philanthropy_goverment_in_kind = value_per_dollar * change_in_funding_goverment_replace_philanthropy_goverment_in_kind
118 | value_generated_non_philanthropy_goverment_replace_philanthropy_goverment_drugs = value_per_dollar * change_in_funding_goverment_replace_philanthropy_donated_drugs
119 |
120 | value_generated_non_philanthropy_distribution_unfunded_goverment_financial = value_per_dollar * change_in_funding_distribution_unfunded_goverment_financial
121 | value_generated_non_philanthropy_distribution_unfunded_goverment_in_kind = value_per_dollar * change_in_funding_distribution_unfunded_goverment_in_kind
122 | value_generated_non_philanthropy_distribution_unfunded_donated_drugs = value_per_dollar * change_in_funding_distribution_unfunded_donated_drugs
123 |
// Counterfactual value of the non-philanthropic funding changes.
// Fix: four of these lines redefined names already bound at the
// corresponding non-counterfactual definitions above; they now carry the
// 'counterfactual_' prefix like the first line of each group.
// NOTE(review): the first group ("goverment_replace_philanthropy") still
// multiplies by the 'distribution_unfunded' funding changes, exactly as in
// the original — verify against the GiveWell source whether it should use
// the 'goverment_replace_philanthropy' changes instead.
value_generated_counterfactual_non_philanthropy_goverment_replace_philanthropy_goverment_financial = - counterfactual_government_financial_costs * change_in_funding_distribution_unfunded_goverment_financial
value_generated_counterfactual_non_philanthropy_goverment_replace_philanthropy_goverment_in_kind = - counterfactual_government_in_kind_costs * change_in_funding_distribution_unfunded_goverment_in_kind
value_generated_counterfactual_non_philanthropy_goverment_replace_philanthropy_donated_drugs = - counterfactual_donated_drug_costs * change_in_funding_distribution_unfunded_donated_drugs

value_generated_counterfactual_non_philanthropy_distribution_unfunded_goverment_financial = - counterfactual_government_financial_costs * change_in_funding_distribution_unfunded_goverment_financial
value_generated_counterfactual_non_philanthropy_distribution_unfunded_goverment_in_kind = - counterfactual_government_in_kind_costs * change_in_funding_distribution_unfunded_goverment_in_kind
value_generated_counterfactual_non_philanthropy_distribution_unfunded_donated_drugs = (- counterfactual_donated_drug_costs) * change_in_funding_distribution_unfunded_donated_drugs
131 |
132 |
--------------------------------------------------------------------------------
/examples/givewell/givedirectly.ped:
--------------------------------------------------------------------------------
// GiveDirectly's cost effectiveness Analysis. Taken from GiveWell

import common(discount_rate, pv, years)
// units
// Fix: 'usdtransfered' was declared twice in the original unit list.
unit usd usddonated usdtransfered usdinvested usdconsumed usdreturned
unit people value households
unit discountedpv

9 | // Donation size is arbitrary
10 | donation_size = 100000 usddonated
11 |
12 | transfers_of_cost = 0.83 usdtransfered usddonated-1
13 | total_funding_available = transfers_of_cost * donation_size
14 | total_transfer_size = 1000 usdtransfered households-1
15 | average_household_size = 4.7 people households-1
16 | size_of_transfer_per_people = total_transfer_size / average_household_size
17 |
18 | percent_of_transfers_invested = 0.39 usdinvested usdtransfered-1
19 | amount_invested = percent_of_transfers_invested * size_of_transfer_per_people
20 | duration_of_initial_consumption = 1 years
21 |
22 | // This is really awkwardly hacked into types... I wonder if there's a better
23 | // way to do this
24 | percent_of_transfers_consumed =
25 | 1 usdconsumed usdtransfered-1 -
26 | percent_of_transfers_invested *
27 | (1 usdconsumed usdinvested-1)
28 |
29 | consumption_possible_by_funds =
30 | percent_of_transfers_consumed *
31 | size_of_transfer_per_people /
32 | duration_of_initial_consumption
33 | return_on_investment = 0.10 usdreturned usdinvested-1 years-1
34 | usd_returned_consumed = 1 usdconsumed usdreturned-1
35 | annual_increase_consumption_by_roi = return_on_investment * amount_invested * usd_returned_consumed
36 |
37 | log_consumption_created baseline increase = ln( (baseline + increase) / baseline)
38 | baseline_consumption_per_capita = 285.92 usdconsumed people-1 years-1
39 | immediate_increase_of_consumption =
40 | log_consumption_created baseline_consumption_per_capita consumption_possible_by_funds *
41 | duration_of_initial_consumption
42 | future_increase_in_ln_consumption =
43 | log_consumption_created baseline_consumption_per_capita annual_increase_consumption_by_roi
44 | duration_of_investment = 10 years
45 | pv_of_investment = pv future_increase_in_ln_consumption discount_rate (duration_of_investment - (1 years))
46 | percent_of_investment_returned = 0.2 usdreturned usdinvested-1
47 | duration_of_end_of_benefits = 1 years
48 | percent_roi_final_year = (percent_of_investment_returned / duration_of_end_of_benefits) + return_on_investment
49 | final_year_discount = discount_rate ^ duration_of_investment
50 | pv_of_last_years = (log_consumption_created baseline_consumption_per_capita (amount_invested * percent_roi_final_year * usd_returned_consumed)) / final_year_discount * duration_of_end_of_benefits
51 | total_pv_increase = pv_of_last_years + pv_of_investment + immediate_increase_of_consumption
52 | discount_for_potential_negative_spillover = 0.95
53 | total_pv_after_spillover = discount_for_potential_negative_spillover * total_pv_increase
54 | total_increase_ln_consumption_per_household = total_pv_after_spillover * average_household_size
55 |
56 | number_of_transfers_made = total_funding_available / total_transfer_size
57 | total_units_of_ln_consumption = number_of_transfers_made * total_increase_ln_consumption_per_household
58 | value_of_ln_consumption = 1.44 value people-1 years-1
59 | total_units_of_value = value_of_ln_consumption * total_units_of_ln_consumption
60 |
61 | percent_of_cash_benefits_from_short_term = immediate_increase_of_consumption / total_pv_increase
62 | percent_of_cash_benefits_from_long_term = (pv_of_last_years + pv_of_investment) / total_pv_increase
63 |
64 | // Downside Adjustments
65 | risk_of_cash_not_reach_participants = 0.03
66 |
67 | risk_of_within_org_fungibility = 0.05
68 |
69 | downside_adjustment_factor = 1 - risk_of_within_org_fungibility - risk_of_cash_not_reach_participants
70 | value_after_downsides = downside_adjustment_factor * total_units_of_value
71 |
72 | // Downside effects
73 | changes_in_ppp = -0.07
74 | developmental_effects = 0.05
75 | reduced_morbidity = 0.04
76 | child_mortality_effects = 0.04
77 |
78 | total_adjustment_factor = 1 + changes_in_ppp + developmental_effects + reduced_morbidity + child_mortality_effects
79 |
80 | // After adjustments
81 | total_value_after_adjustments = total_units_of_value * total_adjustment_factor * downside_adjustment_factor
82 | total_value_after_adjustments_per_dollar = total_value_after_adjustments / donation_size
83 |
--------------------------------------------------------------------------------
/examples/givewell/newgivedirectly.ped:
--------------------------------------------------------------------------------
1 | unit usd people years value
2 | donation_size = 100000 usd
3 | transfers_of_cost = 0.83
4 | total_funding_available = transfers_of_cost * donation_size
5 | total_transfer_size = 1000 usd
6 | average_household_size = 4.7 people
7 | size_of_transfer_per_people = total_transfer_size / average_household_size
8 |
9 | percent_of_transfers_invested = 0.39
10 | amount_invested = percent_of_transfers_invested * size_of_transfer_per_people
11 | duration_of_initial_consumption = 1 years
12 | consumption_possible_by_funds =
13 | (1 - percent_of_transfers_invested) *
14 | size_of_transfer_per_people /
15 | duration_of_initial_consumption
16 | return_on_investment = 0.10 years-1
17 | annual_increase_consumption_by_roi = return_on_investment * amount_invested
18 |
19 | baseline_consumption_per_capita = 285.92 usd people-1 years-1
20 | immediate_increase_of_consumption =
21 | ln((consumption_possible_by_funds + baseline_consumption_per_capita) / baseline_consumption_per_capita) *
22 | duration_of_initial_consumption
23 | future_increase_in_ln_consumption = ln((annual_increase_consumption_by_roi + baseline_consumption_per_capita) /
24 | baseline_consumption_per_capita)
25 | discount_rate = 1.04 ^years-1
26 | duration_of_investment = 10 years
27 | pv_of_investment = future_increase_in_ln_consumption * (1 - (discount_rate ^ (-(duration_of_investment - (1 years)))))/
28 | (ln discount_rate)
29 | percent_of_investment_returned = 0.2
30 | duration_of_end_of_benefits = 1 years
31 | pv_of_last_years = ((ln ((baseline_consumption_per_capita + amount_invested * ( percent_of_investment_returned /duration_of_end_of_benefits + return_on_investment)) / baseline_consumption_per_capita)) / (discount_rate ^ duration_of_investment)) * duration_of_end_of_benefits
32 | total_pv_increase = pv_of_last_years + pv_of_investment + immediate_increase_of_consumption
33 | discount_for_potential_negative_spillover = 0.05
34 | total_pv_after_spillover = (1 - discount_for_potential_negative_spillover) * total_pv_increase
35 | total_increase_ln_consumption_per_household = total_pv_after_spillover * average_household_size
36 |
37 | number_of_transfers_made = total_funding_available / total_transfer_size
38 | total_units_of_ln_consumption = number_of_transfers_made * total_increase_ln_consumption_per_household
39 | value_of_ln_consumption = 1.44 value people-1 years-1
40 | total_units_of_value = value_of_ln_consumption * total_units_of_ln_consumption
41 |
42 | percent_of_cash_benefits_from_short_term = immediate_increase_of_consumption / total_pv_increase
43 | percent_of_cash_benefits_from_long_term = (pv_of_last_years + pv_of_investment) / total_pv_increase
44 |
45 | // Downside Adjustments
46 | risk_of_cash_not_reach_participants = 0.03
47 |
48 | risk_of_within_org_fungibility = 0.05
49 |
50 | downside_adjustment_factor = 1 - risk_of_within_org_fungibility - risk_of_cash_not_reach_participants
51 | value_after_downsides = downside_adjustment_factor * total_units_of_value
52 |
53 | // Downside effects
54 | changes_in_ppp = -0.07
55 | developmental_effects = 0.05
56 | reduced_morbidity = 0.04
57 | child_mortality_effects = 0.04
58 |
59 | total_adjustment_factor = 1 + changes_in_ppp + developmental_effects + reduced_morbidity + child_mortality_effects
60 |
61 | // After adjustments
62 | total_value_after_adjustments = total_units_of_value * total_adjustment_factor * downside_adjustment_factor
63 | total_value_after_adjustments_per_dollar = total_value_after_adjustments / donation_size
64 |
--------------------------------------------------------------------------------
/examples/highschool_physics_example.ped:
--------------------------------------------------------------------------------
1 | // run with:
2 | // $ stack run pedant compile examples/highschool_physics_example.ped
3 | unit verticalMeters seconds kilograms jules newtons
4 | mass = 10 kilograms
5 | initial_height = 10 verticalMeters
6 | speed_upwards = 5 verticalMeters / (1 seconds)
7 | duration_of_movement = 10 seconds
8 | gravity_constant = 9.8 verticalMeters / (1 seconds2)
9 | upwards_movement = speed_upwards * duration_of_movement
10 | final_vertical_height = initial_height + upwards_movement
11 | joule_conversion = 1 jules / (1 kilograms * 1 verticalMeters2 * 1 seconds-2)
12 | newton_conversion = 1 newtons / (1 kilograms * 1 verticalMeters * 1 seconds-2)
13 | difference_in_potential_energy = final_vertical_height * mass * gravity_constant
14 | difference_in_potential_energy_newtons = difference_in_potential_energy * newton_conversion
15 | difference_in_potential_energy_jules = difference_in_potential_energy * joule_conversion
16 |
--------------------------------------------------------------------------------
/examples/simple_example.ped:
--------------------------------------------------------------------------------
1 | unit meters seconds
2 | ok = 4 meters * 2 seconds
3 | nonsense = 4 meters + 2 seconds
4 |
--------------------------------------------------------------------------------
/examples/tests.ped:
--------------------------------------------------------------------------------
1 | unit usd
2 | import functions(addOne, addOneUsd)
3 | // This is a collection of tests for mainly functions
4 | id x = x
5 |
--------------------------------------------------------------------------------
/examples/tests/functions.ped:
--------------------------------------------------------------------------------
1 | one = 1
2 |
3 | addOne x = x + 1
4 |
5 |
6 | two = addOne one
7 |
8 | // Testing adding variables with the same name as arguments
9 | x = 5
10 | addTwo x = x + 2
11 | seven = addTwo x
12 |
13 | // Functions with multiple arguments
14 | add x y = x + y
15 | eight = add 5 3
16 |
--------------------------------------------------------------------------------
/examples/tests/simpleimport.ped:
--------------------------------------------------------------------------------
1 | import functions(one)
2 |
--------------------------------------------------------------------------------
/hie.yaml:
--------------------------------------------------------------------------------
1 | cradle:
2 | cabal:
3 |
--------------------------------------------------------------------------------
/make-ubuntu.bash:
--------------------------------------------------------------------------------
1 | # Refresh the package index before installing, so we don't install stale versions
2 | sudo apt update -y
3 | sudo apt upgrade -y
4 | sudo apt install -y ghc
5 | sudo apt install -y cabal-install
6 | sudo apt install -y haskell-stack
7 |
8 | stack upgrade
9 | cabal update
10 |
11 | ## rm -rf ~/.stack
12 | ## rm -rf pedant
13 | git clone git@github.com:Hazelfire/pedant.git
14 | cd pedant
15 |
16 | stack setup && stack build
17 |
--------------------------------------------------------------------------------
/package.yaml:
--------------------------------------------------------------------------------
1 | name: pedant
2 | version: 0.1.0.0
3 | synopsis: A Math language for Dimensional Analysis
4 | description: See README at
5 | maintainer: Sam Nolan
6 | github: Hazelfire/pedant
7 | category: Economics
8 | extra-source-files:
9 | - CHANGELOG.md
10 |
11 | ghc-options: -Wall
12 |
13 | dependencies:
14 | - base >= 4.9 && < 5
15 | - lsp
16 | - filepath
17 | - lsp-types
18 | - lens
19 | - parser-combinators
20 | - megaparsec
21 | - ordered-containers
22 | - mtl
23 | - random
24 | # (duplicate "lens" entry removed; already listed above)
25 | - containers
26 | - text
27 | - hslogger
28 |
29 | data-files:
30 | - "**/*.ped"
31 |
32 | library:
33 | source-dirs: src
34 | exposed-modules:
35 | - Pedant
36 |
37 | executable:
38 | main: Main.hs
39 | source-dirs: app
40 | dependencies:
41 | - pedant
42 |
43 | tests:
44 | tests:
45 | main: Main.hs
46 | source-dirs:
47 | - test
48 | - src
49 | dependencies:
50 | - hspec
--------------------------------------------------------------------------------
/pedant.cabal:
--------------------------------------------------------------------------------
1 | cabal-version: 1.12
2 |
3 | -- This file has been generated from package.yaml by hpack version 0.35.2.
4 | --
5 | -- see: https://github.com/sol/hpack
6 |
7 | name: pedant
8 | version: 0.1.0.0
9 | synopsis: A Math language for Dimensional Analysis
10 | description: See README at
11 | category: Economics
12 | homepage: https://github.com/Hazelfire/pedant#readme
13 | bug-reports: https://github.com/Hazelfire/pedant/issues
14 | maintainer: Sam Nolan
15 | build-type: Simple
16 | extra-source-files:
17 | CHANGELOG.md
18 | data-files:
19 | examples/decisions/urns.ped
20 | examples/functions.ped
21 | examples/givewell/amf.ped
22 | examples/givewell/common.ped
23 | examples/givewell/deworm_the_world.ped
24 | examples/givewell/givedirectly.ped
25 | examples/givewell/newgivedirectly.ped
26 | examples/highschool_physics_example.ped
27 | examples/simple_example.ped
28 | examples/tests.ped
29 | examples/tests/functions.ped
30 | examples/tests/simpleimport.ped
31 |
32 | source-repository head
33 | type: git
34 | location: https://github.com/Hazelfire/pedant
35 |
36 | library
37 | exposed-modules:
38 | Pedant
39 | other-modules:
40 | Pedant.FileResolver
41 | Pedant.InBuilt
42 | Pedant.LSP
43 | Pedant.Parser
44 | Pedant.Parser.Types
45 | Pedant.TypeCheck
46 | Pedant.TypeCheck.Dimensions
47 | Pedant.TypeCheck.Expressions
48 | Pedant.TypeCheck.LambdaCalculus
49 | Pedant.TypeCheck.TypeError
50 | Pedant.TypeCheck.Types
51 | Pedant.Types
52 | Paths_pedant
53 | hs-source-dirs:
54 | src
55 | ghc-options: -Wall
56 | build-depends:
57 | base >=4.9 && <5
58 | , containers
59 | , filepath
60 | , hslogger
61 | , lens
62 | , lsp
63 | , lsp-types
64 | , megaparsec
65 | , mtl
66 | , ordered-containers
67 | , parser-combinators
68 | , text
69 | default-language: Haskell2010
70 |
71 | executable pedant
72 | main-is: Main.hs
73 | other-modules:
74 | Paths_pedant
75 | hs-source-dirs:
76 | app
77 | ghc-options: -Wall
78 | build-depends:
79 | base >=4.9 && <5
80 | , containers
81 | , filepath
82 | , hslogger
83 | , lens
84 | , lsp
85 | , lsp-types
86 | , megaparsec
87 | , mtl
88 | , ordered-containers
89 | , parser-combinators
90 | , pedant
91 | , text
92 | default-language: Haskell2010
93 |
94 | test-suite tests
95 | type: exitcode-stdio-1.0
96 | main-is: Main.hs
97 | other-modules:
98 | Spec.Parser
99 | Pedant
100 | Pedant.FileResolver
101 | Pedant.InBuilt
102 | Pedant.LSP
103 | Pedant.Parser
104 | Pedant.Parser.Types
105 | Pedant.TypeCheck
106 | Pedant.TypeCheck.Dimensions
107 | Pedant.TypeCheck.Expressions
108 | Pedant.TypeCheck.LambdaCalculus
109 | Pedant.TypeCheck.TypeError
110 | Pedant.TypeCheck.Types
111 | Pedant.Types
112 | Paths_pedant
113 | hs-source-dirs:
114 | test
115 | src
116 | ghc-options: -Wall
117 | build-depends:
118 | base >=4.9 && <5
119 | , containers
120 | , filepath
121 | , hslogger
122 | , hspec
123 | , lens
124 | , lsp
125 | , lsp-types
126 | , megaparsec
127 | , mtl
128 | , ordered-containers
129 | , parser-combinators
130 | , text
131 | default-language: Haskell2010
132 |
--------------------------------------------------------------------------------
/shell.nix:
--------------------------------------------------------------------------------
1 | { pkgs ? import <nixpkgs> {} }:
2 | let
3 | myHaskell = (pkgs.haskellPackages.ghcWithHoogle (p: with p; [
4 | cabal-doctest
5 | cabal-install
6 | doctest
7 | filepath
8 | hslogger
9 | hspec
10 | random
11 | lsp
12 | megaparsec
13 | ordered-containers
14 | ormolu
15 | ]));
16 | in
17 | pkgs.mkShell {
18 | name = "dimensional";
19 | shellHook = ''
20 | export NIX_GHC=${myHaskell}/bin/ghc
21 | export NIX_GHC_LIBDIR=${myHaskell}/lib/ghc-8.10.7
22 | '';
23 | buildInputs = with pkgs; [
24 | myHaskell
25 | haskell-language-server
26 | coq
27 | nodejs
28 | hpack
29 | yarn
30 | ];
31 | }
32 |
--------------------------------------------------------------------------------
/src/Pedant.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 | {-# LANGUAGE ImportQualifiedPost #-}
3 |
4 | -- | The parser for pedant, a small dimensional programming language
5 | module Pedant (pedantMain, evaluatePedantFile, EvaluationResult (..)) where
6 |
7 | import Control.Monad (forM, void)
8 | import Data.Bifunctor qualified as Bifunctor
9 | import Data.List qualified as List
10 | import Data.List.NonEmpty qualified as NonEmpty
11 | import Data.Map qualified as Map
12 | import Data.Map.Ordered ((|<))
13 | import Data.Map.Ordered qualified as OMap
14 | import Data.Text qualified as T
15 | import Data.Text.IO qualified as T
16 | import Pedant.InBuilt qualified as InBuilt
17 | import Pedant.FileResolver qualified as Resolver
18 | import Pedant.LSP qualified as LSP
19 | import Pedant.Parser
20 | ( makeErrorBundle,
21 | )
22 | import Pedant.TypeCheck
23 | ( TypeCheckState (tcsEnv),
24 | TypeEnv (TypeEnv),
25 | emptyTypeCheckState,
26 | typeCheck,
27 | )
28 | import Pedant.Types
29 | ( ExecutionExpression (..),
30 | ExecutionValue
31 | ( ExecutionValueDict,
32 | ExecutionValueEmptyList,
33 | ExecutionValueFunc,
34 | ExecutionValueNumber
35 | ),
36 | InternalFunction (..),
37 | NumericValue (..),
38 | PedantParseError
39 | ( ppePrint
40 | ),
41 | PrettyPrint (pPrint),
42 | Scheme,
43 | VariableName (VariableName),
44 | )
45 | import System.Environment qualified as Env
46 | import qualified Pedant.TypeCheck as TypeCheck
47 |
48 |
49 | -- | Main Function
50 | pedantMain :: IO ()
51 | pedantMain = do
52 | args <- Env.getArgs
53 | case args of
54 | ("lsp" : _) -> do
55 | void LSP.runLSP
56 | ("compile" : fileName : _) -> print =<< evaluatePedantFile fileName
57 | _ -> putStrLn "pedant compile [file]"
58 |
59 | data EvaluationResult
60 | = EvaluationSuccess VariableValues
61 | | ParseError PedantParseError
62 | | TypeCheckingError PedantParseError
63 | | EvaluationError T.Text
64 |
65 | type VariableValues = OMap.OMap VariableName (NumericValue, Scheme)
66 |
67 | instance Show EvaluationResult where
68 | show (EvaluationSuccess s) =
69 | T.unpack . T.unlines $
70 | map
71 | ( \(VariableName moduleName name, (value, scheme)) ->
72 | case value of
73 | (FuncValue _ _) ->
74 | moduleName <> "." <> name <> " : " <> pPrint scheme
75 | (InternalFunctionValue _) ->
76 | moduleName <> "." <> name <> " : " <> pPrint scheme
77 | _ ->
78 | moduleName <> "." <> name <> " = " <> T.pack (show value) <> " " <> pPrint scheme
79 | )
80 | (List.reverse (OMap.assocs s))
81 | show (ParseError err) =
82 | T.unpack $ ppePrint err
83 | show (TypeCheckingError err) = T.unpack $ ppePrint err
84 | show (EvaluationError err) = T.unpack err
85 |
86 | -- Takes a file and returns it's result from evaluation
87 | evaluatePedantFile :: String -> IO EvaluationResult
88 | evaluatePedantFile fileName = do
89 | resolveResult <- Resolver.resolveIO fileName
90 | contents <- T.readFile fileName
91 | -- After the file has been read and resolved
92 | case resolveResult of
93 | Right modules -> do
94 | -- Typecheck modules
95 | case typeCheck emptyTypeCheckState modules of
96 | (Nothing, valid) -> do
97 | let (TypeEnv env) = tcsEnv valid
98 | let program = List.reverse $ map (Bifunctor.second TypeCheck.variableInfoExecutionExpression) $ OMap.assocs env
99 | in -- Now execute program
100 | case executeProgram OMap.empty program of
101 | Right result ->
102 | let valueTypeMap = OMap.intersectionWith (\ _ (TypeCheck.VariableInfo scheme _ _) value -> (value, scheme)) env result
103 | in return (EvaluationSuccess valueTypeMap)
104 | Left err ->
105 | return (EvaluationError err)
106 | (Just err, _) -> do
107 | let diag = makeErrorBundle err fileName contents
108 | return (TypeCheckingError diag)
109 | Left b -> return (ParseError $ NonEmpty.head b)
110 |
111 | executeProgram :: OMap.OMap VariableName NumericValue -> [(VariableName, ExecutionExpression)] -> Either T.Text (OMap.OMap VariableName NumericValue)
112 | executeProgram values ((name, expr) : rest) =
113 | let value = evaluateExpression values expr
114 | in case value of
115 | Left err -> Left err
116 | Right result -> executeProgram ((name, result) |< values) rest
117 | executeProgram values [] = Right values
118 |
119 | evaluateExpression :: OMap.OMap VariableName NumericValue -> ExecutionExpression -> Either T.Text NumericValue
120 | evaluateExpression variables expression =
121 | case expression of
122 | EBinOp "" fExp parExp -> do
123 | func <- evaluateExpression variables fExp
124 | case func of
125 | FuncValue arg expr -> evaluateExpression variables (bindVariable arg parExp expr)
126 | InternalFunctionValue (InternalFunction f) -> f <$> evaluateExpression variables parExp
127 | _ -> Left $ "Cannot call constant " <> T.pack (show func)
128 | EBinOp op x y ->
129 | let matchingOperations = filter ((== op) . InBuilt.opName) InBuilt.inBuiltBinaryOperations
130 | in case matchingOperations of
131 | [] -> Left $ "No such binary operation " <> op
132 | opDetails : _ ->
133 | case InBuilt.opFunc opDetails of
134 | InBuilt.BinFunc f -> f <$> evaluateExpression variables x <*> evaluateExpression variables y
135 | _ -> Left $ "Binary function not found " <> op
136 | EAccess x name -> do
137 | evaluatedX <- evaluateExpression variables x
138 | case evaluatedX of
139 | DictValue entries ->
140 | case Map.lookup name entries of
141 | Just entry -> return entry
142 | _ -> Left $ "Cannot access " <> name
143 | _ -> Left $ "Cannot access " <> name <> " because not dictionary"
144 | EVariable name ->
145 | case OMap.lookup name variables of
146 | Just value -> return value
147 | Nothing ->
148 | let (VariableName _ newName) = name
149 | in Left $ "Could not find variable " <> newName
150 | EConstant (ExecutionValueNumber num) -> return (NumberValue num)
151 | EConstant (ExecutionValueFunc arg expr) -> return (FuncValue arg expr)
152 | EConstant (ExecutionValueDict entries) -> do
153 | evaluatedEntries <- forM (Map.toList entries) $ \(key, value) -> do
154 | evaluatedValue <- evaluateExpression variables value
155 | return (key, evaluatedValue)
156 | return (DictValue (Map.fromList evaluatedEntries))
157 | EConstant ExecutionValueEmptyList -> do
158 | return (ListValue [])
159 | EInternalFunc f -> return $ InternalFunctionValue f
160 | ENegate expr -> negate <$> evaluateExpression variables expr
161 |
162 | bindVariable :: T.Text -> ExecutionExpression -> ExecutionExpression -> ExecutionExpression
163 | bindVariable name r (EVariable (VariableName moduleName n))
164 | | name == n = r
165 | | otherwise = EVariable (VariableName moduleName n)
166 | bindVariable name r (EBinOp op e1 e2) = EBinOp op (bindVariable name r e1) (bindVariable name r e2)
167 | bindVariable name r (EAccess e1 x) = EAccess (bindVariable name r e1) x
168 | bindVariable _ _ (EConstant v) = EConstant v
169 | bindVariable name r (ENegate e1) = ENegate (bindVariable name r e1)
170 | bindVariable _ _ (EInternalFunc e1) = EInternalFunc e1
171 |
--------------------------------------------------------------------------------
/src/Pedant/FileResolver.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 | {-# LANGUAGE FlexibleInstances #-}
3 |
4 | module Pedant.FileResolver (resolve, resolveIO, Module (..), ModuleResolvingMonad(..)) where
5 |
6 | import qualified Control.Exception as Exception
7 | import Control.Monad (forM_)
8 | import qualified Control.Monad.Except as Except
9 | import qualified Control.Monad.IO.Class as IO
10 | import qualified Control.Monad.State as State
11 | import qualified Control.Monad.Reader as Reader
12 | import Data.List.NonEmpty (NonEmpty (..))
13 | import qualified Data.Map as Map
14 | import qualified Data.Maybe as Maybe
15 | import qualified Data.Text as T
16 | import qualified Data.Text.IO as T
17 | import qualified Pedant.Parser as Parser
18 | import Pedant.Types (PedantParseError (..))
19 | import qualified System.FilePath as FilePath
20 | import Control.Exception (IOException)
21 |
22 | -- | A module. A collection of files before it is run through the type checker
23 | data Module = Module
24 | { moduleName :: T.Text,
25 | moduleStatements :: [Parser.Statement]
26 | }
27 | deriving (Show)
28 |
29 | data ResolverState = ResolverState
30 | { moduleCache :: Map.Map T.Text Module,
31 | moduleStack :: [Module],
32 | moduleReturn :: [Module]
33 | }
34 |
35 |
36 | class Monad m => ModuleResolvingMonad m where
37 | readModule :: String -> m (Maybe T.Text)
38 |
39 | rightToMaybe :: Either a b -> Maybe b
40 | rightToMaybe (Right b) = Just b
41 | rightToMaybe (Left _) = Nothing
42 |
43 | instance ModuleResolvingMonad (Reader.ReaderT String IO) where
44 | readModule fileName = do
45 | rootDirectory <- Reader.ask
46 |     rightToMaybe <$> IO.liftIO (Exception.try (T.readFile (rootDirectory FilePath.</> moduleNameToPath fileName)) :: IO (Either IOException T.Text))
47 |
48 | moduleNameToPath :: String -> String
49 | moduleNameToPath path = map repl path ++ ".ped"
50 | where
51 | repl :: Char -> Char
52 | repl '.' = FilePath.pathSeparator
53 | repl x = x
54 |
55 | emptyResolverState :: ResolverState
56 | emptyResolverState = ResolverState Map.empty [] []
57 |
58 | type ResolverMonad m a = Except.ExceptT (NonEmpty PedantParseError) (State.StateT ResolverState m) a
59 |
60 | resolveIO :: String -> IO (Either (NonEmpty PedantParseError) [Module])
61 | resolveIO startFileName =
62 | let rootDirectory = FilePath.dropFileName startFileName
63 | startingModule = FilePath.takeBaseName startFileName
64 | in
65 | Reader.runReaderT (resolve startingModule) rootDirectory
66 |
67 | resolve :: ModuleResolvingMonad m => String -> m (Either (NonEmpty PedantParseError) [Module])
68 | resolve startingModule =
69 | fst <$> State.runStateT (Except.runExceptT (resolve' startingModule)) emptyResolverState
70 |
71 | -- | Finds the list of elements that come at and after the one specified
72 | --
73 | -- >>> tailOn 3 [1, 2, 3, 4, 5]
74 | -- [3,4,5]
75 | --
76 | -- >>> tailOn 7 [1, 2, 3, 4, 5]
77 | -- []
78 | --
79 | -- >>> tailOn 1 [1, 2, 3, 4, 5]
80 | -- [1,2,3,4,5]
81 | tailOn :: Eq a => a -> [a] -> [a]
82 | tailOn _ [] = []
83 | tailOn x (h : rest)
84 | | x == h = h : rest
85 | | otherwise = tailOn x rest
86 |
87 | errorReadingModule :: T.Text -> PedantParseError
88 | errorReadingModule m =
89 | let errorMessage =
90 | T.concat
91 | [ "Could not read from module ",
92 | m
93 | ]
94 | in PedantParseError errorMessage 1 1 2 1 errorMessage
95 |
96 |
97 | resolve' :: ModuleResolvingMonad m => String -> ResolverMonad m [Module] -- resolves a module and, transitively, everything it imports
98 | resolve' startingModule = do
99 |   readResult <- (Reader.lift . Reader.lift . readModule) startingModule
100 |   case readResult of
101 |     Nothing -> Except.throwError $ errorReadingModule (T.pack startingModule) :| []
102 |     Just contents ->
103 |       case Parser.parseProgram startingModule contents of
104 |         Left err -> Except.throwError err
105 |         Right statements -> do
106 |           let newModule = Module (T.pack startingModule) statements
107 |               importedModules = Maybe.mapMaybe getImportName statements
108 |           State.modify
109 |             ( \s ->
110 |                 ResolverState
111 |                   { moduleStack = newModule : moduleStack s,
112 |                     moduleCache = Map.insert (T.pack startingModule) newModule (moduleCache s),
113 |                     moduleReturn = newModule : moduleReturn s -- FIX: was 'moduleStack s', which dropped previously resolved sibling modules from the result
114 |                   }
115 |             )
116 |           forM_ importedModules $ \m -> do
117 |             stack <- State.gets moduleStack
118 |             let parentModules = map moduleName stack
119 |             if m `elem` parentModules
120 |               then
121 |                 let errorMessage =
122 |                       T.concat
123 |                         [ "Module ",
124 |                           T.pack startingModule,
125 |                           " can not import module ",
126 |                           m,
127 |                           " as doing so would create a cycle, through ",
128 |                           T.intercalate " -> " (tailOn m parentModules)
129 |                         ]
130 |                  in Except.throwError $ PedantParseError errorMessage 1 1 2 1 errorMessage :| []
131 |               else do
132 |                 cache <- State.gets moduleCache
133 |                 case Map.lookup m cache of
134 |                   Just _ ->
135 |                     -- Already loaded this module, skip
136 |                     pure ()
137 |                   Nothing ->
138 |                     -- We haven't loaded this module yet, resolve it
139 |                     Reader.void (resolve' (T.unpack m))
140 |           State.modify -- pop this module off the import stack now that its imports are done
141 |             ( \s ->
142 |                 s
143 |                   { moduleStack = tail (moduleStack s)
144 |                   }
145 |             )
146 |           State.gets moduleReturn
147 |
148 | getImportName :: Parser.Statement -> Maybe T.Text
149 | getImportName (Parser.ImportStatement (Parser.Positioned _ name) _) = Just name
150 | getImportName _ = Nothing
151 |
--------------------------------------------------------------------------------
/src/Pedant/InBuilt.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 |
3 | module Pedant.InBuilt
4 | ( inBuiltBinaryOperations,
5 | inBuiltPrefixOperations,
6 | Operation (..),
7 | Function (..),
8 | OpFunc (..),
9 | inBuiltFunctions,
10 | )
11 | where
12 |
13 | import qualified Data.Map as Map
14 | import qualified Data.Text as T
15 | import Pedant.Types
16 | (
17 | InternalFunction (..),
18 | NumericValue (..),
19 | )
20 | import qualified Pedant.TypeCheck.Types as TC
21 |
22 | data Operation = Operation
23 | { opName :: T.Text,
24 | opType :: TC.TypeInferenceMonad TC.Type,
25 | opPrecedence :: Int,
26 | opFunc :: OpFunc
27 | }
28 |
29 | data OpFunc
30 | = BinFunc (NumericValue -> NumericValue -> NumericValue)
31 | | UnaryFunc (NumericValue -> NumericValue)
32 |
33 | withPolyNormalDimension :: (TC.NormalDimension -> TC.Type) -> TC.TypeInferenceMonad TC.Type
34 | withPolyNormalDimension f = f <$> TC.newPolyNormDim
35 |
36 | inBuiltBinaryOperations :: [Operation] -- binary operators available in every pedant program, in precedence order (lower = binds tighter)
37 | inBuiltBinaryOperations =
38 |   [ Operation
39 |       "+"
40 |       (do
41 |         var <- TC.newPolyNormDim
42 |         let myType = TC.NumberType $ TC.NormDim var
43 |         return $ myType `TC.FuncType` myType `TC.FuncType` myType
44 |       )
45 |       4
46 |       (BinFunc (+)),
47 |     Operation
48 |       "-"
49 |       (do
50 |         var <- TC.newPolyNormDim
51 |         let myType = TC.NumberType $ TC.NormDim var
52 |         return $ myType `TC.FuncType` myType `TC.FuncType` myType
53 |       )
54 |       4
55 |       (BinFunc (-)),
56 |     Operation
57 |       "*"
58 |       (do
59 |         var1 <- TC.newPolyNormDim
60 |         var2 <- TC.newPolyNormDim
61 |         return $ TC.NumberType (TC.NormDim var1) `TC.FuncType` TC.NumberType (TC.NormDim var2) `TC.FuncType` (TC.NumberType . TC.NormDim $ TC.multiplyNormalDimensions var1 var2)
62 |       )
63 |       3
64 |       (BinFunc (*)),
65 |     Operation
66 |       "/"
67 |       (Scheme ["a", "b", "t"] $ normalDimPoly "a" `FuncType` (normalDimPoly "b" `FuncType` normalDim [(PolyDim "a", 1), (PolyDim "b", -1)])) -- NOTE(review): still the old Scheme-based opType; inconsistent with the monadic TC.TypeInferenceMonad form above — TODO migrate
68 |       3
69 |       (BinFunc (/)),
70 |     Operation
71 |       "^"
72 |       (Scheme ["a", "b", "t"] $ powerDimPoly "a" `FuncType` (normalDimPoly "b" `FuncType` powerDim [(PolyDim "a", 1), (PolyDim "b", 1)])) -- NOTE(review): Scheme-based opType — TODO migrate to TC.TypeInferenceMonad
73 |       1
74 |       (BinFunc (**)),
75 |     Operation
76 |       ":"
77 |       (Scheme ["a"] $ PolyType "a" `FuncType` (ListType (PolyType "a") `FuncType` ListType (PolyType "a"))) -- NOTE(review): Scheme-based opType — TODO migrate to TC.TypeInferenceMonad
78 |       4
79 |       (BinFunc append),
80 |     Operation "" (Scheme ["a", "b", "t"] $ ((PolyType "a" `FuncType` PolyType "b") `FuncType` PolyType "a") `FuncType` PolyType "b") 0 (BinFunc (const id)) -- "" is function application; NOTE(review): also still Scheme-based
81 |   ]
82 |
83 | append :: NumericValue -> NumericValue -> NumericValue
84 | append a (ListValue b) = ListValue (a : b)
85 | append _ b = b
86 |
87 | inBuiltPrefixOperations :: [Operation]
88 | inBuiltPrefixOperations = [Operation "-" (Scheme ["a", "t"] $ normalDimPoly "a" `FuncType` normalDimPoly "a") 2 (UnaryFunc negate)]
89 |
90 | data Function = Function
91 | { funcName :: T.Text,
92 | funcType :: TC.TypeInferenceMonad TC.Scheme,
93 | funcDef :: InternalFunction
94 | }
95 |
96 | inBuiltFunctions :: [Function]
97 | inBuiltFunctions = [Function "ln" (Scheme ["t", "a"] $ powerDimPoly "a" `FuncType` normalDimPoly "a") (InternalFunction log)]
98 |
99 | normalDimPoly :: T.Text -> TC.Type
100 | normalDimPoly name = BaseDim $ NormDim $ Map.singleton (PolyDim name) 1
101 |
102 | powerDimPoly :: T.Text -> TC.Type
103 | powerDimPoly name = BaseDim . PowDim $ Map.singleton (PolyDim name) 1
104 |
105 | normalDim :: [(TC.PrimitiveDim, Int)] -> TC.Type
106 | normalDim powers = BaseDim . NormDim $ Map.fromList powers
107 |
108 | powerDim :: [(TC.PrimitiveDim, Int)] -> TC.Type
109 | powerDim powers = BaseDim . PowDim $ Map.fromList powers
110 |
--------------------------------------------------------------------------------
/src/Pedant/LSP.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE ExplicitNamespaces #-}
2 | {-# LANGUAGE FlexibleContexts #-}
3 | {-# LANGUAGE FlexibleInstances #-}
4 | {-# LANGUAGE GeneralisedNewtypeDeriving #-}
5 | {-# LANGUAGE ImportQualifiedPost #-}
6 | {-# LANGUAGE OverloadedStrings #-}
7 |
8 | module Pedant.LSP (runLSP) where
9 |
10 | import Control.Lens hiding (Iso)
11 | import Control.Monad (void)
12 | import Control.Monad.IO.Class (MonadIO (liftIO))
13 | import Data.Map qualified as Map
14 | import Data.Map.Ordered qualified as OMap
15 | import qualified Control.Monad.Reader as Reader
16 | import Data.Text qualified as T
17 | import Language.LSP.Diagnostics (partitionBySource)
18 | import Language.LSP.Server qualified as LSP
19 | import Language.LSP.Types qualified as LSP
20 | import Language.LSP.Types.Lens qualified as LSPL
21 | import Language.LSP.VFS qualified as LSP
22 | import Pedant.FileResolver qualified as Resolve
23 | import Pedant.Parser qualified as Parser
24 | import Pedant.TypeCheck qualified as TypeCheck
25 | import Pedant.Types qualified as Types
26 | import System.FilePath qualified as FilePath
27 | import System.Log.Logger (debugM)
28 | import Data.List.NonEmpty qualified as NonEmpty
29 |
-- | All LSP message handlers for this server: a one-off initialization
-- handler that dynamically registers the code-lens capability, plus
-- re-checking of a document whenever it is opened, changed or saved.
handlers :: LSP.Handlers (LSP.LspM ())
handlers =
  mconcat
    [ LSP.notificationHandler LSP.SInitialized $ \_msg -> do
        liftIO $ debugM "reactor.handle" "Processing the Initialized notification"

        -- We're initialized! Tell the client that we are starting up.
        LSP.sendNotification LSP.SWindowShowMessage (LSP.ShowMessageParams LSP.MtInfo "Initializing")

        -- We can dynamically register a capability once the user accepts it
        LSP.sendNotification LSP.SWindowShowMessage (LSP.ShowMessageParams LSP.MtInfo "Turning on code lenses dynamically")

        let regOpts = LSP.CodeLensRegistrationOptions Nothing Nothing (Just False)

        -- Register code lens ability to read types
        void $
          LSP.registerCapability LSP.STextDocumentCodeLens regOpts $ \_req responder -> do
            liftIO $ debugM "reactor.handle" "Processing a textDocument/codeLens request"
            let doc = _req ^. LSPL.params . LSPL.textDocument . LSPL.uri
                filename = LSP.uriToFilePath doc
            case filename of
              Just f -> do
                -- Type check the file, rooted at the document's directory,
                -- and emit one lens per entry that getTypes reports.
                types <- Reader.runReaderT (unwrapResolver $ getTypes f) (FilePath.dropFileName f)
                let rsp =
                      LSP.List
                        (map (\(line, message) -> LSP.CodeLens (LSP.mkRange (fromInteger (toInteger line)) 0 0 100) (Just (LSP.Command message "lsp-type" Nothing)) Nothing) (Map.toList types))
                responder (Right rsp)
              Nothing ->
                -- The URI has no file path; answer with an empty lens list.
                responder (Right $ LSP.List []),
      LSP.notificationHandler LSP.STextDocumentDidSave checkForErrors,
      LSP.notificationHandler LSP.STextDocumentDidChange checkForErrors,
      -- Watched-file events are deliberately ignored; open/change/save cover re-checking.
      LSP.notificationHandler LSP.SWorkspaceDidChangeWatchedFiles (\_ -> pure ()),
      LSP.notificationHandler LSP.STextDocumentDidOpen checkForErrors
    ]
64 |
-- | Extracts the document URI from any LSP message that carries
-- params -> textDocument -> uri.
getUri :: (LSPL.HasParams s a1, LSPL.HasTextDocument a1 a2, LSPL.HasUri a2 a3) => s -> a3
getUri = view (LSPL.params . LSPL.textDocument . LSPL.uri)
67 |
68 | newtype LspVFSResolver c m a = LspVFSResolver {unwrapResolver :: Reader.ReaderT String (LSP.LspT c m) a} deriving (Functor, Applicative, Monad)
69 |
-- | Resolves a module through the editor's virtual file system: module
-- "foo" is looked up as "<root>/foo.ped" among the in-memory files.
instance Resolve.ModuleResolvingMonad (LspVFSResolver config IO) where
  readModule moduleName = LspVFSResolver $ do
    rootDirectory <- Reader.ask
    -- Fixed: the original used `FilePath.>`, which System.FilePath does not
    -- export; `</>` is the path-join operator.
    virtualFileContents <- LSP.getVirtualFile $ LSP.toNormalizedUri $ LSP.filePathToUri (rootDirectory FilePath.</> moduleName FilePath.<.> "ped")
    case virtualFileContents of
      Just vf -> return . Just $ LSP.virtualFileText vf
      Nothing -> return Nothing
77 |
-- | Re-runs the resolver and type checker for the document named in the
-- notification and publishes the resulting diagnostics to the client.
checkForErrors :: (LSPL.HasParams s a1, LSPL.HasTextDocument a1 a2, LSPL.HasUri a2 LSP.Uri) => s -> LSP.LspT () IO ()
checkForErrors msg = do
  let uri = getUri msg
      fileName = LSP.uriToFilePath uri
  LSP.sendNotification LSP.SWindowShowMessage (LSP.ShowMessageParams LSP.MtInfo "file changed")
  liftIO $ debugM "reactor.handle" $ "Processing DidSaveTextDocument for: " ++ show fileName
  case LSP.uriToFilePath uri of
    Just fp -> do
      -- Check the module by its base name, rooted at the file's directory.
      diagnostics <- Reader.runReaderT (unwrapResolver $ getErrors (FilePath.takeBaseName fp)) (FilePath.dropFileName fp)
      -- NOTE(review): 100 is presumably the max diagnostics to publish and
      -- (Just 0) the document version — confirm against lsp's publishDiagnostics.
      LSP.publishDiagnostics 100 (LSP.toNormalizedUri uri) (Just 0) (partitionBySource diagnostics)
    Nothing ->
      liftIO $ debugM "reactor.handle" $ "Processing DidSaveTextDocument for: " ++ show fileName
90 |
-- | Starts the LSP server on stdin/stdout and blocks until it exits,
-- returning the server's exit code.
runLSP :: IO Int
runLSP =
  LSP.runServer $
    LSP.ServerDefinition
      { LSP.onConfigurationChange = const $ const $ Right (), -- no configuration supported
        LSP.defaultConfig = (),
        LSP.doInitialize = \env _req -> pure $ Right env,
        LSP.staticHandlers = handlers,
        LSP.interpretHandler = \env -> LSP.Iso (LSP.runLspT env) liftIO,
        LSP.options = lspOptions
      }
102 |
-- | Document synchronisation: open/close notifications on, incremental
-- change events, and save notifications without including the text.
syncOptions :: LSP.TextDocumentSyncOptions
syncOptions =
  LSP.TextDocumentSyncOptions
    (Just True) -- open/close notifications
    (Just LSP.TdSyncIncremental) -- incremental change events
    (Just False) -- willSave
    (Just False) -- willSaveWaitUntil
    (Just $ LSP.InR $ LSP.SaveOptions $ Just False) -- save, without text
111 |
-- | Server options: the sync options above plus the one command we advertise.
lspOptions :: LSP.Options
lspOptions =
  LSP.defaultOptions
    { LSP.textDocumentSync = Just syncOptions,
      LSP.executeCommandCommands = Just ["lsp-ped-command"]
    }
118 |
-- | A catch-all error diagnostic pinned to the top of the document
-- (rows 0-2), for errors that have no usable source position.
generalError :: T.Text -> LSP.Diagnostic
generalError message =
  LSP.Diagnostic
    (LSP.Range (LSP.Position 0 0) (LSP.Position 2 0))
    (Just LSP.DsError)
    Nothing -- code
    (Just "lsp-ped") -- source
    message
    Nothing -- tags
    (Just (LSP.List []))
129 |
-- | Gets parse and type errors for a module and renders them as LSP
-- diagnostics. An empty list means the module checked cleanly.
getErrors :: Resolve.ModuleResolvingMonad m => String -> m [LSP.Diagnostic]
getErrors moduleName = do
  resolveResult <- Resolve.resolve moduleName
  case resolveResult of
    Right modules ->
      case TypeCheck.typeCheck TypeCheck.emptyTypeCheckState modules of
        (Nothing, _) ->
          return []
        (Just err, _) -> do
          -- Only read the module source when an error actually needs to be
          -- rendered (the original read it unconditionally).
          contents <- Resolve.readModule moduleName
          case contents of
            Just c -> do
              let diag = Parser.makeErrorBundle err moduleName c
              return [parseErrorToDiagnostic diag]
            Nothing ->
              return [generalError "Error, Could not read file"]
    Left err ->
      -- Resolution itself failed: report every parse error.
      return (map parseErrorToDiagnostic (NonEmpty.toList err))
149 |
-- | Gets the inferred type scheme of every assignment known to the type
-- checker after checking the named module, keyed by the assignment
-- expression's source offset.
-- NOTE(review): the key is 'Parser.pdOffset' — a character offset — yet the
-- code-lens handler uses it as a line number; confirm which is intended.
getTypes :: Resolve.ModuleResolvingMonad m => String -> m (Map.Map Int T.Text)
getTypes name = do
  resolveResult <- Resolve.resolve name
  case resolveResult of
    Right modules ->
      case TypeCheck.typeCheck TypeCheck.emptyTypeCheckState modules of
        (_, state) ->
          -- Types are reported from the final state even if checking
          -- stopped with an error part-way through.
          let entries =
                map
                  (\(_, TypeCheck.VariableInfo scheme _ (Parser.Assignment _ _ expression)) -> (Parser.pdOffset . Parser.positionedData $ expression, Types.pPrint scheme))
                  (OMap.toAscList (TypeCheck.teVarMap $ TypeCheck.tcsEnv state))
           in return (Map.fromList entries)
    Left _ ->
      return Map.empty
165 |
-- | Converts an Int to LSP's unsigned integer type.
toUInt :: Int -> LSP.UInt
toUInt = fromIntegral
168 |
-- | Converts a pedant parse/type error into an LSP diagnostic, mapping its
-- start/end row and column directly onto an LSP range.
parseErrorToDiagnostic :: Parser.PedantParseError -> LSP.Diagnostic
parseErrorToDiagnostic err =
  LSP.Diagnostic
    (LSP.Range (LSP.Position (toUInt $ Types.ppeRow err) (toUInt $ Types.ppeColumn err)) (LSP.Position (toUInt $ Types.ppeEndRow err) (toUInt $ Types.ppeEndColumn err)))
    (Just LSP.DsError)
    Nothing -- code
    (Just "lsp-ped") -- source
    (Types.ppeErrString err)
    Nothing -- tags
    (Just (LSP.List []))
--------------------------------------------------------------------------------
/src/Pedant/Parser.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 | {-# LANGUAGE TypeFamilies #-}
3 |
4 | -- | The parser for pedant. Creates a syntax tree from the file
5 | module Pedant.Parser
6 | ( parseProgram,
7 | PedantParseError (..),
8 | Positioned (..),
9 | Operation (..),
10 | DimensionPart (..),
11 | Dimension (..),
12 | PositionData (..),
13 | Expression (..),
14 | Assignment (..),
15 | Statement (..),
16 | makeErrorBundle,
17 | )
18 | where
19 |
import qualified Control.Monad.Combinators.Expr as Expr
import qualified Data.Bifunctor as Bifunctor
import Data.Char (GeneralCategory (LetterNumber))
import qualified Data.List as List
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Data.Void (Void)
import qualified Pedant.InBuilt as InBuilt
import Pedant.Types (VariableName)
import qualified Pedant.Types as Types
import Text.Megaparsec
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
35 |
36 | -- | We now define a parser for this typed language
37 |
38 | type Parser = Parsec Void Text
39 |
-- | Horizontal-only space consumer: skips spaces and comments but not
-- newlines, for use inside a line fold where newlines are significant.
sc :: Parser ()
sc =
  L.space
    hspace1
    (L.skipLineComment "//")
    (L.skipBlockComment "/*" "*/")

-- | Space consumer that also eats newlines, for use between statements.
scn :: Parser ()
scn =
  L.space
    space1
    (L.skipLineComment "//")
    (L.skipBlockComment "/*" "*/")

-- | Runs a parser and then consumes trailing horizontal space.
lexeme :: Parser a -> Parser a
lexeme = L.lexeme sc

-- | Parses a fixed symbol and consumes trailing horizontal space.
symbol :: Text -> Parser Text
symbol = L.symbol sc
59 |
-- Bracketing combinators: parentheses, braces, and square brackets.
parens :: Parser a -> Parser a
parens = between (symbol "(") (symbol ")")

braces :: Parser a -> Parser a
braces = between (symbol "{") (symbol "}")

brackets :: Parser a -> Parser a
brackets = between (symbol "[") (symbol "]")
69 |
-- | A possibly-empty, comma-separated list of expressions, using the given
-- space consumer inside each expression.
commaSepExpr :: Parser () -> Parser [Positioned Expression]
commaSepExpr sc' = lexeme (pExpr sc' `sepBy` symbol ",")
73 |
-- | A variable reference, annotated with its source position.
pVariable :: Parser (Positioned Expression)
pVariable = position (Variable . VariableName <$> identifier)

-- | A numeric literal; floats are tried first, falling back to integers.
number :: RealFloat a => Parser a
number = lexeme (try L.float <|> L.decimal)
79 |
-- | A number immediately followed by its (possibly empty) dimension.
pTypedNumber :: Parser () -> Parser Expression
pTypedNumber sc' = Number <$> number <*> parseDimension sc'

-- Lists are parsed as an empty list with all elements concatenated into them
pTypedList :: Parser () -> Parser Expression
pTypedList sc' = List <$> brackets (commaSepExpr sc')
86 |
-- | A record literal of the form `{ key1 = expr1, key2 = expr2 }`.
-- Fixed: 'RecordKey' is only in scope qualified (as Types.RecordKey), and
-- 'Map' comes from Data.Map.
pTypedRecord :: Parser () -> Parser Expression
pTypedRecord sc' = Record . Map.fromList <$> braces (pair `sepBy` symbol ",")
  where
    -- A single `key = value` binding.
    pair :: Parser (Types.RecordKey, Positioned Expression)
    pair = (,) <$> (Types.RecordKey <$> identifier) <* symbol "=" <*> pExpr sc'
92 |
-- | Parses a dimension annotation: either a power dimension introduced by
-- '^', or a plain sequence of dimension parts (possibly empty).
-- NOTE(review): parts are accumulated by consing, so they end up in reverse
-- order of appearance — confirm downstream code does not depend on order.
parseDimension :: Parser () -> Parser Dimension
parseDimension sc' = (PowParseDim <$> try (char '^' *> parseNextDim [])) <|> (NormalParseDim <$> pLoop [])
  where
    -- Keeps consuming dimension parts until no more parse.
    pLoop :: [Positioned DimensionPart] -> Parser [Positioned DimensionPart]
    pLoop p = parseNextDim p <|> return p

    -- Parses one positioned part and prepends it to the accumulator.
    parseNextDim :: [Positioned DimensionPart] -> Parser [Positioned DimensionPart]
    parseNextDim oldDim = do
      dim <- L.lexeme sc' (position pSingleDim)
      pLoop (dim : oldDim)

    -- A single part: a letter-only name with an optional signed integer power
    -- (defaulting to 1), e.g. "usd" or "year-1".
    pSingleDim :: Parser DimensionPart
    pSingleDim = do
      name <- (:) <$> letterChar <*> many letterChar
      number <- fromInteger <$> L.signed (pure ()) L.decimal <|> return 1
      return (DimensionPart (T.pack name) number)
109 |
110 |
-- | An identifier: a letter followed by letters, digits or underscores,
-- labelled "name" for error messages.
-- Fixed: the original used `>` where megaparsec's label operator `<?>` was
-- intended (the doctest for pImport expects the label "name").
identifier :: Parser T.Text
identifier = lexeme $ T.pack <$> ((:) <$> letterChar <*> many (alphaNumChar <|> char '_') <?> "name")
113 |
-- | A single term of an expression: a parenthesised expression, variable,
-- list, record, or number — optionally followed by a dot-accessor.
pTerm :: Parser () -> Parser (Positioned Expression)
pTerm sc' =
  let mainExpression =
        choice
          [ parens (pExpr sc'),
            L.lexeme sc' pVariable,
            position (pTypedList sc'),
            position (pTypedRecord sc'),
            position (pTypedNumber sc')
          ]
   in do
        expression <- mainExpression
        -- A term may be followed by ".key" record access; otherwise it is
        -- returned unchanged.
        L.lexeme sc' (accessor expression <|> return expression)
  where
    accessor :: Positioned Expression -> Parser (Positioned Expression)
    accessor expr = do
      _ <- char '.'
      L.lexeme sc' $ position (Access expr <$> (AccessKey <$> identifier))
132 |
-- | Parses a full program: statements separated by (newline-eating)
-- whitespace, until end of input.
-- >>> :set -XOverloadedStrings
-- >>> parse program "" "x = 2\n"
program :: Parser [Statement]
program = manyTill (pStatement <* scn) eof
138 |
-- | Parses a single statement (import, unit declaration, or assignment)
-- inside a line fold, so continuations may be indented onto further lines.
-- >>> :set -XOverloadedStrings
-- >>> parse pStatement "" "x = 2\n"
-- Right (AssignmentStatement (Assignment {assignmentName = "x", assignmentArguments = [], assignmentExpression = Positioned {positionedData = PositionData {pdOffset = 6, pdLength = 1}, positionedValue = PConstant (ParseNumber 2.0 (NormalParseDim []))}}))
pStatement :: Parser Statement
pStatement = L.lineFold scn $ \sc' ->
  -- Prefer the line-fold consumer; fall back to plain horizontal space when
  -- the fold cannot consume (e.g. at the end of the statement).
  let new_space_consumer = try sc' <|> sc
   in choice
        [ pImport new_space_consumer,
          pUnit new_space_consumer,
          AssignmentStatement <$> pAssignment new_space_consumer
        ]
151 |
-- | Import statements. They come in the form of:
-- import [module]([name1, name2, ...])
-- >>> :set -XOverloadedStrings
-- >>> parse (pImport sc) "" "import moduleName(name1, name2)"
-- Right (ImportStatement (Positioned (PositionData 7 10) "moduleName") [Positioned (PositionData 18 5) "name1",Positioned (PositionData 25 5) "name2"])
--
-- Empty imports are invalid:
-- >>> parse (pImport sc) "" "import moduleName()"
-- Left (ParseErrorBundle {bundleErrors = TrivialError 18 (Just (Tokens (')' :| ""))) (fromList [Label ('n' :| "ame")]) :| [], bundlePosState = PosState {pstateInput = "import moduleName()\n", pstateOffset = 0, pstateSourcePos = SourcePos {sourceName = "", sourceLine = Pos 1, sourceColumn = Pos 1}, pstateTabWidth = Pos 8, pstateLinePrefix = ""}})
pImport :: Parser () -> Parser Statement
pImport sc' = do
  -- 'try' so that a non-import statement can still be attempted afterwards.
  _ <- try (L.lexeme sc' (symbol "import"))
  fileName <- position identifier
  _ <- L.lexeme sc' (symbol "(")
  ImportStatement fileName <$> pImportList
  where
    -- One or more comma-separated names, terminated by ')'.
    pImportList :: Parser [Positioned T.Text]
    pImportList = do
      importName <- position identifier
      choice
        [ do
            _ <- L.lexeme sc' (symbol ",")
            (importName :) <$> pImportList,
          do
            _ <- L.lexeme sc' (symbol ")")
            return [importName]
        ]
179 |
-- | A unit declaration: the keyword "unit" followed by one or more
-- space-separated unit names.
pUnit :: Parser () -> Parser Statement
pUnit sc' = do
  _ <- L.lexeme sc' (symbol "unit")
  UnitStatement <$> (position identifier `sepBy1` sc')
184 |
-- | Parses an assignment: a name, zero or more argument names, "=", and the
-- body expression.
-- >>> :set -XOverloadedStrings
-- >>> parse (pAssignment sc) "" "x = 2"
pAssignment :: Parser () -> Parser Assignment
pAssignment sc' = Assignment <$> identifier <*> many identifier <* symbol "=" <*> pExpr sc'
190 |
-- | A full expression: terms combined by the operator table below.
pExpr :: Parser () -> Parser (Positioned Expression)
pExpr sc' =
  Expr.makeExprParser (pTerm sc') (operatorTable sc')
194 |
-- | Builds the expression operator table from the in-built prefix and
-- binary operators, grouped into rows of equal precedence.
-- NOTE(review): 'List.sortOn fst' orders precedences ascending, and
-- makeExprParser binds earlier rows tighter — confirm 'InBuilt.opPrecedence'
-- is numbered so this ordering is the intended one.
operatorTable :: Parser () -> [[Expr.Operator Parser (Positioned Expression)]]
operatorTable sc' =
  let prefixOps = map (\op -> (InBuilt.opPrecedence op, prefix sc' (InBuilt.opName op) (Prefix $ PrefixOperation (InBuilt.opName op)))) InBuilt.inBuiltPrefixOperations
      binaryOps = map (\op -> (InBuilt.opPrecedence op, binary sc' (InBuilt.opName op) (BinOp $ BinaryOperation (InBuilt.opName op)))) InBuilt.inBuiltBinaryOperations
      combinedOps = prefixOps ++ binaryOps
      sortedOps = List.groupBy (\a b -> fst a == fst b) (List.sortOn fst combinedOps)
   in map (map snd) sortedOps
202 |
-- | Wraps a parser so its result is annotated with the offset at which it
-- started and the number of characters it consumed.
position :: Parser a -> Parser (Positioned a)
position parser = do
  start <- getOffset
  value <- parser
  end <- getOffset
  pure (Positioned (PositionData start (end - start)) value)
209 |
-- | A left-associative infix operator: parses the operator symbol and
-- combines the operands with a span covering both.
binary :: Parser () -> Text -> (Positioned Expression -> Positioned Expression -> Expression) -> Expr.Operator Parser (Positioned Expression)
binary sc' name f =
  Expr.InfixL
    ( do
        _ <- L.symbol sc' name
        return (combinePositions f)
    )
217 |
-- | Applies a binary constructor to two positioned values, producing a
-- position spanning from the start of the first to the end of the second.
combinePositions :: (Positioned a -> Positioned b -> c) -> Positioned a -> Positioned b -> Positioned c
combinePositions f a b =
  let Positioned (PositionData firstStart _) _ = a
      Positioned (PositionData secondStart secondLength) _ = b
      spanLength = secondStart + secondLength - firstStart
   in Positioned (PositionData firstStart spanLength) (f a b)
221 |
-- | A prefix operator: the resulting position starts at the operator symbol
-- and extends to the end of the operand.
prefix :: Parser () -> Text -> (Positioned Expression -> Expression) -> Expr.Operator Parser (Positioned Expression)
prefix sc' name f =
  Expr.Prefix
    ( do
        offset <- getOffset
        _ <- L.symbol sc' name
        return (\x@(Positioned (PositionData childOffset l) _) -> Positioned (PositionData offset ((childOffset + l) - offset)) (f x))
    )
230 |
-- | Converts a megaparsec error bundle into pedant's error type, translating
-- the 1-indexed source position into 0-indexed rows and columns.
-- Fixed: SourcePos's fields are (name, line, column) in that order; the
-- original pattern bound them as (_ column row), swapping row and column in
-- every reported position.
errorBundleToPedantError :: ShowErrorComponent b => ParseErrorBundle Text b -> NonEmpty PedantParseError
errorBundleToPedantError bundle =
  let (SourcePos _ row column) = pstateSourcePos (bundlePosState bundle)
   in fmap
        ( \err ->
            PedantParseError
              (T.pack $ parseErrorTextPretty err)
              (unPos column - 1)
              (unPos row - 1)
              (unPos row - 1)
              (unPos column)
              (T.pack $ errorBundlePretty bundle)
        )
        (bundleErrors bundle)
245 |
-- | Parses a full pedant program from the given source name and contents.
-- A trailing newline is appended so statements at end-of-file still parse.
parseProgram :: String -> T.Text -> Either (NonEmpty PedantParseError) [Statement]
parseProgram name contents =
  Bifunctor.first errorBundleToPedantError (parse program name (contents <> "\n"))
249 |
-- | Makes a bundle of errors based on a file name contents and positions.
-- Translates the positioned error's offset/length into 0-indexed start and
-- end row/column pairs and pretty-prints the bundle.
-- NOTE(review): the two-element list pattern on attachSourcePos is
-- irrefutable in spirit only — it will crash if megaparsec ever returns a
-- different number of positions; confirm this invariant holds.
makeErrorBundle :: (ShowErrorComponent a, a ~ Positioned b) => a -> String -> T.Text -> PedantParseError
makeErrorBundle err@(Positioned (PositionData offset l) _) filename contents =
  let initialPosState =
        PosState
          { pstateInput = contents,
            pstateOffset = 0,
            pstateSourcePos = initialPos filename,
            pstateTabWidth = defaultTabWidth,
            pstateLinePrefix = ""
          }
      ([(_, sourcePos), (_, endSourcePos)], _) = attachSourcePos id [offset, offset + l] initialPosState
      newPosState = initialPosState {pstateInput = contents, pstateOffset = 0}
      errorBundle = ParseErrorBundle (FancyError offset (Set.singleton (ErrorCustom err)) :| []) newPosState
   in PedantParseError
        { ppeErrString = T.pack $ showErrorComponent err,
          ppeColumn = unPos (sourceColumn sourcePos) - 1,
          ppeRow = unPos (sourceLine sourcePos) - 1,
          ppeEndColumn = unPos (sourceColumn endSourcePos) - 1,
          ppeEndRow = unPos (sourceLine endSourcePos) - 1,
          ppePrint = T.pack $ errorBundlePretty errorBundle
        }
272 |
--------------------------------------------------------------------------------
/src/Pedant/Parser/Types.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE InstanceSigs #-}
2 | {-# LANGUAGE OverloadedStrings #-}
3 | module Pedant.Parser.Types (Assignment(..), Statement(..), PositionData(..), Positioned(..), Expression(..)) where
4 |
5 | import qualified Data.Text as T
6 | import qualified Data.Map as Map
7 | import qualified Pedant.Types as Types
8 |
-- | A single assignment: name, argument names (empty for plain bindings),
-- and the body expression.
data Assignment = Assignment
  { assignmentName :: T.Text,
    assignmentArguments :: [T.Text],
    assignmentExpression :: Positioned Expression
  }
  deriving (Show)

-- | A top-level statement: an assignment, a unit declaration, or an import
-- of names from another module.
data Statement
  = AssignmentStatement Assignment
  | UnitStatement [Positioned T.Text]
  | ImportStatement (Positioned T.Text) [Positioned T.Text]
  deriving (Show)

-- | A source span: character offset from the start of the file plus length.
data PositionData = PositionData {
  pdOffset :: Int,  -- character offset of the start
  pdLength :: Int   -- number of characters covered
}
  deriving (Show, Eq, Ord)

-- | A value paired with the source span it was parsed from.
data Positioned a = Positioned {
  positionedData :: PositionData,
  positionedValue :: a
} deriving (Show)
32 |
-- Positioned values are equal only when both the span and the payload match.
instance Eq a => Eq (Positioned a) where
  p1 == p2 =
    positionedData p1 == positionedData p2
      && positionedValue p1 == positionedValue p2
35 |
-- Mapping over a positioned value transforms the payload, keeping the span.
instance Functor Positioned where
  fmap f (Positioned p x) = Positioned p (f x)
38 |
-- | One factor of a dimension: a unit name raised to an integer power,
-- e.g. "usd" to the power 1 or "year" to the power -1.
data DimensionPart = DimensionPart
  { pdpName :: T.Text,
    pdpPower :: Int
  }
  deriving (Show, Eq)
44 |
instance Types.PrettyPrint DimensionPart where
  -- A power of 1 is left implicit; any other power is appended to the name.
  pPrint (DimensionPart name power)
    | power == 1 = name
    | otherwise = name <> T.pack (show power)
50 |
-- | A parsed dimension annotation: either a power dimension (written with a
-- leading '^') or a normal product of dimension parts.
data Dimension
  = PowParseDim [Positioned DimensionPart]
  | NormalParseDim [Positioned DimensionPart]
  deriving (Show, Eq)

instance Types.PrettyPrint Dimension where
  pPrint :: Dimension -> T.Text
  pPrint (NormalParseDim parts) = T.unwords (map Types.pPrint parts)
  pPrint (PowParseDim parts) = "^" <> T.unwords (map Types.pPrint parts)
60 |
61 |
-- | The surface syntax of a pedant expression.
-- NOTE(review): 'Let' and 'Abs' carry unpositioned sub-expressions, unlike
-- every other constructor — confirm this asymmetry is intentional.
data Expression
  = BinOp Types.BinaryOperation (Positioned Expression) (Positioned Expression)
  | Variable Types.VariableName
  | Number Double Dimension
  | List [Positioned Expression]
  | Let Types.VariableName Expression Expression
  | Abs Types.VariableName Expression
  | Record (Map.Map Types.RecordKey (Positioned Expression))
  | Prefix Types.PrefixOperation (Positioned Expression)
  | Access (Positioned Expression) Types.AccessKey
  deriving (Show, Eq)
73 |
-- Positioned values pretty-print as their payload; the span is not shown.
instance Types.PrettyPrint a => Types.PrettyPrint (Positioned a) where
  pPrint (Positioned _ a) = Types.pPrint a
76 |
-- | Renders expressions back into pedant-like concrete syntax.
instance Types.PrettyPrint Expression where
  pPrint (BinOp (Types.BinaryOperation op) e1 e2) = T.unwords [Types.pPrint e1, op, Types.pPrint e2]
  pPrint (Variable (Types.VariableName var)) = var
  pPrint (Prefix (Types.PrefixOperation op) e1) = T.concat [op, Types.pPrint e1]
  -- NOTE(review): record access prints without the '.' separator — confirm.
  pPrint (Access e1 (Types.AccessKey att)) = T.concat [Types.pPrint e1, att]
  pPrint (Number num dim) = T.unwords [T.pack $ show num, Types.pPrint dim]
  pPrint (List list) = T.concat ["[", T.intercalate ", " (map Types.pPrint list), "]"]
  pPrint (Let (Types.VariableName name) value expr) = T.concat ["let ", name, " = ", Types.pPrint value, " in ", Types.pPrint expr]
  pPrint (Abs (Types.VariableName name) value) = T.concat ["\\", name, " -> ", Types.pPrint value]
  pPrint (Record op) =
    T.concat
      [ "{",
        T.intercalate ", " (map (\(Types.RecordKey key, value) -> T.concat [key, " = ", Types.pPrint value]) (Map.toAscList op)),
        "}"
      ]
92 |
-- | Built-in functions. Currently only the natural logarithm.
data Function = NaturalLogarithm
  deriving (Show)
--------------------------------------------------------------------------------
/src/Pedant/TypeCheck.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE FlexibleInstances #-}
2 | {-# LANGUAGE OverloadedStrings #-}
3 |
4 | -- | TypeChecker for Pedant.
5 | module Pedant.TypeCheck
6 | ( TypeEnv (..),
7 | typeCheck,
8 | TypeError (..),
9 | emptyTypeCheckState,
10 | TypeCheckState (..),
11 | VariableName (..),
12 | VariableInfo(..)
13 | )
14 | where
15 |
16 | import Control.Monad.Except
17 | import Control.Monad.State hiding (state)
18 | import qualified Data.Map as Map
19 | import qualified Data.Map.Ordered as OMap
20 | import qualified Data.Maybe as Maybe
21 | import qualified Data.Set as Set
22 | import qualified Data.Text as T
23 | import Debug.Trace (trace, traceShowId)
24 | import qualified Pedant.FileResolver as Resolver
25 | import qualified Pedant.InBuilt as InBuilt
26 | import qualified Pedant.Parser as Parser
27 | import Pedant.Types
28 | import qualified Text.Megaparsec as Megaparsec
29 |
30 |
-- | The state of the type checker
data TypeCheckState = TypeCheckState
  { -- | The environment of the checker. This contains references to all the variables and schemes of those variables currently declared.
    tcsEnv :: TypeEnv,
    -- | Substitutions, the current substitutions that are required for the expression to unify
    tcsSubs :: Substitution,
    -- | Units, the units currently declared
    tcsUnits :: Set.Set VariableName,
    -- | A list of the modules that have been checked
    tcsCheckedModules :: Set.Set T.Text,
    -- | The name of the module currently being checked
    tcsCurrentModule :: T.Text
  }

-- | The initial checker state: no variables, substitutions, units, or
-- checked modules, and an empty current-module name.
emptyTypeCheckState :: TypeCheckState
emptyTypeCheckState = TypeCheckState (TypeEnv OMap.empty) nullSubst Set.empty Set.empty ""
46 |
-- | The empty substitution: no type or dimension replacements.
nullSubst :: Substitution
nullSubst = Substitution Map.empty Map.empty

-- | Left-biased union of two substitutions (entries in 'a' win on clashes).
subUnion :: Substitution -> Substitution -> Substitution
subUnion a b =
  Substitution
    { subTypes = subTypes a `Map.union` subTypes b,
      subDimensions = subDimensions a `Map.union` subDimensions b
    }
56 |
-- | Composes two substitutions: s1 is applied to the range of s2 first, so
-- applying the result is equivalent to applying s2 then s1.
composeSubst :: Substitution -> Substitution -> Substitution
composeSubst s1 s2 = appliedSubs `subUnion` s1
  where
    appliedSubs =
      Substitution
        { subTypes = Map.map (apply s1) (subTypes s2),
          subDimensions = Map.map (apply s1) (subDimensions s2)
        }
65 |
-- | Everything the checker records about one bound variable: its type
-- scheme, its compiled execution expression, and the source assignment.
data VariableInfo = VariableInfo {
  variableInfoScheme :: Scheme,
  variableInfoExecutionExpression :: ExecutionExpression,
  variableInfoParserStatement :: Parser.Assignment
} deriving (Show)

-- | The typing environment: an insertion-ordered map from variable names to
-- their recorded information.
newtype TypeEnv = TypeEnv { teVarMap :: OMap.OMap VariableName VariableInfo }
  deriving (Show)
74 |
75 |
-- Free type variables / substitution application lift pointwise over every
-- scheme stored in the environment.
instance Types TypeEnv where
  ftv (TypeEnv env) = ftv (map (variableInfoScheme . snd) $ OMap.assocs env)
  apply s (TypeEnv env) = TypeEnv (fmap (\vi -> vi { variableInfoScheme = apply s (variableInfoScheme vi) }) env)

-- The checker state delegates to its environment.
instance Types TypeCheckState where
  ftv state = ftv (tcsEnv state)
  apply s state = state {tcsEnv = apply s (tcsEnv state)}
83 |
84 |
-- | A wrapper for type names.
newtype TypeName = TypeName T.Text

-- | The type-inference monad: may fail with a positioned TypeError, and
-- threads the fresh-variable supply state.
type TI a = ExceptT (Parser.Positioned TypeError) (State TIState) a
88 |
-- | Runs a type-inference computation from a fresh supply counter,
-- returning the result (or error) together with the final supply state.
runTI :: TI a -> (Either (Parser.Positioned TypeError) a, TIState)
runTI t =
  runState (runExceptT t) initTIState
  where
    initTIState = TIState {tiSupply = 0}
94 |
-- | Generates a fresh polymorphic type variable named "a<n>", where n is the
-- current value of the monotonically increasing supply counter.
newTyVar :: TI Type
newTyVar = do
  current <- gets tiSupply
  modify (\st -> st {tiSupply = current + 1})
  return (PolyType ("a" <> T.pack (show current)))
100 |
-- | Generates a fresh polymorphic dimension variable named
-- "<prefix><n>" with power 1, consuming one supply counter value.
newTyDimension :: T.Text -> TI Dimension
newTyDimension prefix =
  do
    s <- get
    put s {tiSupply = tiSupply s + 1}
    return (NormDim $ Map.singleton (PolyDim $ prefix <> T.pack (show (tiSupply s))) 1)
107 |
108 |
-- | Runs a unification computation inside the inference monad, sharing the
-- supply state. A unification failure is wrapped as a positioned
-- UnificationError carrying the given reason.
liftUMtoTI :: Parser.PositionData -> ReasonForUnification -> UM a -> TI a
liftUMtoTI p reason m = do
  initState <- get
  case runState (runExceptT m) initState of
    (Right result, state) -> do
      put state
      return result
    (Left err, _) -> throwError $ Parser.Positioned p $ UnificationError reason err
117 |
-- | Runs a unification, and on failure pushes the pair of types being
-- unified onto the error trace, building a stack of failing contexts.
wrapError :: Type -> Type -> UM Substitution -> UM Substitution
wrapError t1 t2 child = child `catchError` addUnificationLayer
  where
    addUnificationLayer :: UnificationTrace -> UM a
    addUnificationLayer errStack = throwError ((t1, t2) : errStack)
123 |
-- | Binds a type variable to a type, producing a singleton substitution.
-- Binding a variable to itself yields the empty substitution; binding is
-- refused (occurs check) when the variable appears free in the type, which
-- would create an infinite type.
varBind :: T.Text -> Type -> UM Substitution
varBind u t
  | t == PolyType u = return nullSubst
  | u `Set.member` ftv t =
    throwError
      [(PolyType u, t)]
  | otherwise = return (nullSubst {subTypes = Map.singleton u t})
131 |
-- | Type checks a list of modules in order, threading the checker state
-- through and stopping at the first module that fails.
-- Fixed: removed leftover 'Debug.Trace.trace' instrumentation that printed
-- every module name to stderr, and the spurious 'do' in a pure branch.
typeCheck :: TypeCheckState -> [Resolver.Module] -> (Maybe (Parser.Positioned TypeError), TypeCheckState)
typeCheck tcState [] =
  (Nothing, tcState)
typeCheck tcState (currentModule : rest) =
  case typeCheckFile tcState currentModule of
    (Just err, newState) -> (Just err, newState)
    (Nothing, newState) -> typeCheck newState rest
141 |
142 |
-- | Brings names from an already-checked module into the current module's
-- scope. Each imported name is looked up first as a variable (re-bound as a
-- reference to the source module's variable) and then as a unit; a name
-- found as neither raises a MissingImportError. Importing from a module
-- that has not been checked raises a MissingModuleError.
importModule :: T.Text -> Parser.Positioned T.Text -> [Parser.Positioned T.Text] -> TypeCheckState -> TI TypeCheckState
importModule moduleName (Parser.Positioned moduleNamePos importedModuleName) imports oldState =
  if importedModuleName `Set.member` tcsCheckedModules oldState
    then do
      let foldImports = foldM $ \currState (Parser.Positioned importNamePos importName) -> do
            let (TypeEnv env) = tcsEnv currState
            case OMap.lookup (VariableName importedModuleName importName) env of
              Just vi ->
                -- The item imported is a variable. I simply write this down
                -- as a variable declaration
                let newTcState = addToEnv (VariableName moduleName importName) (vi { variableInfoExecutionExpression = EVariable (VariableName importedModuleName importName)}) (tcsEnv currState)
                 in return (currState {tcsEnv = newTcState})
              Nothing ->
                -- Is it an imported unit?
                if VariableName importedModuleName importName `Set.member` tcsUnits oldState
                  then
                    let newUnits = VariableName moduleName importName `Set.insert` tcsUnits currState
                     in return (currState {tcsUnits = newUnits})
                  else throwError $ Parser.Positioned importNamePos (MissingImportError importedModuleName importName)
      foldImports oldState imports
    else throwError $ Parser.Positioned moduleNamePos (MissingModuleError importedModuleName)
164 |
-- | Wraps an expression in one function abstraction per argument name,
-- outermost argument first.
wrapFunctionArgs :: [T.Text] -> ExecutionExpression -> ExecutionExpression
wrapFunctionArgs args expr =
  foldr (\arg inner -> EConstant (ExecutionValueFunc arg inner)) expr args
168 |
169 |
-- | Composes a collection of substitutions into one, right to left,
-- starting from the empty substitution.
foldSubst :: Traversable t => t Substitution -> Substitution
foldSubst = foldr composeSubst nullSubst
172 |
--------------------------------------------------------------------------------
/src/Pedant/TypeCheck/Dimensions.hs:
--------------------------------------------------------------------------------
1 | module Pedant.TypeCheck.Dimensions where
2 |
3 | import qualified Pedant.TypeCheck.Types as Types
4 |
data PrimitiveDim
  = -- | Literal dimension, such as years
    LitDim Types.UnitName
  | -- | Polymorphic dimension variable, standing for a unit still to be
    -- determined by unification
    PolyPrimDim PolyTypeName
  deriving (Eq, Ord)
11 |
-- | Attempts to find a unification between dimensions
mguDim :: Types.Dimension -> Types.Dimension -> Types.UM Types.Substitution
mguDim (NormDim t) (NormDim u) =
  if u == t
    then return nullSubst
    else -- unifying dimensions is a bit tricky, and this method is not perfect and leaves out some possible (but rare) unifications

      let dividedOut = Map.filter (/= 0) $ Map.unionWith (+) t (Map.map negate u)
          polyDim =
            Maybe.mapMaybe
              ( \(k, v) ->
                  case (k, v) of
                    (PolyDim d, 1) -> Just (d, 1)
                    (PolyDim d, -1) -> Just (d, -1)
                    _ -> Nothing
              )
              (Map.toList dividedOut)
       in case polyDim of
            (firstDim, power) : _ ->
              let withoutPolyVar = Map.delete (PolyDim firstDim) dividedOut
                  dividedByPower = Map.map (`quot` (- power)) withoutPolyVar
               in return $ nullSubst {subDimensions = Map.singleton firstDim (NormDim dividedByPower)}
            [] ->
              throwError [(BaseDim (NormDim t), BaseDim (NormDim u))]
mguDim (PowDim t) (PowDim u) =
  trace (show (PowDim t) ++ " unify " ++ show (PowDim u)) $
    if u == t
      then return nullSubst
      else
        let dividedOut = Map.filter (/= 0) $ Map.unionWith (+) t (Map.map negate u)
            polyDim =
              Maybe.mapMaybe
                ( \(k, v) ->
                    case (k, v) of
                      (PolyDim d, 1) -> Just (d, 1)
                      (PolyDim d, -1) -> Just (d, -1)
                      _ -> Nothing
                )
                (Map.toList (traceShowId dividedOut))
         in case traceShowId polyDim of
              (firstDim, power) : _ ->
                let withoutPolyVar = Map.delete (PolyDim firstDim) dividedOut
                    dividedByPower = Map.map (`quot` (- power)) (traceShowId withoutPolyVar)
                 in return $ nullSubst {subDimensions = Map.singleton firstDim (PowDim (traceShowId dividedByPower))}
              [] ->
                throwError [(BaseDim (PowDim t), BaseDim (PowDim u))]
mguDim (NormDim u) (PowDim t) = do
  -- I can only unify BaseDims and PowDims if they both unify to dimensionless
  trace (show (NormDim u) ++ " unify " ++ show (PowDim t)) $
    ( do
        s1 <- mguDim (NormDim Map.empty) (NormDim u)
        s2 <- mguDim (PowDim Map.empty) (apply s1 (PowDim t))
        return $ s2 `composeSubst` s1
    )
      `catchError` (\_ -> throwError [(BaseDim (NormDim u), BaseDim (PowDim t))])
mguDim (PowDim u) (NormDim t) =
  -- I can only unify BaseDims and PowDims if they both unify to dimensionless.
  -- Fixed: the catchError now wraps the whole do-block (previously it
  -- attached only to the final 'return', so failures were never caught),
  -- and the reported error pair uses the correct constructors for u and t.
  ( do
      s1 <- mguDim (PowDim Map.empty) (PowDim u)
      s2 <- mguDim (NormDim Map.empty) (apply s1 (NormDim t))
      return $ s2 `composeSubst` s1
  )
    `catchError` (\_ -> throwError [(BaseDim (PowDim u), BaseDim (NormDim t))])
74 |
-- | The free (polymorphic) dimension variables of a dimension.
-- NOTE(review): the PolyDim clause implies Dimension has a PolyDim
-- constructor in this refactor — confirm against Pedant.TypeCheck.Types.
ftv :: Dimension -> Set.Set PolyTypeName
ftv (NormDim n) =
  let keys = Map.keys n
   in Set.fromList $ Maybe.mapMaybe polymorphicVar keys
  where
    polymorphicVar :: PrimitiveDim -> Maybe PolyTypeName
    polymorphicVar (PolyPrimDim a) = Just a
    polymorphicVar _ = Nothing
ftv (PowDim n) = ftv (NormDim n)
ftv (PolyDim n) = Set.singleton n
85 |
-- | Applies a substitution to a dimension, replacing every substituted
-- polymorphic variable and dropping factors whose powers cancel to zero.
-- NOTE(review): the 'Just (PolyDim x)' branch of applyOne returns a
-- Dimension where a Map is expected — this looks like it cannot type-check;
-- confirm the intended result for that case.
apply :: Substitution -> Dimension -> Dimension
apply s dim =
  case dim of
    NormDim n -> NormDim $ Map.foldlWithKey applyOne Map.empty n
    PowDim n -> PowDim $ Map.foldlWithKey applyOne Map.empty n
    PolyDim name ->
      case Map.lookup name (subDimensions s) of
        Nothing -> PolyDim name
        Just x -> apply s x
  where
    -- Folds one (primitive dim, power) factor into the accumulated map,
    -- expanding substituted polymorphic variables by the factor's power.
    applyOne :: Map.Map PrimitiveDim Int -> PrimitiveDim -> Int -> Map.Map PrimitiveDim Int
    applyOne dimMap (LitDim x) power = Map.filter (/= 0) $ Map.unionWith (+) dimMap (Map.singleton (LitDim x) power)
    applyOne dimMap (PolyPrimDim x) power =
      case Map.lookup x (subDimensions s) of
        Just (NormDim substitution) -> combine dimMap (Map.map (* power) substitution)
        Just (PowDim substitution) -> combine dimMap (Map.map (* power) substitution)
        Just (PolyDim x) -> PolyDim x
        Nothing -> combine dimMap (Map.singleton (PolyPrimDim x) power)

    -- Merges two factor maps, dropping factors that cancel to power zero.
    combine :: Map.Map PrimitiveDim Int -> Map.Map PrimitiveDim Int -> Map.Map PrimitiveDim Int
    combine a b = Map.filter (/= 0) $ Map.unionWith (+) a b
--------------------------------------------------------------------------------
/src/Pedant/TypeCheck/Expressions.hs:
--------------------------------------------------------------------------------
1 | module Pedant.TypeCheck.Expressions (inferType) where
2 |
3 | import qualified Pedant.TypeCheck.Types as TC
4 |
-- | Infer the type of a single parsed expression, producing the
-- substitution generated, the inferred type, and the executable form.
--
-- NOTE(review): the signature declares one argument in monad @T@, but the
-- definition takes two (a state and the positioned expression) and the
-- recursive calls go through @ti@, not @inferType@ — this module appears
-- to be mid-refactor; confirm the intended signature and monad name.
inferType :: Parser.Positioned Parser.Expression -> T TypeCheckResult
inferType state (Parser.Positioned pos expression) =
  let (TypeEnv env) = tcsEnv state
      -- Only units declared by the module currently being checked are usable.
      allowedUnits = Set.filter (\(VariableName moduleName _) -> moduleName == tcsCurrentModule state) $ tcsUnits state
   in case expression of
        -- We got a variable
        Parser.Variable n ->
          -- Lookup variable in type environment
          case OMap.lookup (VariableName (tcsCurrentModule state) n) env of
            Nothing ->
              -- Not a declared variable: fall back to the in-built functions.
              case filter ((== n) . InBuilt.funcName) InBuilt.inBuiltFunctions of
                func : _ -> do
                  let t = InBuilt.funcType func
                  return $ TypeCheckResult nullSubst t (EInternalFunc $ InBuilt.funcDef func)
                [] ->
                  throwError $ Parser.Positioned pos $ MissingVariableError n
            Just vi -> do
              t <- variableInfoScheme vi
              return $ TypeCheckResult nullSubst t (EVariable (VariableName (tcsCurrentModule state) n))
        -- A numeric literal with an optional dimension annotation.
        Parser.Number value pdim -> do
          dimension <- evaluateDimension (Set.map (\(VariableName _ name) -> name) allowedUnits) pdim
          return $ TypeCheckResult nullSubst (BaseDim dimension) (EConstant $ ExecutionValueNumber value)
        Parser.List list -> do
          -- NOTE(review): the elements of @list@ are never type checked and
          -- @ExecutionValueList@ is applied to no argument (it takes a list)
          -- — this branch looks unfinished.
          let emptyListType = Scheme ["a", "t"] $ ListType (BaseDim (NormDim (Map.singleton (PolyDim "a") 1)))
          dim <- instantiate emptyListType
          return $ TypeCheckResult nullSubst dim (EConstant (ExecutionValueList ))
        Parser.Record record -> do
          -- Check each field independently, then assemble the dict type from
          -- the per-field results.
          recordEntries <- forM (Map.toList record) $ \(key, el) -> do
            TypeCheckResult sub _type ex <- ti state el
            return (key, (sub, _type, ex))

          let dimension = map (\(key, (_, d, _)) -> (key, d)) recordEntries
              elems = map (\(key, (_, _, value)) -> (key, value)) recordEntries
              substitutions = map (\(_, (sub, _, _)) -> sub) recordEntries
          return $ TypeCheckResult (foldSubst substitutions) (DictType (Map.fromList dimension)) (EConstant $ ExecutionValueDict (Map.fromList elems))
        -- The empty operator name denotes function application.
        Parser.BinOp "" e1 e2 ->
          do
            tv <- newTyVar "a"
            TypeCheckResult sub1 type1 ex1 <- ti state e1
            TypeCheckResult sub2 type2 ex2 <- ti (apply sub1 state) e2
            let reason = BinaryOpUnificationReason "" (e1, type1) (e2, type2)
            -- The applied expression must unify with a function from the
            -- argument's type to a fresh result variable.
            sub3 <- liftUMtoTI pos reason $ mgu (apply sub2 type1) (FuncType type2 tv)
            return $ TypeCheckResult (sub3 `composeSubst` sub2 `composeSubst` sub1) (apply sub3 tv) (EBinOp "" ex1 ex2)
        Parser.BinOp opName e1 e2 ->
          do
            -- Named binary operators are resolved against the in-built table.
            case filter ((== opName) . InBuilt.opName) InBuilt.inBuiltBinaryOperations of
              [] -> throwError $ Parser.Positioned pos $ InternalError $ "ERROR, COULD NOT FIND OPERATION " <> opName
              op : _ -> do
                tv <- newTyVar "a"
                opType <- instantiate (InBuilt.opType op)
                TypeCheckResult s1 t1 ex1 <- ti state e1
                TypeCheckResult s2 t2 ex2 <- ti (apply s1 state) e2
                let reason = BinaryOpUnificationReason opName (e1, t1) (e2, t2)
                s3 <- liftUMtoTI pos reason $ mgu opType (t1 `FuncType` (t2 `FuncType` tv))
                return $ TypeCheckResult (s3 `composeSubst` s2 `composeSubst` s1) (apply s3 tv) (EBinOp opName ex1 ex2)
        -- Record field access: e1.x
        Parser.Access e1 x ->
          do
            tv <- newTyVar "a"
            TypeCheckResult s1 t1 ex1 <- ti state e1
            let reason = AccessUnificationReason (e1, t1) x
            -- Unify against a polymorphic dict that has at least key x.
            s2 <- liftUMtoTI pos reason $ mgu t1 (PolyDictType (Map.singleton x tv))
            return $ TypeCheckResult (s2 `composeSubst` s1) (apply s2 tv) (EAccess ex1 x)
        Parser.Prefix preOp e1 ->
          case filter ((== preOp) . InBuilt.opName) InBuilt.inBuiltPrefixOperations of
            [] -> throwError $ Parser.Positioned pos (MissingVariableError preOp)
            op : _ -> do
              let prefixScheme = InBuilt.opType op
              prefixType <- instantiate prefixScheme
              tv <- newTyVar "a"
              TypeCheckResult s1 t1 ex1 <- ti state e1
              let reason = PrefixOpUnificationReason preOp (e1, t1)
              s2 <- liftUMtoTI pos reason $ mgu prefixType (t1 `FuncType` tv)
              -- NOTE(review): every prefix operator compiles to ENegate here,
              -- regardless of which operator matched — confirm intended.
              return $ TypeCheckResult (s2 `composeSubst` s1) (apply s2 tv) (ENegate ex1)
78 |
-- | Convert a parsed dimension into a type-checker dimension, checking
-- that every unit it mentions has been declared.
evaluateDimension :: Set.Set T.Text -> Parser.Dimension -> TI Dimension
evaluateDimension allowedUnits dim =
  case dim of
    Parser.PowParseDim components -> PowDim <$> buildMap components
    Parser.NormalParseDim components -> NormDim <$> buildMap components
  where
    buildMap :: [Parser.Positioned Parser.DimensionPart] -> TI (Map.Map PrimitiveDim Int)
    buildMap = foldM insertPart Map.empty

    -- Insert one unit^power component, failing if the unit is undeclared.
    insertPart :: Map.Map PrimitiveDim Int -> Parser.Positioned Parser.DimensionPart -> TI (Map.Map PrimitiveDim Int)
    insertPart acc (Parser.Positioned p (Parser.DimensionPart name power))
      | Set.member name allowedUnits = return $ Map.insert (LitDim name) power acc
      | otherwise = throwError $ Parser.Positioned p $ MissingUnitError name
92 |
-- | Most general unifier of two types; any failure inside is annotated
-- with the pair of types being unified.
mgu :: Type -> Type -> UM Substitution
mgu t1 t2 = wrapError t1 t2 $ mgu' t1 t2
95 |
-- | The structural unification worker.  Clause order matters: variable
-- binding is tried before the remaining structural cases.
mgu' :: Type -> Type -> UM Substitution
-- Functions unify argument-wise; the substitution from the arguments is
-- applied before unifying the results.
mgu' (FuncType l r) (FuncType l' r') = do
  s1 <- mgu l l'
  s2 <- mgu (apply s1 r) (apply s1 r')
  -- NOTE(review): most Algorithm W presentations compose as
  -- s2 `composeSubst` s1 here; confirm composeSubst's argument order.
  return (s1 `composeSubst` s2)
-- A type variable unifies with anything by binding.
mgu' (PolyType u) t = varBind u t
mgu' t (PolyType u) = varBind u t
-- Numbers unify when their dimensions do.
mgu' (BaseDim t) (BaseDim u) = mguDim t u
-- A polymorphic dict unifies with a concrete dict when every key the
-- polymorphic side requires is present and its type unifies.
mgu' (PolyDictType t) (DictType u) = do
  foldM go nullSubst (Map.toList t)
  where
    go :: Substitution -> (T.Text, Type) -> UM Substitution
    go sub (key, type_) = do
      case Map.lookup key u of
        Just currType -> do
          s1 <- mgu (apply sub type_) (apply sub currType)
          return (sub `composeSubst` s1)
        Nothing -> throwError [(PolyDictType t, DictType u)]
-- The symmetric case delegates to the clause above.
mgu' (DictType t) (PolyDictType u) = mgu' (PolyDictType u) (DictType t)
-- Lists unify element-wise.
mgu' (ListType t) (ListType u) = mgu t u
-- Anything else is a unification failure.
mgu' t1 t2 = throwError [(t1, t2)]
--------------------------------------------------------------------------------
/src/Pedant/TypeCheck/LambdaCalculus.hs:
--------------------------------------------------------------------------------
1 | module Pedant.TypeCheck.LambdaCalculus (inferType) where
2 |
3 | import qualified Pedant.TypeCheck.Types as TC
4 | import qualified Data.Set as Set
5 | import qualified Data.Text as T
6 | import qualified Pedant.Types as Types
7 | import qualified Pedant.FileResolver as Resolver
8 |
-- | The result of type checking one module.
-- NOTE(review): the field prefixes are inconsistent (tcm vs tcs) and
-- mirror TypeCheckState in Pedant.TypeCheck.Types — confirm whether this
-- record is meant to replace that one.
data TypeCheckedModule = TypeCheckedModule {
  -- | The environment of the checker. This contains references to all the variables and schemes of those variables currently declared.
  tcmVars :: TC.TypeEnv,
  -- | Substitutions, the current substitutions that are required for the expression to unify
  tcsSubs :: TC.Substitution,
  -- | Units, the units currently declared
  tcsUnits :: Set.Set Types.VariableName,
  -- | A list of the modules that have been checked
  tcsCheckedModules :: Set.Set T.Text,
  -- | The name of the module currently being checked
  tcsCurrentModule :: T.Text,
  -- | Seed used when generating fresh type variables
  tcsSeed :: Int
}
21 |
-- | Type check a whole resolved module: walk its statements in order,
-- accumulating declared units, imports, and variable assignments.
--
-- NOTE(review): the declared result type (TC.TypeInferenceMonad
-- TypeCheckedModule) does not match the tuples the body returns, and
-- @tcState@ is not bound anywhere visible in this module — this file
-- appears mid-refactor; confirm the intended shape before relying on it.
typeCheckFile :: Resolver.Module -> TC.TypeInferenceMonad TypeCheckedModule
typeCheckFile m =
  let setStateModuleName = tcState {tcsCurrentModule = Resolver.moduleName m}
      (result, _) = runTI (inferLoop setStateModuleName (Resolver.moduleStatements m))
   in case result of
        Right (err, state) ->
          -- Record this module as checked so it is not re-checked later.
          let addedCheckedModule = state {tcsCheckedModules = Resolver.moduleName m `Set.insert` tcsCheckedModules setStateModuleName}
           in (err, addedCheckedModule)
        Left err ->
          (Just err, tcState)
  where
    -- Process statements one at a time, threading the checker state.
    inferLoop :: TypeCheckState -> [Parser.Statement] -> TI (Maybe (Parser.Positioned TypeError), TypeCheckState)
    inferLoop state [] = return (Nothing, state)
    inferLoop state (statement : rest) =
      let moduleName = tcsCurrentModule state
       in case statement of
            Parser.UnitStatement units ->
              -- Declared units are namespaced by the current module.
              let newUnits = tcsUnits state `Set.union` Set.fromList (map (\(Parser.Positioned _ p) -> VariableName moduleName p) units)
               in inferLoop (state {tcsUnits = newUnits}) rest
            Parser.ImportStatement importedModuleName imports -> do
              moduleState <- importModule moduleName importedModuleName imports state
              inferLoop moduleState rest
            Parser.AssignmentStatement assignment -> do
              -- First, assign polymorphic types to all arguments of the function (make the definition as loose as possible)
              mapPairs <-
                mapM
                  ( \a -> do
                      tv <- newTyVar a
                      return (VariableName moduleName a, VariableInfo (Scheme [] tv) (EVariable (VariableName moduleName a)) assignment) -- This is a fake execution expression, it's an argument, it's deliberately unknown
                  )
                  (Parser.assignmentArguments assignment)

              -- Then, add these arguments to the type environment
              let (TypeEnv env) = tcsEnv state
                  arguments = Parser.assignmentArguments assignment
                  env'' = TypeEnv (env OMap.<>| OMap.fromList mapPairs)
              TypeCheckResult s1 t1 ex <- ti (state {tcsEnv = env''}) (Parser.assignmentExpression assignment)

              -- Rebuild the function type from the (substituted) argument
              -- variables, then generalize it into a scheme.
              let varType = foldr (\(_, VariableInfo (Scheme _ tv) _ _) acc -> apply s1 tv `FuncType` acc) (apply s1 t1) mapPairs
                  name = VariableName moduleName (Parser.assignmentName assignment)
                  t' = generalize (apply s1 env'') varType
                  -- Note that this env is not env''. This is because otherwise we will add arguments as variables
                  -- We want to not include those
                  envWithVar = addToEnv name (VariableInfo t' (wrapFunctionArgs arguments ex) assignment) (TypeEnv env)
              let newTcState =
                    state
                      { tcsEnv = envWithVar,
                        tcsSubs = s1 `composeSubst` tcsSubs state
                      }
              inferLoop newTcState rest
              `catchError` (\err -> return (Just err, state))
--------------------------------------------------------------------------------
/src/Pedant/TypeCheck/Types.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 | {-# LANGUAGE InstanceSigs #-}
3 |
4 | module Pedant.TypeCheck.Types
5 | ( PolyTypeName (..),
6 | Scheme (..),
7 | Substitution (..),
8 | PrimitiveDim(..),
9 | Dimension(..),
10 | NormalDimension(..),
11 | Type (..),
12 | TypeEnv (..),
13 | TypeInferenceMonad (..),
14 | newPolyType,
15 | newPolyDim,
16 | multiplyNormalDimensions,
17 | newPolyNormDim
18 | )
19 | where
20 |
21 | import qualified Control.Monad.Except as Except
22 | import qualified Control.Monad.State as State
23 | import qualified Data.Char as Char
24 | import qualified Data.List as List
25 | import qualified Data.Map as Map
26 | import qualified Data.Map.Ordered as OMap
27 | import qualified Data.Maybe as Maybe
28 | import qualified Data.Set as Set
29 | import qualified Data.Text as T
30 | import qualified Pedant.Parser.Types as Parser
31 | import qualified Pedant.Types as Types
32 |
-- | Identifier for a type or dimension variable; the Int is a unique seed.
newtype PolyTypeName = PolyTypeName {showTypeNameNumber :: Int} deriving (Show, Eq, Ord)

-- | A type quantified over the listed variables (a polytype).
data Scheme = Scheme [PolyTypeName] Type deriving (Show)

-- | The outcome of inferring one expression: its type and executable form.
-- NOTE(review): call sites in TypeCheck.Expressions match three fields
-- (substitution, type, expression) — confirm which shape is current.
data TypeCheckResult = TypeCheckResult Type Types.ExecutionExpression

-- | The pairs of types that failed to unify during one unification.
type UnificationTrace = [(Type, Type)]

-- | State threaded through unification: the fresh-variable seed.
newtype TIState = TIState {tiSeed :: Int}

-- | Unification Monad
type UM a = Except.ExceptT UnificationTrace (State.State TIState) a
45 |
-- | Everything the checker records about one declared variable.
data VariableInfo = VariableInfo
  { -- | The generalized type of the variable
    variableInfoScheme :: Scheme,
    -- | The compiled expression used at evaluation time
    variableInfoExecutionExpression :: Types.ExecutionExpression,
    -- | The source assignment this variable came from
    variableInfoParserStatement :: Parser.Assignment
  }
  deriving (Show)

-- | Insertion-ordered map from variable names to their info.
newtype TypeEnv = TypeEnv {teVarMap :: OMap.OMap Types.VariableName VariableInfo}
  deriving (Show)
55 |
-- | The state of the type checker
data TypeCheckState = TypeCheckState
  { -- | The environment of the checker. This contains references to all the variables and schemes of those variables currently declared.
    tcsEnv :: TypeEnv,
    -- | Substitutions, the current substitutions that are required for the expression to unify
    tcsSubs :: Substitution,
    -- | Units, the units currently declared
    tcsUnits :: Set.Set Types.UnitName,
    -- | A list of the modules that have been checked
    tcsCheckedModules :: Set.Set T.Text,
    -- | The name of the module currently being checked
    tcsCurrentModule :: T.Text,
    -- | Seed for generating fresh type/dimension variable names
    tcsSeed :: Int
  }
69 |
-- | Free variables and substitution over an environment act on every
-- stored scheme.
instance Types TypeEnv where
  ftv (TypeEnv env) = ftv (map (variableInfoScheme . snd) $ OMap.assocs env)
  apply s (TypeEnv env) = TypeEnv (fmap (\vi -> vi {variableInfoScheme = apply s (variableInfoScheme vi)}) env)
73 |
-- | Quantify over every variable that is free in the type but not free
-- in the environment.
generalize :: TypeEnv -> Type -> Scheme
generalize env t =
  let freeInType = ftv t
      freeInEnv = ftv env
   in Scheme (Set.toList (freeInType `Set.difference` freeInEnv)) t
78 |
-- | Prepend a variable binding to the environment.
addToEnv :: Types.VariableName -> VariableInfo -> TypeEnv -> TypeEnv
addToEnv name info (TypeEnv m) = TypeEnv ((name, info) OMap.|< m)
81 |
82 | newtype TypeInferenceMonad a = TypeInferenceMonad {runTI :: Except.ExceptT (Parser.Positioned TypeError) (State.State TypeCheckState) a}
83 |
-- | Generate a fresh polymorphic normal dimension.
-- BUG FIX: this previously only read the seed (State.gets) and never
-- advanced it, so every "fresh" variable shared the same PolyTypeName.
-- The seed is now incremented in the state.
newPolyNormDim :: TypeInferenceMonad NormalDimension
newPolyNormDim = TypeInferenceMonad $ State.state $ \s ->
  (polyNormDim (PolyTypeName (tcsSeed s)), s {tcsSeed = tcsSeed s + 1})
86 |
-- | Generate a fresh polymorphic type.
-- BUG FIX: this previously only read the seed (State.gets) and never
-- advanced it, so every "fresh" variable shared the same PolyTypeName.
-- The seed is now incremented in the state.
newPolyType :: TypeInferenceMonad Type
newPolyType = TypeInferenceMonad $ State.state $ \s ->
  (PolyType (PolyTypeName (tcsSeed s)), s {tcsSeed = tcsSeed s + 1})
89 |
-- | Generate a fresh polymorphic dimension.
-- BUG FIX: this previously only read the seed (State.gets) and never
-- advanced it, so every "fresh" variable shared the same PolyTypeName.
-- The seed is now incremented in the state.
newPolyDim :: TypeInferenceMonad Dimension
newPolyDim = TypeInferenceMonad $ State.state $ \s ->
  (PolyDim (PolyTypeName (tcsSeed s)), s {tcsSeed = tcsSeed s + 1})
92 |
-- | Map a function over a list together with each element's index,
-- starting at 0.  Equivalent to the hand-rolled recursion it replaces,
-- expressed with the standard 'zipWith' idiom.
imap :: (Int -> a -> b) -> [a] -> [b]
imap f = zipWith f [0 ..]
99 |
-- | Pretty printing a scheme first renames its quantified variables to
-- small consecutive numbers so the rendered output is stable.
instance Types.PrettyPrint Scheme where
  pPrint (Scheme vars t) =
    -- Each quantified name is mapped to both a type and a dimension
    -- variable; 'apply' uses whichever kind actually occurs in t.
    let typeNames = imap (\i v -> (v, PolyType (PolyTypeName i))) vars
        dimNames = imap (\i v -> (v, PolyDim (PolyTypeName i))) vars
        sub = Substitution {subTypes = Map.fromList typeNames, subDimensions = Map.fromList dimNames}
     in Types.pPrint $ apply sub t
106 |
-- | Things that contain type variables: they can report their free
-- variables and have substitutions applied to them.
class Types a where
  -- | The free type/dimension variables of a value
  ftv :: a -> Set.Set PolyTypeName
  -- | Apply a substitution to a value
  apply :: Substitution -> a -> a
110 |
111 |
instance Types Type where
  -- Free variables are collected structurally from every sub-component.
  ftv (PolyType n) = Set.singleton n
  ftv (FuncType x y) = ftv x `Set.union` ftv y
  ftv (DictType x) = Set.unions . map ftv $ Map.elems x
  ftv (ListType x) = ftv x
  ftv (NumberType x) = ftv x
  ftv (PolyDictType x) = Set.unions . map ftv $ Map.elems x

  -- Substitution chases variable bindings (note the recursive 'apply'
  -- after a successful lookup) and maps structurally everywhere else.
  apply s (PolyType n) =
    case Map.lookup n (subTypes s) of
      Nothing -> PolyType n
      Just x -> apply s x
  apply s (FuncType x y) = FuncType (apply s x) (apply s y)
  apply s (NumberType n) =
    NumberType $ apply s n
  apply s (ListType n) =
    ListType $ apply s n
  apply s (PolyDictType n) =
    PolyDictType (Map.map (apply s) n)
  apply s (DictType n) =
    DictType (Map.map (apply s) n)
133 |
-- | Quantified variables are bound, so they are excluded from the free
-- set and removed from any substitution before it is applied.
instance Types Scheme where
  ftv (Scheme vars t) = ftv t `Set.difference` Set.fromList vars
  apply s (Scheme vars t) = Scheme vars (apply (foldr deleteFromSub s vars) t)
    where
      deleteFromSub :: PolyTypeName -> Substitution -> Substitution
      deleteFromSub key sub =
        Substitution
          { subDimensions = Map.delete key (subDimensions sub),
            subTypes = Map.delete key (subTypes sub)
          }
144 |
-- | Lists lift pointwise: substitution applies to each element and free
-- variables are the union over all elements.
instance Types a => Types [a] where
  ftv = Set.unions . map ftv
  apply sub = fmap (apply sub)
148 |
-- | There are two different types of substitutions. Dimensional and type substitutions.
data Substitution = Substitution
  { -- | Maps type variables to the types they stand for
    subTypes :: Map.Map PolyTypeName Type,
    -- | Maps dimension variables to the dimensions they stand for
    subDimensions :: Map.Map PolyTypeName Dimension
  }
  deriving (Show)
155 |
156 |
-- | The atoms a dimension is built from.
data PrimitiveDim
  = -- | Literal dimension, such as years
    LitDim Types.UnitName
  | -- | Polymorphic dimension: a variable standing for some unit
    PolyPrimDim PolyTypeName
  deriving (Eq, Ord)
163 |
-- | A product of primitive dimensions raised to integer powers.
newtype NormalDimension = NormalDimension {normDimComponents :: Map.Map PrimitiveDim Int } deriving (Eq, Ord)
-- | Represents a dimension! This is the big thing in pedant
-- NOTE(review): NormDim now wraps NormalDimension while PowDim still
-- carries a raw map, and several consumers in this package still pattern
-- match NormDim as a map — the representation change looks half-finished.
data Dimension
  = -- | a non-power dimension (such as years)
    NormDim NormalDimension
  | -- | A power dimension (such as ^years-1)
    PowDim (Map.Map PrimitiveDim Int)
  | -- | A dimension variable
    PolyDim PolyTypeName
  deriving (Eq)
173 |
-- | The dimension of a bare number: no units at all.
dimensionless :: Dimension
dimensionless = NormDim noUnits
  where
    noUnits = NormalDimension Map.empty
176 |
-- | A normal dimension consisting of a single polymorphic primitive
-- raised to the first power.
polyNormDim :: PolyTypeName -> NormalDimension
polyNormDim var = NormalDimension (Map.fromList [(PolyPrimDim var, 1)])
179 |
-- | Multiply two normal dimensions by adding the powers of matching
-- primitives; components that cancel to power zero are dropped.
multiplyNormalDimensions :: NormalDimension -> NormalDimension -> NormalDimension
multiplyNormalDimensions (NormalDimension a) (NormalDimension b) =
  NormalDimension (Map.filter (/= 0) (Map.unionWith (+) a b))
182 |
-- | The types a pedant expression can have.
data Type
  = -- | An actual dimension, such as people years-1
    NumberType Dimension
  | -- | A list of a dimension, such as [years]
    ListType Type
  | -- | A dictionary of dimensions, such as {x:meters,y:meters}
    DictType (Map.Map T.Text Type)
  | -- | A polymorphic dictionary (a dictionary that contains these keys or more). Such as {|x:meters,y:meters}
    PolyDictType (Map.Map T.Text Type)
  | -- | A Function. Such as years -> meters
    FuncType Type Type
  | -- | A Polymorphic Type. A type that could be anything
    PolyType PolyTypeName
  deriving (Eq)

-- Function arrows associate to the right, as usual.
infixr 7 `FuncType`
199 |
200 |
201 | -- Show instances for dimensions. Shows internal details and may be
202 | -- difficult to read.
instance Show PrimitiveDim where
  show (LitDim unit) = show unit
  show (PolyPrimDim name) = "prim<" ++ show name ++ ">"
206 |
-- | Debug rendering of dimensions.
instance Show Dimension where
  -- BUG FIX: NormDim now wraps a NormalDimension, so the map must be
  -- unwrapped before use; the original matched it as a raw map.
  show (NormDim (NormalDimension dim)) =
    if Map.size dim == 0
      then "dimensionless"
      else unwords $ map (\(name, amount) -> if amount == 1 then show name else show name ++ show amount) (List.sortOn (negate . snd) (Map.toList dim))
  show (PowDim dim) =
    -- PowDim still carries a raw map; reuse the NormDim rendering.
    if Map.size dim == 1
      then "^" ++ show (NormDim (NormalDimension dim))
      else "^(" ++ show (NormDim (NormalDimension dim)) ++ ")"
  -- BUG FIX: there was no clause for PolyDim, so showing a polymorphic
  -- dimension crashed with a pattern-match failure.
  show (PolyDim name) = "dim<" ++ show name ++ ">"
216 |
-- | Debug rendering of types.
instance Show Type where
  show ty = case ty of
      NumberType dim -> show dim
      ListType el -> "[" ++ show el ++ "]"
      DictType entries -> "{" ++ renderEntries entries ++ "}"
      PolyDictType entries -> "{|" ++ renderEntries entries ++ "}"
      FuncType arg val -> show arg ++ "->" ++ show val
      PolyType name -> "type<" ++ show name ++ ">"
    where
      renderEntries m = List.intercalate "," [T.unpack k ++ ":" ++ show v | (k, v) <- Map.toAscList m]
229 |
-- | Base Dimension returns the underlying element type for lists. This
-- is used to check whether a dimension can be multiplied or added.
baseDimension :: Type -> Type
baseDimension ty =
  case ty of
    ListType inner -> baseDimension inner
    other -> other
235 |
-- | Multiplies two dimensions together, broadcasting over lists.
-- Only plain (non-power) dimensions can be multiplied.
typeMult :: Type -> Type -> Either String Type
-- BUG FIX: the original unioned the maps directly, but NormDim now wraps
-- a NormalDimension; use the dedicated helper instead.
typeMult (NumberType (NormDim a)) (NumberType (NormDim b)) = Right . NumberType . NormDim $ multiplyNormalDimensions a b
typeMult (ListType a) (ListType b) = ListType <$> typeMult a b
typeMult (ListType a) b = ListType <$> typeMult a b
typeMult a (ListType b) = ListType <$> typeMult a b
typeMult x y = Left $ "Cannot multiply " ++ show x ++ " to " ++ show y
243 |
244 |
-- | The declared unit underlying a primitive dimension, if any.
baseUnitPrim :: PrimitiveDim -> Maybe Types.UnitName
baseUnitPrim prim =
  case prim of
    LitDim unit -> Just unit
    PolyPrimDim _ -> Nothing
248 |
-- | All declared units mentioned in a dimension.
-- BUG FIX: NormDim wraps a NormalDimension, so unwrap before taking the
-- keys; also add the previously missing PolyDim clause (a dimension
-- variable contains no concrete units).
baseUnitsDim :: Dimension -> Set.Set Types.UnitName
baseUnitsDim (NormDim (NormalDimension a)) = Set.fromList . Maybe.mapMaybe baseUnitPrim $ Map.keys a
baseUnitsDim (PowDim a) = Set.fromList . Maybe.mapMaybe baseUnitPrim $ Map.keys a
baseUnitsDim (PolyDim _) = Set.empty
252 |
-- | Base Units. Which units make up the type. Used for checking whether
-- units have been declared
baseUnits :: Type -> Set.Set Types.UnitName
baseUnits ty =
  case ty of
    NumberType dim -> baseUnitsDim dim
    ListType el -> baseUnits el
    DictType entries -> Set.unions (baseUnits <$> Map.elems entries)
    PolyDictType entries -> Set.unions (baseUnits <$> Map.elems entries)
    FuncType arg ret -> baseUnits arg `Set.union` baseUnits ret
    PolyType _ -> Set.empty
262 |
-- | User-facing rendering: literal units print as their name, dimension
-- variables as a tick followed by their seed number.
instance Types.PrettyPrint PrimitiveDim where
  pPrint (LitDim (Types.UnitName s)) = s
  pPrint (PolyPrimDim (PolyTypeName s)) = "'" <> T.pack (show s)
266 |
-- | User-facing rendering of dimensions; a dimensionless value prints as "1".
instance Types.PrettyPrint Dimension where
  -- BUG FIX: NormDim wraps a NormalDimension, so unwrap before comparing
  -- against the empty map; the original compared a map with the wrapper.
  pPrint (NormDim (NormalDimension dim)) =
    if Map.empty == dim
      then "1"
      else T.unwords $ map (\(name, amount) -> if amount == 1 then Types.pPrint name else Types.pPrint name <> T.pack (show amount)) (List.sortOn (negate . snd) (Map.toList dim))
  pPrint (PowDim dim) =
    if Map.empty == dim
      then "1"
      else "^" <> Types.pPrint (NormDim (NormalDimension dim))
  -- BUG FIX: the missing PolyDim clause caused a pattern-match failure
  -- when printing a polymorphic dimension.
  pPrint (PolyDim (PolyTypeName s)) = "'" <> T.pack (show s)
276 |
-- | User-facing rendering of types.
instance Types.PrettyPrint Type where
  pPrint ty = case ty of
      NumberType dim -> Types.pPrint dim
      DictType entries -> "{" <> renderPairs entries <> "}"
      ListType el -> "[" <> Types.pPrint el <> "]"
      PolyDictType entries -> "{|" <> renderPairs entries <> "}"
      PolyType (PolyTypeName n) -> "''" <> T.pack (show n)
      FuncType arg ret -> Types.pPrint arg <> " -> " <> Types.pPrint ret
    where
      renderPairs m = T.intercalate "," [k <> ":" <> Types.pPrint v | (k, v) <- Map.toAscList m]
287 |
-- | A Type Error. Describes a problem that occurred during type checking
data TypeError
  = -- | Two types failed to unify; carries why and the failing pairs
    UnificationError ReasonForUnification UnificationTrace
  | -- | A unit was used before being declared
    MissingUnitError T.Text
  | -- | A variable was used before being declared
    MissingVariableError T.Text
  | -- | An imported name does not exist in the imported module
    MissingImportError T.Text T.Text
  | -- | The imported module itself does not exist
    MissingModuleError T.Text
  | -- | A checker invariant was broken; should never reach users
    InternalError T.Text
  deriving (Eq)

-- | The syntactic context in which a unification was attempted, used to
-- phrase the resulting error message.
data ReasonForUnification
  = BinaryOpUnificationReason T.Text (Parser.Positioned Parser.Expression, Type) (Parser.Positioned Parser.Expression, Type)
  | PrefixOpUnificationReason T.Text (Parser.Positioned Parser.Expression, Type)
  | AccessUnificationReason (Parser.Positioned Parser.Expression, Type) T.Text
  deriving (Eq)
303 |
-- | Render a type error as the human-readable text shown to the user.
typeErrorMessage :: TypeError -> T.Text
typeErrorMessage te =
  case te of
    UnificationError reason _ ->
      -- "+" and "-" get tailored wording: they require both operands to
      -- share the same dimension.
      case reason of
        BinaryOpUnificationReason "+" (p1, t1) (p2, t2) ->
          T.concat
            [ "Can only add dimension that are the same.\n",
              Types.pPrint p1,
              " has the type ",
              Types.pPrint t1,
              " and ",
              Types.pPrint p2,
              " has the type ",
              Types.pPrint t2
            ]
        BinaryOpUnificationReason "-" (p1, t1) (p2, t2) ->
          T.concat
            [ "Can only subtract dimensions that are the same.\n",
              Types.pPrint p1,
              " has the type ",
              Types.pPrint t1,
              " and ",
              Types.pPrint p2,
              " has the type ",
              Types.pPrint t2
            ]
        BinaryOpUnificationReason op (p1, t1) (p2, t2) ->
          T.concat
            [ op,
              " must be called on a number.\n",
              Types.pPrint p1,
              " has the type ",
              Types.pPrint t1,
              " and ",
              Types.pPrint p2,
              " has the type ",
              Types.pPrint t2
            ]
        PrefixOpUnificationReason op (p1, t1) ->
          T.concat
            [ op,
              " must be called on a number.\n",
              Types.pPrint p1,
              " has the type ",
              Types.pPrint t1
            ]
        AccessUnificationReason (p1, t1) key ->
          T.concat
            [ Types.pPrint p1,
              " has type ",
              Types.pPrint t1,
              " does not have the key ",
              key
            ]
    (MissingUnitError unitName) ->
      T.concat
        [ "unit ",
          unitName,
          " not declared. Try adding a \"unit ",
          unitName,
          "\" statement before this line"
        ]
    (MissingVariableError varName) ->
      T.concat
        [ "variable ",
          varName,
          " not declared."
        ]
    InternalError err ->
      T.append "INTERNAL ERROR. YOU SHOULD NOT BE GETTING THIS: " err
    MissingImportError moduleName variable ->
      T.concat
        [ "Could not find name ",
          variable,
          " in module ",
          moduleName,
          "."
        ]
    (MissingModuleError moduleName) ->
      T.concat
        [ "Could not find module ",
          moduleName,
          "."
        ]
389 |
390 | errorComponentLen (Parser.Positioned (Parser.PositionData _ l) _) = l
--------------------------------------------------------------------------------
/src/Pedant/Types.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 |
3 | module Pedant.Types
4 | ( Operation (..),
5 | ExecutionExpression (..),
6 | ExecutionStatement (..),
7 | PedantParseError (..),
8 | ExecutionValue (..),
9 | NumericValue (..),
10 | PrettyPrint (..),
11 | InternalFunction (..),
12 | BinaryOperation(..),
13 | VariableName(..),
14 | UnitName(..),
15 | RecordKey(..),
16 | PrefixOperation(..),
17 | AccessKey(..)
18 | )
19 | where
20 |
21 | import qualified Data.List as List
22 | import qualified Data.Map as Map
23 | import qualified Data.Set as Set
24 | import qualified Data.Text as T
25 |
-- | Values that can be rendered as user-facing text.
class PrettyPrint a where
  pPrint :: a -> T.Text
28 |
-- | Defining a shallow embedding for a typed number.
-- A typed number is a number with units. It must follow
-- the rules of dimensional analysis
data NumericValue
  = -- | A plain scalar
    NumberValue Double
  | -- | A list of values; arithmetic broadcasts over the elements
    ListValue [NumericValue]
  | -- | A record of named values
    DictValue (Map.Map T.Text NumericValue)
  | -- | A lambda: argument name plus the body to evaluate
    FuncValue T.Text ExecutionExpression
  | -- | A function implemented in Haskell rather than in pedant
    InternalFunctionValue InternalFunction
39 |
-- | Lift a binary Double operation over numeric values: scalars
-- broadcast across lists, lists zip element-wise, and pairs that cannot
-- be combined (dicts, functions) return the left value unchanged.
lift2Numeric :: (Double -> Double -> Double) -> NumericValue -> NumericValue -> NumericValue
lift2Numeric op = go
  where
    go (NumberValue x) (NumberValue y) = NumberValue (x `op` y)
    go scalar@(NumberValue _) (ListValue ys) = ListValue (map (go scalar) ys)
    go (ListValue xs) scalar@(NumberValue _) = ListValue (map (`go` scalar) xs)
    go (ListValue xs) (ListValue ys) = ListValue (zipWith go xs ys)
    go left _ = left
48 |
-- | Lift a unary Double operation over a numeric value, mapping over
-- lists.  Non-numeric values (dicts, functions) pass through untouched.
liftNumeric :: (Double -> Double) -> NumericValue -> NumericValue
liftNumeric op (NumberValue x) = NumberValue (op x)
liftNumeric op (ListValue xs) = ListValue (liftNumeric op <$> xs)
liftNumeric _ other = other
57 |
-- | Arithmetic broadcasts over scalars and lists via 'lift2Numeric' /
-- 'liftNumeric'; see those helpers for the pairing rules.
instance Num NumericValue where
  (*) = lift2Numeric (*)

  (+) = lift2Numeric (+)

  (-) = lift2Numeric (-)

  negate = liftNumeric negate

  abs = liftNumeric abs

  signum = liftNumeric signum
  fromInteger = NumberValue . fromInteger

-- | Division follows the same broadcasting rules as 'Num'.
instance Fractional NumericValue where
  fromRational = NumberValue . fromRational
  (/) = lift2Numeric (/)

-- | Transcendental functions map over lists element-wise.
instance Floating NumericValue where
  pi = NumberValue pi
  exp = liftNumeric exp
  log = liftNumeric log
  sin = liftNumeric sin
  cos = liftNumeric cos
  asin = liftNumeric asin
  acos = liftNumeric acos
  atan = liftNumeric atan
  sinh = liftNumeric sinh
  cosh = liftNumeric cosh
  asinh = liftNumeric asinh
  acosh = liftNumeric acosh
  atanh = liftNumeric atanh
90 |
91 |
-- | Human-readable rendering of evaluated values.
instance Show NumericValue where
  show value = case value of
    NumberValue x -> show x
    ListValue xs -> "[" ++ List.intercalate ", " (show <$> xs) ++ "]"
    DictValue entries -> "{" ++ List.intercalate ", " [T.unpack k ++ "=" ++ show v | (k, v) <- Map.toAscList entries] ++ "}"
    FuncValue arg body -> T.unpack arg ++ " -> " ++ show body
    InternalFunctionValue _ -> "INTERNAL FUNCTION"
98 |
-- | Literal values as they appear inside compiled expressions.
data ExecutionValue
  = ExecutionValueNumber Double
  | ExecutionValueList [ExecutionExpression]
  | ExecutionValueDict (Map.Map T.Text ExecutionExpression)
  | ExecutionValueFunc T.Text ExecutionExpression
  deriving (Show)
105 |
-- Distinct name wrappers so the different kinds of identifiers cannot be
-- mixed up accidentally.
-- NOTE(review): the type-checker modules construct VariableName with two
-- arguments (module, name); this declaration has one field — confirm
-- which shape is current.
newtype BinaryOperation = BinaryOperation T.Text deriving (Show, Eq)
newtype VariableName = VariableName T.Text deriving (Show, Eq, Ord)
newtype UnitName = UnitName T.Text deriving (Show, Eq, Ord)
newtype RecordKey = RecordKey T.Text deriving (Show, Eq, Ord)
newtype PrefixOperation = PrefixOperation T.Text deriving (Show, Eq)
newtype AccessKey = AccessKey T.Text deriving (Show, Eq)
112 |
-- | Expressions after type checking, ready for evaluation.
data ExecutionExpression
  = -- | Application of a named binary operator
    EBinOp T.Text ExecutionExpression ExecutionExpression
  | -- | Reference to a declared variable
    EVariable T.Text
  | -- | Field access on a record value
    EAccess ExecutionExpression T.Text
  | -- | A literal value
    EConstant ExecutionValue
  | -- | Numeric negation
    ENegate ExecutionExpression
  | -- | A function implemented in Haskell
    EInternalFunc InternalFunction
  deriving (Show)
121 |
-- | A function implemented in the host language.
newtype InternalFunction = InternalFunction (NumericValue -> NumericValue)

-- Functions have no meaningful textual form.
instance Show InternalFunction where
  show _ = "INTERNAL FUNCTION"
126 |
-- | Top-level statements in their executable form.
data ExecutionStatement
  = -- | Bind a name to an expression
    ExecAssignment T.Text ExecutionExpression
  | -- | Import the named symbols from a module
    ExecImport T.Text (Set.Set T.Text)
  deriving (Show)
131 |
-- | The built-in arithmetic operations.
data Operation = Add | Sub | Mult | Div | App | Power
  deriving (Show)
134 |
135 |
-- | A parse error with enough position information to render an editor
-- diagnostic.  (Whether rows/columns are 0- or 1-indexed is not visible
-- here — confirm at the LSP call sites.)
data PedantParseError = PedantParseError
  { -- | The error message itself
    ppeErrString :: T.Text,
    -- | Start column of the offending span
    ppeColumn :: Int,
    -- | Start row of the offending span
    ppeRow :: Int,
    -- | End column of the offending span
    ppeEndColumn :: Int,
    -- | End row of the offending span
    ppeEndRow :: Int,
    -- | A pre-rendered, human-readable form of the error
    ppePrint :: T.Text
  }
  deriving (Show)
145 |
--------------------------------------------------------------------------------
/stack.yaml:
--------------------------------------------------------------------------------
1 | # This file was automatically generated by 'stack init'
2 | #
3 | # Some commonly used options have been documented as comments in this file.
4 | # For advanced use and comprehensive documentation of the format, please see:
5 | # https://docs.haskellstack.org/en/stable/yaml_configuration/
6 |
7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version.
8 | # A snapshot resolver dictates the compiler version and the set of packages
9 | # to be used for project dependencies. For example:
10 | #
11 | # resolver: lts-3.5
12 | # resolver: nightly-2015-09-21
13 | # resolver: ghc-7.10.2
14 | #
15 | # The location of a snapshot can be provided as a file or url. Stack assumes
16 | # a snapshot provided as a file might change, whereas a url resource does not.
17 | #
18 | # resolver: ./custom-snapshot.yaml
19 | # resolver: https://example.com/snapshots/2018-01-01.yaml
20 | resolver:
21 | url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/20/20.yaml
22 |
23 | # User packages to be built.
24 | # Various formats can be used as shown in the example below.
25 | #
26 | # packages:
27 | # - some-directory
28 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz
29 | # subdirs:
30 | # - auto-update
31 | # - wai
32 | packages:
33 | - .
34 | # Dependency packages to be pulled from upstream that are not in the resolver.
35 | # These entries can reference officially published versions as well as
36 | # forks / in-progress versions pinned to a git hash. For example:
37 | #
38 | # extra-deps:
39 | # - acme-missiles-0.3
40 | # - git: https://github.com/commercialhaskell/stack.git
41 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a
42 | #
43 | # extra-deps: []
44 |
45 | # Override default flag values for local packages and extra-deps
46 | # flags: {}
47 |
48 | # Extra package databases containing global packages
49 | # extra-package-dbs: []
50 |
51 | # Control whether we use the GHC we find on the path
52 | # system-ghc: true
53 | #
54 | # Require a specific version of stack, using version ranges
55 | # require-stack-version: -any # Default
56 | # require-stack-version: ">=2.7"
57 | #
58 | # Override the architecture used by stack, especially useful on Windows
59 | # arch: i386
60 | # arch: x86_64
61 | #
62 | # Extra directories used by stack for building
63 | # extra-include-dirs: [/path/to/dir]
64 | # extra-lib-dirs: [/path/to/dir]
65 | #
66 | # Allow a newer minor version of GHC than the snapshot specifies
67 | # compiler-check: newer-minor
68 |
--------------------------------------------------------------------------------
/stack.yaml.lock:
--------------------------------------------------------------------------------
1 | # This file was autogenerated by Stack.
2 | # You should not edit this file by hand.
3 | # For more information, please see the documentation at:
4 | # https://docs.haskellstack.org/en/stable/lock_files
5 |
6 | packages: []
7 | snapshots:
8 | - completed:
9 | sha256: 126fa33ceb11f5e85ceb4e86d434756bd9a8439e2e5776d306a15fbc63b01e89
10 | size: 650041
11 | url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/20/20.yaml
12 | original:
13 | url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/20/20.yaml
14 |
--------------------------------------------------------------------------------
/test/Main.hs:
--------------------------------------------------------------------------------
{-# LANGUAGE ImportQualifiedPost #-}

-- | Test driver: runs the parser specification under hspec.
module Main (main) where

import Test.Hspec (describe, hspec)
import Spec.Parser qualified as ParserSpec

-- | Entry point for the whole test suite.
main :: IO ()
main = hspec parserSuite
  where
    -- Group all parser tests under one description.
    parserSuite = describe "Parser spec" ParserSpec.spec
--------------------------------------------------------------------------------
/test/Spec/Parser.hs:
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}

-- | Specification for the Pedant parser.
module Spec.Parser (spec) where

import Test.Hspec
import Pedant.Parser (parseProgram)
import Data.Either (isRight)


-- | The parser must accept a program whose last line has no newline.
spec :: Spec
spec =
  describe "Parses programs with no line ending at end" $
    it "x = 2" $
      parseProgram "test" "x = 2" `shouldSatisfy` isRight
--------------------------------------------------------------------------------
/theories/.Numerics.aux:
--------------------------------------------------------------------------------
1 | COQAUX1 0f4eb4cfaa2677439d9b504a6201da02 /mnt/c/Users/samno/Code/Dimensions/pedant/theories/Numerics.v
2 |
--------------------------------------------------------------------------------
/theories/Collections.v:
--------------------------------------------------------------------------------
(* Collection (set / record) types for Pedant, layered on Numerics. *)
Load Numerics.
Import ListNotations.

(* Scalar types a record field may carry. *)
Inductive PrimitiveType : Type :=
| Number
| Boolean.

(* Records are keyed by nat field identifiers (see impact etc. below). *)
Module RecordMap := FMapAVL.Make Nat_as_OT.

(* Object-language types: primitives, unordered sets, and records
   mapping nat field ids to types.
   NOTE(review): "Primative" is a misspelling of "Primitive"; the
   constructor name is part of the interface, so it is left unchanged
   here — renaming would touch every use site. *)
Inductive PedantType : Type :=
| Primative : PrimitiveType -> PedantType
| UnorderedSet : PedantType -> PedantType
| Record : list (nat * PedantType) -> PedantType.

(* Well-known record field identifiers used below. *)
Definition impact : nat := 0.
Definition given_placement : nat := 1.
Definition counterfactualAdjustment := 2.

(* A population of ICAP rows: a set of records with a numeric impact field. *)
Definition populationIcapsType := UnorderedSet (Record [(impact, Primative Number)]).

(* Just thinking about how I want this to come out for ICAPs
I'm thinking, you define impact as:
groupType : Treatment | Control | Other
importance : set({id: ConceptionID, from: ID, to: ID, relative_importance : importance[to] importance[from]^{-2}})
population : set({id: ID, impact: I, occured: boolean, group: groupType})
impact: sum(impact|occured) > sum(impact|!occured)
impact = sum(impact | occured) / length_P(impact | occured)

counterfactual_scenarios = set({got_placement: boolean, personId: PersonId})
population_icap = set({personId: PersonId, impact: I, given_placement: boolean, counterfactualAdjustment: E_counterfactual_scenarios(got_placement | personId)})
ICAPS = sum((0 - counterfactual_adjustment) * impact * given_placement)
ICAPS = sum((0 - E(got_placement | personId & !given_placement)) * impact * got_placement)
ICAPS = sum(impact * given_placement - E(got_placement | personId & !given_placement)) * impact * got_placement)
// Rule, you can only combine sets that are of the same condition. considering impact is | given_placement this fails. ICPS = impact * got_placement - impact * got_placement | !occured
*)
--------------------------------------------------------------------------------
/theories/HindleyMilner.v:
--------------------------------------------------------------------------------
1 | Require Import Coq.Lists.List.
2 | Import ListNotations.
3 | Require Import Coq.Logic.Decidable.
4 | Require Import Coq.Strings.String.
5 | Require Import Coq.Arith.PeanoNat.
6 |
Section HindleyMilner.

(* The section is parameterised over an abstract type of names with
   decidable equality and a fresh-name generator that is guaranteed to
   avoid a given list of names. *)
Variable atom : Type.
Variable atom_eq_dec : forall x y : atom, {x = y} + {x <> y}.
Variable fresh_atom : list atom -> atom.
Hypothesis fresh_atom_spec : forall xs, ~ In (fresh_atom xs) xs.

(* Lambda calculus with let. *)
Inductive expr : Type :=
| EVar : atom -> expr
| EAbs : atom -> expr -> expr
| EApp : expr -> expr -> expr
| ELet : atom -> expr -> expr -> expr.

(* Simple types: variables and arrows (no type schemes yet). *)
Inductive type : Type :=
| TVar : atom -> type
| TArrow : type -> type -> type.

(* Typing contexts as cons-lists of (name, type) bindings. *)
Inductive context : Type :=
| Empty : context
| Extend : atom -> type -> context -> context.

(* Declarative typing judgment.
   NOTE(review): TyVar forces Gamma to be exactly the singleton context
   [Extend x t Empty], so a variable cannot be typed inside any larger
   context — confirm whether a lookup-based premise was intended. *)
Inductive typing : context -> expr -> type -> Prop :=
| TyVar : forall Gamma x t,
    (Extend x t Empty) = Gamma ->
    typing Gamma (EVar x) t
| TyAbs : forall Gamma x e t1 t2,
    typing (Extend x t1 Gamma) e t2 ->
    typing Gamma (EAbs x e) (TArrow t1 t2)
| TyApp : forall Gamma e1 e2 t1 t2,
    typing Gamma e1 (TArrow t1 t2) ->
    typing Gamma e2 t1 ->
    typing Gamma (EApp e1 e2) t2
| TyLet : forall Gamma x e1 e2 t1 t2,
    typing Gamma e1 t1 ->
    typing (Extend x t1 Gamma) e2 t2 ->
    typing Gamma (ELet x e1 e2) t2.
43 |
(* Replace every occurrence of the type variable [x] in [t] by [u]. *)
Fixpoint substitute (x : atom) (u : type) (t : type) : type :=
  match t with
  | TVar y =>
    match atom_eq_dec x y with
    | left _ => u
    | right _ => t
    end
  | TArrow dom cod => TArrow (substitute x u dom) (substitute x u cod)
  end.
49 |
(* One-sided "unifiability" relation.
   NOTE(review): a type variable is only accepted on the LEFT — for a
   non-variable [t], [unifies t (TVar x)] never holds, so the relation
   is not symmetric; confirm this asymmetry is intended. *)
Inductive unifies : type -> type -> Prop :=
| UVar : forall x t, unifies (TVar x) t
| UArrow : forall t1 t2 t3 t4,
    unifies t1 t3 ->
    unifies t2 t4 ->
    unifies (TArrow t1 t2) (TArrow t3 t4).

(* Boolean structural equality on types.  Despite the _dec suffix it
   returns bool, not a sumbool. *)
Fixpoint type_eq_dec (t1 t2 : type) : bool :=
  match t1, t2 with
  | TVar a1, TVar a2 => if atom_eq_dec a1 a2 then true else false
  | TArrow t1a t1b, TArrow t2a t2b =>
    andb (type_eq_dec t1a t2a) (type_eq_dec t1b t2b)
  | _, _ => false
  end.

(* All type-variable atoms occurring in a type (duplicates retained). *)
Fixpoint atoms_type (t : type) : list atom :=
  match t with
  | TVar x => [x]
  | TArrow t1 t2 => atoms_type t1 ++ atoms_type t2
  end.

(* All atoms occurring in a context: bound names plus the atoms of
   their types. *)
Fixpoint atoms_type_context (Gamma : context) : list atom :=
  match Gamma with
  | Empty => []
  | Extend x t G => x :: atoms_type t ++ atoms_type_context G
  end.

(* Most recent (leftmost) binding of [x] in [Gamma], if any. *)
Fixpoint lookup_context (Gamma: context) (x: atom) : option type :=
  match Gamma with
  | Empty => None
  | Extend y t G => if atom_eq_dec x y then Some t else lookup_context G x
  end.
82 |
(* Infer a type for [e] under [Gamma], returning None on failure.
   This is a simplified checker: an abstraction's argument gets a
   single fresh type variable and applications require exact type
   equality (no unification).
   Fix: the original bound [let t2 := TVar (fresh_atom (t1 :: ...))]
   in the EAbs case and then immediately shadowed it with the
   [Some t2] match pattern — the binding was dead code and has been
   removed; behavior is unchanged. *)
Fixpoint infer (e : expr) (Gamma : context) : option type :=
  match e with
  | EVar x => lookup_context Gamma x
  | EAbs x e =>
    (* The argument's type variable must not clash with Gamma. *)
    let t1 := fresh_atom (atoms_type_context Gamma) in
    match infer e (Extend x (TVar t1) Gamma) with
    | Some t2 => Some (TArrow (TVar t1) t2)
    | None => None
    end
  | EApp e1 e2 =>
    match infer e1 Gamma, infer e2 Gamma with
    | Some (TArrow t1 t2), Some t1' =>
      (* Operand type must match the arrow's domain exactly. *)
      if type_eq_dec t1 t1'
      then Some t2
      else None
    | _, _ => None
    end
  | ELet x e1 e2 =>
    match infer e1 Gamma with
    | Some t1 =>
      (* Monomorphic let: the bound type is not generalised. *)
      infer e2 (Extend x t1 Gamma)
    | None => None
    end
  end.
108 |
109 | End HindleyMilner.
--------------------------------------------------------------------------------
/theories/MyExtraction.v:
--------------------------------------------------------------------------------
(* Extraction configuration: map Coq's rational arithmetic (Q) onto
   Haskell's Prelude Rational operations. *)
From Coq Require Import QArith.
From Coq Require Import Extraction.
Require Coq.extraction.ExtrHaskellNatInt.


Extraction Language Haskell.

(* Extract Coq's Q to Haskell's Rational.
   NOTE(review): Q's single constructor is Qmake : Z -> positive -> Q;
   mapping it to "toRational" means the extracted constructor is applied
   to a numerator and a denominator, which toRational does not accept —
   confirm the generated Haskell actually compiles. *)
Extract Inductive Q => "Rational" [ "toRational" ].

(* Extract Coq's Qadd to Haskell's (+) *)
Extract Constant Qplus => "(+)".

(* Extract Coq's Qmult to Haskell's ( * ) *)
Extract Constant Qmult => "(*)" .

(* Extract Coq's Qopp to Haskell's negate *)
Extract Constant Qopp => "negate".

(* Extract Coq's Qsub to Haskell's (-) *)
Extract Constant Qminus => "(-)".

(* Extract Coq's Qdiv to Haskell's (/) *)
Extract Constant Qdiv => "(/)".

(* Extract Coq's Qeq to Haskell's (==) *)
Extract Constant Qeq => "(==)".

(* Extract Coq's Qle to Haskell's (<=) *)
Extract Constant Qle => "(<=)".

(* Extract Coq's Qlt to Haskell's (<) *)
Extract Constant Qlt => "(<)".

(* Extract Coq's Qcompare to Haskell's compare *)
Extract Constant Qcompare => "compare".
--------------------------------------------------------------------------------
/theories/Numerics.glob:
--------------------------------------------------------------------------------
1 | DIGEST 0f4eb4cfaa2677439d9b504a6201da02
2 | FPedant.theories.Numerics
3 | R15:29 Coq.Arith.Arith <> <> lib
4 | R48:65 Coq.Strings.String <> <> lib
5 | R84:106 Coq.FSets.FMapInterface <> <> lib
6 | R125:143 Coq.FSets.FMapFacts <> <> lib
7 | R162:178 Coq.FSets.FMapAVL <> <> lib
8 | R197:209 Coq.Bool.Bool <> <> lib
9 | R228:255 Coq.Structures.OrderedTypeEx <> <> lib
10 | R274:279 Coq.ZArith.ZArith <> <> lib
11 | R298:303 Coq.QArith.QArith <> <> lib
12 |
--------------------------------------------------------------------------------
/theories/Numerics.v:
--------------------------------------------------------------------------------
1 | Require Import Coq.Arith.Arith.
2 | Require Import Coq.Strings.String.
3 | Require Import Coq.FSets.FMapInterface.
4 | Require Import Coq.FSets.FMapFacts.
5 | Require Import Coq.FSets.FMapAVL.
6 | Require Import Coq.Bool.Bool.
7 | Require Import Coq.Structures.OrderedTypeEx.
8 | Require Import ZArith.
9 | Require Import QArith.
10 | Require Import Pedant.MyExtraction.
11 | Require Import Extraction.
12 | Require ExtrHaskellBasic.
13 | Require ExtrHaskellZInt.
14 | Open Scope Q_scope.
15 |
16 | Extraction Language Haskell.
17 |
(* Base dimensions are identified by nat keys. *)
Definition BaseUnit := nat.
(* NOTE(review): the opaque ascription (": OrderedType") hides the
   underlying pair type; this module does not appear to be used —
   UnitSet below is keyed on nat only.  Confirm it is still needed. *)
Module UnitPairOrderedType : OrderedType := PairOrderedType Nat_as_OT Z_as_OT.

Module UnitSet := FMapAVL.Make Nat_as_OT.


(* A unit is a finite map from base-dimension id to integer exponent. *)
Definition Unit := UnitSet.t Z.

(* NOTE(review): Haskell's Data.Map.Strict.Map takes two type
   arguments, while Unit is fully applied on the Coq side — the
   extracted code likely needs a saturated type such as
   "Data.Map.Strict.Map Prelude.Int Prelude.Integer"; check the
   generated Numerics.hs. *)
Extract Constant Unit => "Data.Map.Strict.Map".

(* Arithmetic expressions over dimensioned rational literals. *)
Inductive Expr : Type :=
| Num : Q -> Unit -> Expr
| Add : Expr -> Expr -> Expr
| Sub : Expr -> Expr -> Expr
| Mul : Expr -> Expr -> Expr
| Div : Expr -> Expr -> Expr.

(* Debug aid left in by the author. *)
Print remove.

(* Decidable equality on Z under a local name. *)
Definition Z_eq_dec : forall x y : Z, {x = y} + {x <> y} := Z.eq_dec.

(* Decidable equality on pairs of integers. *)
Lemma Zpair_eq_dec : forall x y : Z * Z, {x = y} + {x <> y}.
Proof.
  decide equality; apply Z_eq_dec.
Defined.
43 |
(* Pointwise combination of two unit maps with [f].
   Keys in both maps combine via [f]; a key only in [m1] keeps its
   exponent; a key only in [m2] is inserted as [f 0 v2] — so with
   [Z.add] it is kept as-is and with [Z.sub] it is negated, matching
   unit multiplication/division semantics.
   Fix: the original only ran [mapi] over [m1], silently DROPPING any
   dimension present solely in [m2] (e.g. metres * seconds lost the
   seconds exponent).  The extra fold below restores those keys. *)
Definition zip_units (f: Z -> Z -> Z) (m1 m2 : Unit) : Unit :=
  let combined :=
    UnitSet.mapi
      (fun k v1 =>
        match UnitSet.find k m2 with
        | Some v2 => (f v1 v2) : Z
        | None => v1 : Z
        end)
      m1 in
  (* Add every key that occurs in m2 but not in m1. *)
  UnitSet.fold
    (fun k v2 acc =>
      if UnitSet.mem k m1 then acc else UnitSet.add k (f 0%Z v2) acc)
    m2 combined.
52 |
(* Multiplying quantities adds the exponents of their units. *)
Definition unit_mult (u1 u2 : Unit) : Unit :=
  zip_units Z.add u1 u2.

(* Dividing quantities subtracts the exponents of their units. *)
Definition unit_div (u1 u2 : Unit) : Unit :=
  zip_units Z.sub u1 u2.

(* Debug aid left in by the author. *)
Print UnitSet.
(* Structural unit equality by comparing exponent maps.
   NOTE(review): zero exponents are never pruned from the maps, so two
   physically equal units can compare unequal — confirm whether a
   normalisation pass happens elsewhere. *)
Definition unit_eqb := UnitSet.equal Z.eqb.
61 |
(* Evaluate an expression to a rational paired with its unit.
   Add/Sub require both operands to share the same unit (unit_eqb);
   Mul/Div combine units via unit_mult/unit_div; division by zero
   (and any failing subterm) yields None. *)
Fixpoint eval (e : Expr) : option (Q * Unit) :=
  match e with
  | Num n u => Some (n, u)
  | Add e1 e2 =>
    match eval e1, eval e2 with
    | Some (n1, u1), Some (n2, u2) =>
      if unit_eqb u1 u2 then Some (n1 + n2, u1) else None
    | _, _ => None
    end
  | Sub e1 e2 =>
    match eval e1, eval e2 with
    | Some (n1, u1), Some (n2, u2) =>
      if unit_eqb u1 u2 then Some (n1 - n2, u1) else None
    | _, _ => None
    end
  | Mul e1 e2 =>
    match eval e1, eval e2 with
    | Some (n1, u1), Some (n2, u2) => Some (n1 * n2, unit_mult u1 u2)
    | _, _ => None
    end
  | Div e1 e2 =>
    match eval e1, eval e2 with
    | Some (n1, u1), Some (n2, u2) =>
      if Qeq_bool n2 0 then None else Some (n1 / n2, unit_div u1 u2)
    | _, _ => None
    end
  end.
89 |
(* Evaluate ignoring units entirely; None only on division by zero
   or a failing subterm.  Used to state conversion-function
   correctness below. *)
Fixpoint rawEval (e : Expr) : option Q :=
  match e with
  | Num n u => Some n
  | Add e1 e2 =>
    match rawEval e1, rawEval e2 with
    | Some n1, Some n2 => Some (n1 + n2)
    | _, _ => None
    end
  | Sub e1 e2 =>
    match rawEval e1, rawEval e2 with
    | Some n1, Some n2 => Some (n1 - n2)
    | _, _ => None
    end
  | Mul e1 e2 =>
    match rawEval e1, rawEval e2 with
    | Some n1, Some n2 => Some (n1 * n2)
    | _, _ => None
    end
  | Div e1 e2 =>
    match rawEval e1, rawEval e2 with
    | Some n1, Some n2 =>
      if Qeq_bool n2 0 then None else Some (n1 / n2)
    | _, _ => None
    end
  end.
115 |
(* Maps each base dimension to the rational factor used to rescale it. *)
Definition DimensionMultiplier : Type := BaseUnit -> Q.

(* A conversion acting on an (optional) raw evaluation result. *)
Definition RawConversionFunction : Type :=
  option Q -> DimensionMultiplier -> option Q.

(* q^n for a positive exponent, by binary exponentiation on [positive]. *)
Fixpoint ppower (q : Q) (n : positive) : Q :=
  match n with
  | xH => q
  | xO p => let r := ppower q p in Qmult r r
  | xI p => let r := ppower q p in Qmult q (Qmult r r)
  end.

(* q^n for an arbitrary integer exponent; negative exponents invert. *)
Definition power (q : Q) (n : Z) : Q :=
  match n with
  | Z0 => 1
  | Zpos p => ppower q p
  | Zneg p => Qinv (ppower q p)
  end.
134 |
(* Rescale [q] by the product, over every (dimension, exponent) entry
   of [u], of that dimension's multiplier raised to its exponent. *)
Definition multiplyUnit (q : Q) (u : Unit) (dm : DimensionMultiplier) : Q :=
  UnitSet.fold (fun unit_key exponent acc => Qmult acc (power (dm unit_key) exponent)) u 1 * q.

(* Coq's FMapAVL fold has type (key -> elt -> A -> A) -> t elt -> A -> A.
   Haskell's Data.Map.Strict.fold is (a -> b -> b) -> b -> Map k a -> b:
   it never passes the key and takes map/accumulator in the opposite
   order, so the previous mapping ("Data.Map.Strict.fold") yielded
   ill-typed extracted code.  foldrWithKey matches the combining
   function's shape; only the last two arguments need flipping. *)
Extract Constant UnitSet.fold => "\f m z -> Data.Map.Strict.foldrWithKey f z m".
139 |
(* Rescale every numeric literal in [e] by the dimension multiplier,
   leaving structure and units untouched. *)
Fixpoint multiplyExpression (e : Expr) (dm : DimensionMultiplier) : Expr :=
  match e with
  | Num q u => Num (multiplyUnit q u dm) u
  | Add e1 e2 => Add (multiplyExpression e1 dm) (multiplyExpression e2 dm)
  | Sub e1 e2 => Sub (multiplyExpression e1 dm) (multiplyExpression e2 dm)
  | Mul e1 e2 => Mul (multiplyExpression e1 dm) (multiplyExpression e2 dm)
  | Div e1 e2 => Div (multiplyExpression e1 dm) (multiplyExpression e2 dm)
  end.

(* Pointwise Qeq on optional rationals; both-None counts as equal. *)
Definition someQEq (q1 q2 : option Q): Prop :=
  match q1, q2 with
  | Some a, Some b => Qeq a b
  | None, None => True
  | _, _ => False
  end.
155 |
(* [g] is a valid conversion for [e] iff applying [g] to the raw
   evaluation agrees (up to Qeq) with evaluating the rescaled
   expression, for every choice of dimension multipliers. *)
Definition RawConversionFunctionValid (g: RawConversionFunction) (e: Expr) := forall (dm: DimensionMultiplier), someQEq (rawEval (multiplyExpression e dm)) (g (rawEval e) dm).

(* A conversion function packaged with its correctness proof. *)
Record ConversionFunction (e: Expr) := {
  g : RawConversionFunction;
  valid : RawConversionFunctionValid g e
}.

(* Conversion for a single literal: rescale the literal's value.
   NOTE(review): [valid] is coerced from [fun d => eq_refl], but the
   obligation unfolds to a [someQEq]/[Qeq] statement, not a syntactic
   equality — confirm this term actually typechecks. *)
Definition numConversionFunction (q : Q) (u : Unit) : ConversionFunction (Num q u):=
  let g := fun resultOption dim =>
    match resultOption with
    | Some result => Some (multiplyUnit q u dim)
    | None => None
    end
  in {|
    g:= g;
    valid:= (fun (d: DimensionMultiplier) => eq_refl) : RawConversionFunctionValid g (Num q u)
  |}.

(* Conversion for the sum of two literals sharing unit [u]: rescale
   the pre-computed sum q1 + q2 in one step. *)
Definition addConversionFunctionRaw (q1 q2 : Q) (u : Unit) : RawConversionFunction :=
  let g := fun resultOption dim =>
    match resultOption with
    | Some result => Some (multiplyUnit (q1 + q2) u dim)
    | None => None
    end
  in g.
181 |
(* Properties of nat-keyed finite maps, used by the proof below. *)
Module UnitSetFacts := WProperties_fun(Nat_as_OT)(UnitSet).

(* addConversionFunctionRaw is a valid conversion for the literal sum. *)
Lemma addConversionFunctionRawValid (q1 q2: Q) (u: Unit): RawConversionFunctionValid (addConversionFunctionRaw q1 q2 u) (Add (Num q1 u) (Num q2 u)).
Proof.
  unfold RawConversionFunctionValid.
  simpl.
  unfold multiplyUnit.
  intros.
  field.
Qed.
(* NOTE(review): redundant — the extraction language was already set to
   Haskell near the top of this file. *)
Extraction Language Haskell.

Extraction "Numerics.hs" multiplyExpression.
195 |
196 |
--------------------------------------------------------------------------------
/theories/Types.v:
--------------------------------------------------------------------------------
1 | Require Import Coq.Strings.String.
2 | Require Import ZArith.
3 | Require Import Coq.Sets.Ensembles.
4 | Require Import
5 | Coq.FSets.FMapList
6 | Coq.Structures.OrderedTypeEx.
7 | Require Import
8 | Coq.Structures.OrderedType.
9 |
10 |
(* A primitive dimension is either a concrete (literal) dimension name
   or a dimension variable, both identified by a string. *)
Inductive primDim : Set :=
| litDim : string -> primDim
| polyDim : string -> primDim.
14 |
(* Ordered-type instance for primDim: every literal dimension sorts
   below every dimension variable; within a constructor, strings are
   compared via String_as_OT. *)
Module primDim_as_OT <: UsualOrderedType.
  Definition t := primDim.
  Definition eq := @eq primDim.
  Definition eq_sym := @eq_sym t.
  Definition eq_trans := @eq_trans t.
  Definition eq_refl := @eq_refl t.

  (* Strict order used by the map functor below. *)
  Definition lt (a : primDim) (b: primDim) :=
    match a, b with
    | litDim x, litDim y => String_as_OT.lt x y
    | litDim x, polyDim y => True
    | polyDim x, litDim y => False
    | polyDim x, polyDim y => String_as_OT.lt x y
    end.

  Lemma lt_trans : forall x y z : t, lt x y -> lt y z -> lt x z.
  Proof.
    intros x y z H1 H2.
    destruct x; destruct y; destruct z; trivial; apply (String_as_OT.lt_trans s s0 s1); auto ;contradiction.
  Qed.

  Lemma lt_not_eq : forall x y : t, lt x y -> ~ eq x y.
  Proof.
    intros x y islt eq.
    destruct x.
    - destruct y.
      + simpl in islt.
        unfold primDim_as_OT.eq in eq.
        inversion eq.
        apply (String_as_OT.lt_not_eq s s0 islt) ; assumption.
      + unfold primDim_as_OT.eq in eq.
        discriminate.
    - destruct y.
      + contradiction.
      + unfold primDim_as_OT.eq in eq.
        inversion eq.
        apply (String_as_OT.lt_not_eq s s0 islt); assumption.
  Qed.

  (* NOTE(review): this does not typecheck as written.  [Compare lt eq a b]
     is built from LT/EQ/GT, which carry proofs, whereas Lt/Gt here are the
     proof-free constructors of [comparison]; and [String_as_OT.compare x y]
     produces a Compare at type string, not primDim.  Each branch needs to
     wrap an appropriate proof (e.g. [LT proof_of_lt]). *)
  Definition compare (a b : primDim) : Compare lt eq a b :=
    match a, b with
    | litDim x, litDim y => String_as_OT.compare x y
    | litDim x, polyDim y => Lt
    | polyDim x, litDim y => Gt
    | polyDim x, polyDim y => String_as_OT.compare x y
    end.
  (* NOTE(review): eq_nat_dec decides equality of nat, not primDim — the
     signature demands forall x y : t, {eq x y} + {~ eq x y}.  A proof by
     [decide equality; apply string_dec] is needed instead. *)
  Definition eq_dec := eq_nat_dec.



End primDim_as_OT.
66 |
(* Finite maps keyed by primitive dimensions. *)
Module Import dimMap := FMapList.Make(primDim_as_OT).

(* A dimension is a list of (primitive dimension, integer exponent)
   pairs; normDim is an ordinary dimension, powDim a power dimension. *)
Inductive dimension : Set :=
| normDim : list (primDim * Z) -> dimension
| powDim : list (primDim * Z) -> dimension.


(* Object-language types: dimensioned base values, lists, (poly)dicts
   represented as functions from field name to type, and functions. *)
Inductive type : Set :=
| baseType : dimension -> type
| listType : type -> type
| dictType : (string -> type) -> type
| polyDictType : (string -> type) -> type
| funcType : type -> type -> type.


(* A substitution maps dimension variables and type variables by name. *)
Record substitution : Set := mkSubstitution
  { subDimension : list (string * dimension)
  ; subTypes : list (string * type)
  }.

(* Overloaded operations: free type variables of a value, and
   application of a substitution to it. *)
Class Types A : Type :=
  { ftv : A -> Ensemble string;
    app : substitution -> A -> A
  }.
91 |
92 | Search ((?a -> bool) -> list ?a -> option ?a).
93 |
94 | Open Scope string_scope.
95 |
96 | Search ((?a -> ?b) -> option ?a -> option ?b).
97 | Check eqb.
98 |
(* Find the value bound to [name] in an association list, if any. *)
Definition lookup {A : Type} (name : string) (substitution : list (string * A)) : option A :=
  match List.find (fun x => fst x =? name) substitution with
  | Some hit => Some (snd hit)
  | None => None
  end.
101 |
102 | Print subDimension.
103 |
(* NOTE(review): incomplete — this definition has no body, so the file
   fails to parse from this point onward.  Left as found. *)
Definition lookupFreeVariables (dimMap : list (primDim * Z)) : Ensemble Z :=

106 |
(* NOTE(review): unfinished instance; it does not typecheck as written:
   - [ftv] matches a [dimension] against litDim/polyDim, which are
     constructors of [primDim], not [dimension];
   - [app]'s argument [a] is annotated as primDim while the class field
     requires dimension, and it passes [substitution] (the TYPE) to
     subDimension where the bound argument [sub] was presumably meant;
   - the inner match is left unterminated.
   Code preserved as found. *)
Instance typesDimension : Types dimension :=
  { ftv :=
      fun a:dimension =>
        match a with
        | litDim _ => Empty_set _
        | polyDim v => Singleton _ v
        end
  , app :=
      fun (sub:substitution) (a:primDim) =>
        match a with
        | litDim b => litDim b
        | polyDim v =>
          match lookup v (subDimension substitution) with
          Some sub


  }.
124 |
--------------------------------------------------------------------------------