├── src ├── Compiler.Tests │ ├── paket.references │ ├── Program.fs │ └── Compiler.Tests.fsproj ├── Compiler │ ├── paket.references │ ├── Program.fs │ └── Compiler.fsproj ├── POC.ASTExtraction │ ├── paket.references │ ├── ReQuestsal_1_1.fs │ ├── ASTExtraction.fsproj │ └── Program.fs ├── POC.ParsingLexing │ ├── paket.references │ ├── README.md │ ├── Error.fs │ ├── Monads.fs │ ├── Program.fs │ ├── Lexer.fs │ └── Parser.fs └── POC.TypeInference │ └── Ast3.fs ├── docs ├── _config.yml ├── GraphFlow.png ├── CompilerPhases.md ├── proofOfConceptSyntax.md └── syntax.md ├── .paket ├── paket.exe ├── paket.targets └── Paket.Restore.targets ├── paket.dependencies ├── README.md ├── ReQuetzal.sln └── .gitignore /src/Compiler.Tests/paket.references: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-cayman -------------------------------------------------------------------------------- /src/Compiler/paket.references: -------------------------------------------------------------------------------- 1 | FSharp.Compiler.Service -------------------------------------------------------------------------------- /src/POC.ASTExtraction/paket.references: -------------------------------------------------------------------------------- 1 | FSharp.Compiler.Service -------------------------------------------------------------------------------- /src/POC.ParsingLexing/paket.references: -------------------------------------------------------------------------------- 1 | FSharp.Compiler.Service -------------------------------------------------------------------------------- /.paket/paket.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/7sharp9/ReQuetzal/HEAD/.paket/paket.exe -------------------------------------------------------------------------------- /docs/GraphFlow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/7sharp9/ReQuetzal/HEAD/docs/GraphFlow.png -------------------------------------------------------------------------------- /src/POC.ASTExtraction/ReQuestsal_1_1.fs: -------------------------------------------------------------------------------- 1 | 2 | let x = 42 3 | let y = x + 43 4 | let z = x + y -------------------------------------------------------------------------------- /paket.dependencies: -------------------------------------------------------------------------------- 1 | source https://www.nuget.org/api/v2 2 | nuget ExtCore 3 | nuget FSharp.Compiler.Service 4 | nuget FSharp.Core 4.3.3 -------------------------------------------------------------------------------- /src/Compiler/Program.fs: -------------------------------------------------------------------------------- 1 | namespace Compiler 2 | 3 | 4 | module AST = 5 | 6 | 7 | [] 8 | let main argv = 9 | 10 | 0 // return an integer exit code 11 | -------------------------------------------------------------------------------- /src/Compiler.Tests/Program.fs: -------------------------------------------------------------------------------- 1 | // Learn more about F# at http://fsharp.org 2 | 3 | open System 4 | 5 | [] 6 | let main argv = 7 | printfn "Hello World from F#!" 
8 | 0 // return an integer exit code 9 | -------------------------------------------------------------------------------- /src/Compiler/Compiler.fsproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | Exe 4 | netcoreapp2.0 5 | 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ReQuetzal 2 | 3 | ReQuetzal is an experimental ML-style language with row polymorphism and first-class modules. It is currently in early development. 4 | 5 | [![Join the chat at https://gitter.im/7sharp9/ReQuetzal](https://badges.gitter.im/7sharp9/ReQuetzal.svg)](https://gitter.im/7sharp9/ReQuetzal?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) 6 | -------------------------------------------------------------------------------- /src/Compiler.Tests/Compiler.Tests.fsproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp2.0 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /src/POC.ASTExtraction/ASTExtraction.fsproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | netcoreapp2.0 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /src/POC.ParsingLexing/README.md: -------------------------------------------------------------------------------- 1 | ### Parser + Lexer work 2 | 3 | This folder keeps track of the work done on the hand-written parser. 4 | We have implemented the following: 5 | 6 | - A `ReadWriteStateResult` monad that handles all of these cases (we may not need the Read and Write parts) and handles failures as part of the try/with mechanism; we use a DU to represent failures. 7 | - We lex using active patterns and generate Indent and Dedent tokens. 8 | - We parse the token stream by type-instantiating the `RWSResult` monad. 9 | -------------------------------------------------------------------------------- /docs/CompilerPhases.md: -------------------------------------------------------------------------------- 1 | # Compiler phases 2 | 3 | The compiler will be composed of six major phases in the pipeline of work: 4 | 5 | 1. Lexing 6 | 2. Parsing 7 | 3. AST Generation 8 | 4. Semantic Analyses and Meta-Application 9 | 5. QIR Translation and Optimization 10 | 6. Code Generation 11 | 12 | Each phase yields a structure with extended information related to the step we are in. 13 | 14 | The structure is in fact decomposed into ***2 pipelines***. 15 | 16 | The **main** pipeline applies the six phases in a sequential fashion. 17 | 18 | We also have a **secondary** pipeline, called the ***Information pipeline***, used for gathering information about each phase without having to keep it during the whole main process (which might allow better performance and better support for future tooling).
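The split between the two pipelines could be modelled roughly as follows. This is only an illustrative sketch: the phase names come from the list above, but every type and function name here is an assumption rather than the actual compiler API, and a single artefact type is used for brevity.

```fsharp
// Hypothetical sketch of the main/information pipeline split (names assumed).
type Phase =
    | Lexing
    | Parsing
    | AstGeneration
    | SemanticAnalysis
    | QirTranslation
    | CodeGeneration

/// What one phase hands to the next (main pipeline), plus the diagnostics that
/// are published to the information pipeline instead of being carried through
/// the rest of the main process.
type PhaseOutput<'Artefact, 'Info> =
    { Artefact : 'Artefact
      Info     : 'Info list }

let runPipeline (publishInfo : Phase -> 'Info list -> unit)
                (phases : (Phase * ('Artefact -> PhaseOutput<'Artefact, 'Info>)) list)
                (input : 'Artefact) : 'Artefact =
    phases
    |> List.fold (fun artefact (phase, run) ->
        let output = run artefact
        // Side channel: the main pipeline keeps only the artefact.
        publishInfo phase output.Info
        output.Artefact) input
```

A real implementation would thread a different artefact type per phase (tokens, parse tree, AST, typed AST, QIR, generated code), but the fold captures the idea that information flows sideways while artefacts flow forwards.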
19 | 20 | ![GraphFlow.png Not Found](GraphFlow.png "Compiler Flow") 21 | 22 | ## Lexing 23 | 24 | ## Parsing 25 | 26 | ## AST Generation 27 | 28 | ## Semantic Analyses and Meta-Application 29 | 30 | ## QIR Translation and Optimization 31 | 32 | ## Code Generation 33 | -------------------------------------------------------------------------------- /ReQuetzal.sln: -------------------------------------------------------------------------------- 1 | 2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio 15 4 | VisualStudioVersion = 15.0.26124.0 5 | MinimumVisualStudioVersion = 15.0.26124.0 6 | Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{047DDF29-1049-47C5-B947-CF836E09F346}" 7 | EndProject 8 | Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "Compiler", "src\Compiler\Compiler.fsproj", "{6F8C48C6-BE53-4DB5-B474-1A62AF7858C0}" 9 | EndProject 10 | Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "Compiler.Tests", "src\Compiler.Tests\Compiler.Tests.fsproj", "{39B79B66-695B-4EB0-888B-10D24CE5B3DC}" 11 | EndProject 12 | Global 13 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 14 | Debug|Any CPU = Debug|Any CPU 15 | Debug|x64 = Debug|x64 16 | Debug|x86 = Debug|x86 17 | Release|Any CPU = Release|Any CPU 18 | Release|x64 = Release|x64 19 | Release|x86 = Release|x86 20 | EndGlobalSection 21 | GlobalSection(SolutionProperties) = preSolution 22 | HideSolutionNode = FALSE 23 | EndGlobalSection 24 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 25 | {6F8C48C6-BE53-4DB5-B474-1A62AF7858C0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 26 | {6F8C48C6-BE53-4DB5-B474-1A62AF7858C0}.Debug|Any CPU.Build.0 = Debug|Any CPU 27 | {6F8C48C6-BE53-4DB5-B474-1A62AF7858C0}.Debug|x64.ActiveCfg = Debug|x64 28 | {6F8C48C6-BE53-4DB5-B474-1A62AF7858C0}.Debug|x64.Build.0 = Debug|x64 29 | {6F8C48C6-BE53-4DB5-B474-1A62AF7858C0}.Debug|x86.ActiveCfg = Debug|x86 30 | {6F8C48C6-BE53-4DB5-B474-1A62AF7858C0}.Debug|x86.Build.0 = Debug|x86 31 | {6F8C48C6-BE53-4DB5-B474-1A62AF7858C0}.Release|Any CPU.ActiveCfg = Release|Any CPU 32 | {6F8C48C6-BE53-4DB5-B474-1A62AF7858C0}.Release|Any CPU.Build.0 = Release|Any CPU 33 | {6F8C48C6-BE53-4DB5-B474-1A62AF7858C0}.Release|x64.ActiveCfg = Release|x64 34 | {6F8C48C6-BE53-4DB5-B474-1A62AF7858C0}.Release|x64.Build.0 = Release|x64 35 | {6F8C48C6-BE53-4DB5-B474-1A62AF7858C0}.Release|x86.ActiveCfg = Release|x86 36 | {6F8C48C6-BE53-4DB5-B474-1A62AF7858C0}.Release|x86.Build.0 = Release|x86 37 | {39B79B66-695B-4EB0-888B-10D24CE5B3DC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 38 | {39B79B66-695B-4EB0-888B-10D24CE5B3DC}.Debug|Any CPU.Build.0 = Debug|Any CPU 39 | {39B79B66-695B-4EB0-888B-10D24CE5B3DC}.Debug|x64.ActiveCfg = Debug|x64 40 | {39B79B66-695B-4EB0-888B-10D24CE5B3DC}.Debug|x64.Build.0 = Debug|x64 41 | {39B79B66-695B-4EB0-888B-10D24CE5B3DC}.Debug|x86.ActiveCfg = Debug|x86 42 | {39B79B66-695B-4EB0-888B-10D24CE5B3DC}.Debug|x86.Build.0 = Debug|x86 43 | {39B79B66-695B-4EB0-888B-10D24CE5B3DC}.Release|Any CPU.ActiveCfg = Release|Any CPU 44 | {39B79B66-695B-4EB0-888B-10D24CE5B3DC}.Release|Any CPU.Build.0 = Release|Any CPU 45 | {39B79B66-695B-4EB0-888B-10D24CE5B3DC}.Release|x64.ActiveCfg = Release|x64 46 | {39B79B66-695B-4EB0-888B-10D24CE5B3DC}.Release|x64.Build.0 = Release|x64 47 | {39B79B66-695B-4EB0-888B-10D24CE5B3DC}.Release|x86.ActiveCfg = Release|x86 48 | {39B79B66-695B-4EB0-888B-10D24CE5B3DC}.Release|x86.Build.0 = Release|x86 49 | EndGlobalSection 50 | GlobalSection(NestedProjects) = preSolution 51 | 
{6F8C48C6-BE53-4DB5-B474-1A62AF7858C0} = {047DDF29-1049-47C5-B947-CF836E09F346} 52 | {39B79B66-695B-4EB0-888B-10D24CE5B3DC} = {047DDF29-1049-47C5-B947-CF836E09F346} 53 | EndGlobalSection 54 | EndGlobal 55 | -------------------------------------------------------------------------------- /.paket/paket.targets: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | true 7 | $(MSBuildThisFileDirectory) 8 | $(MSBuildThisFileDirectory)..\ 9 | $(PaketRootPath)paket.lock 10 | $(PaketRootPath)paket-files\paket.restore.cached 11 | /Library/Frameworks/Mono.framework/Commands/mono 12 | mono 13 | 14 | 15 | 16 | 17 | $(PaketRootPath)paket.exe 18 | $(PaketToolsPath)paket.exe 19 | "$(PaketExePath)" 20 | $(MonoPath) --runtime=v4.0.30319 "$(PaketExePath)" 21 | 22 | 23 | 24 | 25 | 26 | $(MSBuildProjectFullPath).paket.references 27 | 28 | 29 | 30 | 31 | $(MSBuildProjectDirectory)\$(MSBuildProjectName).paket.references 32 | 33 | 34 | 35 | 36 | $(MSBuildProjectDirectory)\paket.references 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | $(PaketCommand) restore --references-file "$(PaketReferences)" 49 | 50 | RestorePackages; $(BuildDependsOn); 51 | 52 | 53 | 54 | true 55 | 56 | 57 | 58 | $([System.IO.File]::ReadAllText('$(PaketRestoreCacheFile)')) 59 | $([System.IO.File]::ReadAllText('$(PaketLockFilePath)')) 60 | true 61 | false 62 | true 63 | 64 | 65 | 71 | 72 | 73 | -------------------------------------------------------------------------------- /src/POC.ParsingLexing/Error.fs: -------------------------------------------------------------------------------- 1 | namespace Compiler 2 | 3 | module AssemblyInfo = 4 | open System.Runtime.CompilerServices 5 | 6 | [] 7 | do () 8 | 9 | module Error = 10 | open Microsoft.FSharp.Quotations 11 | open Microsoft.FSharp.Reflection 12 | open Patterns 13 | 14 | /// Not used for the moment 15 | type ErrorPriority = 16 | | FatalError 17 | | ErrorRecoverable 18 | | Warning 19 | 20 | type IFailure = 21 | abstract member Description : string 22 | abstract member Priority : ErrorPriority 23 | 24 | 25 | module Helper = 26 | let inline private extractUnionCaseInfo (expr : Expr<'T>) = 27 | let rec aux (expr : Expr) = 28 | match expr with 29 | | NewUnionCase (uci,_) -> uci 30 | | Lambda (_,expr) -> aux expr 31 | | Let (_,_,expr) -> aux expr 32 | | _ -> 33 | failwithf "Unexpected expression : Expected a union expression but received : { %A }" expr 34 | 35 | aux expr 36 | 37 | 38 | 39 | let inline (|IsFailureArgs|_|) (expr : Expr< ^args -> ^T>) (failure : ^Failure when ^Failure :> IFailure) = 40 | // Check if the failure type is a Union 41 | if FSharpType.IsUnion (failure.GetType()) then 42 | let expectedType = typeof< ^T> 43 | printfn "Test : { %A }" (typeof< ^Failure>,FSharpType.IsUnion (typeof< ^Failure>)) 44 | let failureType = FSharpType.GetUnionCases(failure.GetType(),true).[0].DeclaringType 45 | // Compare expr type and failure type : should be equal 46 | if expectedType = failureType then 47 | // extract Union Case Info, if the expr is not a union, this will throw (bad implementation on the 48 | // the user side) 49 | let expectedUCI = extractUnionCaseInfo expr 50 | let (failureUCI,args) = FSharpValue.GetUnionFields(failure,typeof< ^T>) 51 | // both Union Case Info should be the same 52 | if expectedUCI = failureUCI then 53 | // return the args 54 | let args = 55 | match args.Length with 56 | | 0 -> box () 57 | | 1 -> args.[0] 58 | | _ -> FSharpValue.MakeTuple(args,typeof< ^args>) 59 | // This is safe to do, as we know 
the type of expr : Expr< ^args -> ^T> 60 | Some (args :?> ^args) 61 | else 62 | None 63 | else 64 | None 65 | else 66 | None 67 | 68 | let inline (|IsFailureNoArgs|_|) (expr : Expr< ^T>) (failure : ^Failure when ^Failure :> IFailure) = 69 | (|IsFailureArgs|_|) <@ fun () -> %expr @> failure 70 | 71 | 72 | 73 | 74 | 75 | // module test = 76 | // open Error 77 | // open Helper 78 | 79 | // type Test = 80 | // | Testing of int * float 81 | // | Toz 82 | // interface IFailure with 83 | // member this.Description = 84 | // match this with 85 | // | Toz -> "Toz" 86 | // | Testing (tokenType , token) -> 87 | // (sprintf "Testing => { %A } <=> { %A }" tokenType token) 88 | 89 | // member __.Priority = ErrorPriority.FatalError 90 | 91 | // let failure = Testing (5,2.0) 92 | // let e = 93 | // match failure with 94 | // | IsFailureArgs <@ Testing @> args -> printfn "%A" (failure,args) 95 | // | IsFailureNoArgs <@ Toz @> -> printfn "%A" failure 96 | // | _ -> failwith "failed" 97 | 98 | // // let e = extractUnionCaseInfo <@ Testing (5,2.0) @> 99 | 100 | // // let v = extractUnionCaseInfo <@ Toz @> 101 | -------------------------------------------------------------------------------- /docs/proofOfConceptSyntax.md: -------------------------------------------------------------------------------- 1 | # Scope 2 | 3 | In order to limit the scope and complexity at the beginning of the project, we will be using a limited version of the language to get the various parts of the compiler operational. The following lists the syntactical scope of the language, followed by parse tree and AST samples from similar languages such as F#; we will use these as a rough reference point during the design. 4 | 5 | ## Primitive types 6 | 7 | We will be limiting scope to `int`, `bool`, `string` and `float` to focus on getting simple type inference working before introducing variants like `uint32`, `double` etc. 8 | 9 | ## Bindings 10 | Bindings will be available as immutable `let` bindings and mutable `var` bindings. 11 | 12 | ## Functions 13 | Functions will be available in both fully named and anonymous variants. I considered limiting the scope to only named functions, but short function application will be a core part of the language and should be included. 14 | 15 | ## Modules 16 | Modules are initially out of scope; bindings, types and functions will all be implicitly in a single global module. We will explore module definition, importing and exporting in a later phase. 17 | 18 | ## Records 19 | Records comprising primitive types can be defined, but row-polymorphic features will be beyond the initial scope. 20 | 21 | ## Unions 22 | Simple unions with primitive components will be considered the only valid syntax initially. 23 | 24 | ## Pattern matching 25 | While pattern matching is an important part of the language, we will not be exploring it in any detail in the first phase. 26 | 27 | ## Advanced language concepts 28 | 29 | For version 0.1 of the language we will not be approaching any of the advanced parts of the language, such as constraints, active matches, and notation extension.
30 | 31 | # Syntax examples 32 | 33 | ## 1: Simple let and var bindings 34 | 35 | ### 1.1 simple immutable bindings 36 | ```fsharp 37 | let x = 42 38 | let y = x + 43 39 | let z = x + y 40 | ``` 41 | ### 1.2 simple mutable bindings 42 | ``` 43 | var a = 42 44 | var b = a + 43 45 | var c = a + b 46 | 47 | a <- b + c 48 | ``` 49 | 50 | ### parse tree 51 | ### AST 52 | 53 | ## Functions 54 | ### 2.1 simple named function 55 | ```elm 56 | id: 'a -> 'a 57 | id a = a 58 | ``` 59 | 60 | ```elm 61 | add: int -> int -> int 62 | add a b = 63 | a + b 64 | ``` 65 | ### 2.2 simple partially applied function 66 | ```elm 67 | addTwo: int -> int 68 | addTwo = 69 | add 2 70 | ``` 71 | 72 | ### parse tree 73 | ### AST 74 | 75 | ## 3: Records 76 | ### 3.1 simple record definition and instantiation 77 | 78 | ```fsharp 79 | type Point = {x: int, y: int} 80 | let myPoint = { x = 42, y = 172 } 81 | ``` 82 | 83 | ### 3.2 records with mixed types 84 | ```fsharp 85 | type Person { 86 | name: string 87 | age: int 88 | height: float 89 | } 90 | let person1 = {name="Fred", age=28, height=5.8} 91 | ``` 92 | 93 | ### 3.3 updating records 94 | ```fsharp 95 | let anotherPoint1 = { myPoint with y = 53 } 96 | //or syntax 2 97 | let anotherPoint2 = { myPoint | y = 54 } 98 | //or syntax 3 99 | let anotherPoint2 = { myPoint: y = 55 } 100 | ``` 101 | 102 | ### 3.4 anonymous records 103 | 104 | ```elm 105 | let anonymousPoint = { x = 42, y = 172 } 106 | ``` 107 | 108 | ### parse tree 109 | ### AST 110 | 111 | ## 4: Unions 112 | 113 | ```fsharp 114 | //syntax1 F# style 115 | type Shape = 116 | | Rectangle of width: float * length: float 117 | | Circle of radius: float 118 | | Prism of width: float * float * height: float 119 | let rect = Rectangle(length = 1.3, width = 10.0) 120 | let circ = Circle (1.0) 121 | let prism = Prism(5.0, 2.0, height = 3.0) 122 | 123 | //syntax2 nameless partials 124 | type Shape = 125 | | Rectangle of float float 126 | | Circle of float 127 | | Prism of float float float 128 | let rect = Rectangle 1.3 10.0 129 | let circ = Circle 1.0 130 | let prism = Prism 5.0 2.0 3.0 131 | 132 | //syntax3 named partials 133 | type Shape = 134 | | Rectangle of (width: float) (length: float) 135 | | Circle of radius: float 136 | | Prism of (width: float) float (height: float) 137 | let rect = Rectangle (length = 1.3) (width = 10.0) 138 | let circ = Circle (1.0) 139 | let prism = Prism 5.0 2.0 (height = 3.0) 140 | ``` 141 | 142 | We can use vary the syntax by not using of, perhaps using `:` instead. 
143 | ```fsharp 144 | //syntax 4 145 | type Shape = 146 | | Rectangle: width: float * length: float 147 | | Circle: radius: float 148 | | Prism: width: float * float * height: float 149 | ``` 150 | We could use use `,` to separate parameters and enclose in parentheses rather than using `of` as a prefix: 151 | ```swift 152 | //syntax 5 153 | type Shape = 154 | | Rectangle(width: float, length: float) 155 | | Circle(radius: float) 156 | | Prism(width: float * float * height: float) 157 | ``` 158 | 159 | 160 | ### parse tree 161 | ### AST 162 | -------------------------------------------------------------------------------- /src/POC.ASTExtraction/Program.fs: -------------------------------------------------------------------------------- 1 | // Learn more about F# at http://fsharp.org 2 | 3 | open System 4 | open System.IO 5 | open Microsoft.FSharp.Compiler.SourceCodeServices 6 | open Microsoft.FSharp.Compiler.Ast 7 | open System.Runtime.InteropServices.WindowsRuntime 8 | open System.Xml.Linq 9 | 10 | let checker = FSharpChecker.Create(keepAssemblyContents=true, ImplicitlyStartBackgroundWork=false) 11 | 12 | let fileName = "/Users/davethomas/Documents/GitHub/ReQuetzal/src/POC.ASTExtraction/ReQuestsal_1_1.fs" 13 | let projectFilename = "ReQuetsal.fsproj" 14 | 15 | let assembly = Reflection.Assembly.GetCallingAssembly() 16 | let referencedAssemblies = assembly.GetReferencedAssemblies() 17 | let dependencies = 18 | [ yield sprintf "-r:%s" "/usr/local/share/dotnet/shared/Microsoft.NETCore.App/2.0.0/mscorlib.dll" 19 | yield sprintf "-r:%s" "/usr/local/share/dotnet/shared/Microsoft.NETCore.App/2.0.0/netstandard.dll" 20 | yield sprintf "-r:%s" "/usr/local/share/dotnet/shared/Microsoft.NETCore.App/2.0.0/System.Private.CoreLib.dll" 21 | for a in referencedAssemblies do 22 | let aa = System.Reflection.Assembly.Load(a.FullName) 23 | yield sprintf "-r:%s" aa.Location ] 24 | 25 | let arguments = 26 | [| 27 | //yield "--simpleresolution" 28 | yield "--out:" + System.IO.Path.ChangeExtension(fileName, ".exe") 29 | yield "--platform:anycpu" 30 | yield "--fullpaths" 31 | yield "--flaterrors" 32 | yield "--target:exe" 33 | yield "--noframework" 34 | //yield sprintf "-r:%s" assembly.FullName 35 | yield "--targetprofile:netstandard" 36 | yield! dependencies 37 | yield fileName |] 38 | 39 | let projectOptions = 40 | 41 | checker.GetProjectOptionsFromCommandLineArgs(projectFilename, arguments) 42 | 43 | let input = File.ReadAllText("/Users/davethomas/Documents/GitHub/ReQuetzal/src/POC.ASTExtraction/ReQuestsal_1_1.fs") 44 | 45 | let projOptions = checker.GetProjectOptionsFromScript(fileName, input) |> Async.RunSynchronously 46 | 47 | let parsingOptions, _errors = checker.GetParsingOptionsFromProjectOptions(projectOptions) 48 | 49 | // Run the first phase (untyped parsing) of the compiler 50 | let parseFileResults = checker.ParseFile(fileName, input, parsingOptions) |> Async.RunSynchronously 51 | 52 | let parseTree = 53 | match parseFileResults.ParseTree with 54 | | Some tree -> tree 55 | | None -> failwith "Something went wrong during parsing. No parseTree!" 56 | 57 | let rec visitPattern (node: SynPat) = 58 | printf "Node: %s " (node.GetType().Name) 59 | match node with 60 | | SynPat.Wild(_range) -> 61 | printfn "underscore pattern" 62 | | SynPat.Named(pat, name, _, _, _range) -> 63 | visitPattern pat 64 | printfn "named: '%s'" name.idText 65 | | SynPat.LongIdent(LongIdentWithDots(ident, _), _, _, _, _, _) -> 66 | let names = String.concat "." 
[ for i in ident -> i.idText ] 67 | printfn "identifier: %s" names 68 | | pat -> 69 | printfn "other pattern: %A" pat 70 | 71 | let rec visitExpression (node: SynExpr) = 72 | printf "Node: %s " (node.GetType().Name) 73 | match node with 74 | | SynExpr.IfThenElse(cond, trueBranch, falseBranchOpt, _, _, _, _) -> 75 | // Visit all sub-expressions 76 | printfn "Conditional:" 77 | visitExpression cond 78 | visitExpression trueBranch 79 | falseBranchOpt |> Option.iter visitExpression 80 | 81 | | SynExpr.LetOrUse(_, _, bindings, body, _) -> 82 | // Visit bindings (there may be multiple 83 | // for 'let .. = .. and .. = .. in ...' 84 | printfn "LetOrUse with the following bindings:" 85 | for binding in bindings do 86 | let (Binding(access, kind, inlin, mutabl, attrs, xmlDoc, data, pat, retInfo, init, m, sp)) = binding 87 | visitPattern pat 88 | visitExpression init 89 | // Visit the body expression 90 | printfn "And the following body:" 91 | visitExpression body 92 | | SynExpr.Const(cst, _range) -> 93 | printfn "Const: %A" cst 94 | | SynExpr.App(exprAtomicFlag, isInfix, funExpr, argExpr, _range) -> 95 | (visitExpression funExpr) 96 | (visitExpression argExpr) 97 | 98 | | SynExpr.Ident(ident) -> 99 | printfn "Ident: %A" ident 100 | | expr -> printfn "visitExpression - not supported expression: %A" expr 101 | 102 | let visitDeclarations decls = 103 | for declaration in decls do 104 | printf "Node: %s " (declaration.GetType().Name) 105 | match declaration with 106 | | SynModuleDecl.Let(isRec, bindings, range) -> 107 | // Let binding as a declaration is similar to let binding 108 | // as an expression (in visitExpression), but has no body 109 | for binding in bindings do 110 | let (Binding(access, kind, inlin, mutabl, attrs, xmlDoc, data, pat, retInfo, body, m, sp)) = binding 111 | visitPattern pat 112 | visitExpression body 113 | | _ -> printfn "visitDeclarations - not supported declaration: %A" declaration 114 | 115 | let visitModulesAndNamespaces modulesOrNss = 116 | for moduleOrNs in modulesOrNss do 117 | let (SynModuleOrNamespace(lid, isRec, isMod, decls, xml, attrs, _, m)) = moduleOrNs 118 | if isMod then printfn "module: %A" lid 119 | else printfn "Namespace: %A" lid 120 | visitDeclarations decls 121 | 122 | let niceParseTree = 123 | match parseTree with 124 | | ParsedInput.ImplFile(ParsedImplFileInput(filename, isScript, qualifierFilename, scopedPragmas, hashDirectives, modules, (isLastCompiland, isExe))) -> 125 | visitModulesAndNamespaces modules 126 | | ParsedInput.SigFile _ -> failwith "F# Interface file not supported." 127 | 128 | let typedAst = 129 | checker.ParseAndCheckProject(projectOptions) |> Async.RunSynchronously 130 | 131 | printfn "Errors: %A" typedAst.Errors 132 | 133 | [] 134 | let main argv = 135 | printfn "\n%A" parseTree 136 | printfn "\n%A" typedAst.AssemblyContents.ImplementationFiles.Head.Declarations 137 | 0 // return an integer exit code 138 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 
3 | ## 4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore 5 | 6 | # User-specific files 7 | *.suo 8 | *.user 9 | *.userosscache 10 | *.sln.docstates 11 | 12 | # User-specific files (MonoDevelop/Xamarin Studio) 13 | *.userprefs 14 | 15 | # Build results 16 | [Dd]ebug/ 17 | [Dd]ebugPublic/ 18 | [Rr]elease/ 19 | [Rr]eleases/ 20 | x64/ 21 | x86/ 22 | bld/ 23 | [Bb]in/ 24 | [Oo]bj/ 25 | [Ll]og/ 26 | 27 | # Visual Studio 2015 cache/options directory 28 | .vs/ 29 | # Uncomment if you have tasks that create the project's static files in wwwroot 30 | #wwwroot/ 31 | 32 | # MSTest test Results 33 | [Tt]est[Rr]esult*/ 34 | [Bb]uild[Ll]og.* 35 | 36 | # NUNIT 37 | *.VisualState.xml 38 | TestResult.xml 39 | 40 | # Build Results of an ATL Project 41 | [Dd]ebugPS/ 42 | [Rr]eleasePS/ 43 | dlldata.c 44 | 45 | # .NET Core 46 | project.lock.json 47 | project.fragment.lock.json 48 | artifacts/ 49 | **/Properties/launchSettings.json 50 | 51 | *_i.c 52 | *_p.c 53 | *_i.h 54 | *.ilk 55 | *.meta 56 | *.obj 57 | *.pch 58 | *.pdb 59 | *.pgc 60 | *.pgd 61 | *.rsp 62 | *.sbr 63 | *.tlb 64 | *.tli 65 | *.tlh 66 | *.tmp 67 | *.tmp_proj 68 | *.log 69 | *.vspscc 70 | *.vssscc 71 | .builds 72 | *.pidb 73 | *.svclog 74 | *.scc 75 | 76 | # Chutzpah Test files 77 | _Chutzpah* 78 | 79 | # Visual C++ cache files 80 | ipch/ 81 | *.aps 82 | *.ncb 83 | *.opendb 84 | *.opensdf 85 | *.sdf 86 | *.cachefile 87 | *.VC.db 88 | *.VC.VC.opendb 89 | 90 | # Visual Studio profiler 91 | *.psess 92 | *.vsp 93 | *.vspx 94 | *.sap 95 | 96 | # TFS 2012 Local Workspace 97 | $tf/ 98 | 99 | # Guidance Automation Toolkit 100 | *.gpState 101 | 102 | # ReSharper is a .NET coding add-in 103 | _ReSharper*/ 104 | *.[Rr]e[Ss]harper 105 | *.DotSettings.user 106 | 107 | # JustCode is a .NET coding add-in 108 | .JustCode 109 | 110 | # TeamCity is a build add-in 111 | _TeamCity* 112 | 113 | # DotCover is a Code Coverage Tool 114 | *.dotCover 115 | 116 | # Visual Studio code coverage results 117 | *.coverage 118 | *.coveragexml 119 | 120 | # NCrunch 121 | _NCrunch_* 122 | .*crunch*.local.xml 123 | nCrunchTemp_* 124 | 125 | # MightyMoose 126 | *.mm.* 127 | AutoTest.Net/ 128 | 129 | # Web workbench (sass) 130 | .sass-cache/ 131 | 132 | # Installshield output folder 133 | [Ee]xpress/ 134 | 135 | # DocProject is a documentation generator add-in 136 | DocProject/buildhelp/ 137 | DocProject/Help/*.HxT 138 | DocProject/Help/*.HxC 139 | DocProject/Help/*.hhc 140 | DocProject/Help/*.hhk 141 | DocProject/Help/*.hhp 142 | DocProject/Help/Html2 143 | DocProject/Help/html 144 | 145 | # Click-Once directory 146 | publish/ 147 | 148 | # Publish Web Output 149 | *.[Pp]ublish.xml 150 | *.azurePubxml 151 | # TODO: Comment the next line if you want to checkin your web deploy settings 152 | # but database connection strings (with potential passwords) will be unencrypted 153 | *.pubxml 154 | *.publishproj 155 | 156 | # Microsoft Azure Web App publish settings. Comment the next line if you want to 157 | # checkin your Azure Web App publish settings, but sensitive information contained 158 | # in these scripts will be unencrypted 159 | PublishScripts/ 160 | 161 | # NuGet Packages 162 | *.nupkg 163 | # The packages folder can be ignored because of Package Restore 164 | **/packages/* 165 | # except build/, which is used as an MSBuild target. 
166 | !**/packages/build/ 167 | # Uncomment if necessary however generally it will be regenerated when needed 168 | #!**/packages/repositories.config 169 | # NuGet v3's project.json files produces more ignorable files 170 | *.nuget.props 171 | *.nuget.targets 172 | 173 | # Microsoft Azure Build Output 174 | csx/ 175 | *.build.csdef 176 | 177 | # Microsoft Azure Emulator 178 | ecf/ 179 | rcf/ 180 | 181 | # Windows Store app package directories and files 182 | AppPackages/ 183 | BundleArtifacts/ 184 | Package.StoreAssociation.xml 185 | _pkginfo.txt 186 | 187 | # Visual Studio cache files 188 | # files ending in .cache can be ignored 189 | *.[Cc]ache 190 | # but keep track of directories ending in .cache 191 | !*.[Cc]ache/ 192 | 193 | # Others 194 | ClientBin/ 195 | ~$* 196 | *~ 197 | *.dbmdl 198 | *.dbproj.schemaview 199 | *.jfm 200 | *.pfx 201 | *.publishsettings 202 | orleans.codegen.cs 203 | 204 | # Since there are multiple workflows, uncomment next line to ignore bower_components 205 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 206 | #bower_components/ 207 | 208 | # RIA/Silverlight projects 209 | Generated_Code/ 210 | 211 | # Backup & report files from converting an old project file 212 | # to a newer Visual Studio version. Backup files are not needed, 213 | # because we have git ;-) 214 | _UpgradeReport_Files/ 215 | Backup*/ 216 | UpgradeLog*.XML 217 | UpgradeLog*.htm 218 | 219 | # SQL Server files 220 | *.mdf 221 | *.ldf 222 | *.ndf 223 | 224 | # Business Intelligence projects 225 | *.rdl.data 226 | *.bim.layout 227 | *.bim_*.settings 228 | 229 | # Microsoft Fakes 230 | FakesAssemblies/ 231 | 232 | # GhostDoc plugin setting file 233 | *.GhostDoc.xml 234 | 235 | # Node.js Tools for Visual Studio 236 | .ntvs_analysis.dat 237 | node_modules/ 238 | 239 | # Typescript v1 declaration files 240 | typings/ 241 | 242 | # Visual Studio 6 build log 243 | *.plg 244 | 245 | # Visual Studio 6 workspace options file 246 | *.opt 247 | 248 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
249 | *.vbw 250 | 251 | # Visual Studio LightSwitch build output 252 | **/*.HTMLClient/GeneratedArtifacts 253 | **/*.DesktopClient/GeneratedArtifacts 254 | **/*.DesktopClient/ModelManifest.xml 255 | **/*.Server/GeneratedArtifacts 256 | **/*.Server/ModelManifest.xml 257 | _Pvt_Extensions 258 | 259 | # Paket dependency manager 260 | .paket/paket.exe 261 | paket-files/ 262 | 263 | # FAKE - F# Make 264 | .fake/ 265 | 266 | # JetBrains Rider 267 | .idea/ 268 | *.sln.iml 269 | 270 | # CodeRush 271 | .cr/ 272 | 273 | # Python Tools for Visual Studio (PTVS) 274 | __pycache__/ 275 | *.pyc 276 | 277 | # Cake - Uncomment if you are using it 278 | # tools/** 279 | # !tools/packages.config 280 | 281 | # Telerik's JustMock configuration file 282 | *.jmconfig 283 | 284 | # BizTalk build output 285 | *.btp.cs 286 | *.btm.cs 287 | *.odx.cs 288 | *.xsd.cs 289 | .DS_Store 290 | -------------------------------------------------------------------------------- /src/POC.ParsingLexing/Monads.fs: -------------------------------------------------------------------------------- 1 | namespace Compiler 2 | 3 | 4 | [] 5 | module Monoid = 6 | 7 | 8 | type IMonoid<'M> = 9 | abstract member Zero : (unit -> 'M) 10 | abstract member Combine : ('M * 'M -> 'M) 11 | 12 | let list = 13 | { new IMonoid> with 14 | member __.Zero = fun () -> [] 15 | member __.Combine = fun (l1,l2) -> l1 @ l2 16 | } 17 | let array = 18 | { new IMonoid<'T []> with 19 | member __.Zero = fun () -> [||] 20 | member __.Combine = fun (a1,a2) -> Array.append a1 a2 21 | } 22 | 23 | let unit = 24 | { new IMonoid<_> with 25 | member __.Zero = fun () -> () 26 | member __.Combine = fun (_,_) -> () 27 | } 28 | 29 | [] 30 | module RWSResult = 31 | 32 | /// might want to change that as in the case of the compiler we do not want to 33 | /// fail directly, but propagate a list of errors with priority 34 | /// TODO : To be defined and thought of carefully as this could be our error handling/recovering mechanism 35 | /// with the user of "compensators" which compensates the user code when possible 36 | [] 37 | [] 38 | type Result<'Read, 'Write, 'State , 'T, 'F> = 39 | | Success of 'Read * 'Write * 'State * 'T 40 | | Failure of 'F 41 | 42 | [] 43 | [] 44 | type RWSRInternal< 'Read, 'Write, 'State, 'T, 'F> = 45 | RWSRInternal of ('State * 'Read -> Result<'Read , 'Write, 'State, 'T, 'F>) 46 | 47 | type RWSRDelayed< 'Read, 'Write, 'State, 'T, 'F> = 48 | unit -> RWSRInternal< 'Read, 'Write, 'State, 'T, 'F> 49 | 50 | [] 51 | [] 52 | type RWSResult< 'Read, 'Write, 'State, 'T, 'F> = RWSResult of RWSRDelayed< 'Read, 'Write, 'State, 'T, 'F> 53 | 54 | 55 | type RWSResultBuilder<'W> (monoid: IMonoid<'W>) = 56 | member __.Bind(RWSResult delayed, f:'T -> RWSRInternal<'R,'W,'S,'U,'F>) : RWSRInternal<'R,'W,'S,'U,'F> = 57 | RWSRInternal 58 | (fun (state,read) -> 59 | let (RWSRInternal rwsResult1) = delayed () 60 | match rwsResult1 (state,read) with 61 | | Success (_,write1,state1,value1) -> 62 | let (RWSRInternal rwsResult2) = f value1 63 | match rwsResult2 (state1,read) with 64 | | Success (_,write2,state2,value2) -> 65 | let writeCombined = monoid.Combine(write1,write2) 66 | Success (read,writeCombined,state2,value2) 67 | | Failure failure -> Failure failure 68 | | Failure failure -> Failure failure 69 | ) 70 | 71 | member __.Return(value:'T) : RWSRInternal<'R,'W,'S,'T,'F> = 72 | RWSRInternal (fun (state,read) -> Success (read, monoid.Zero() , state ,value)) 73 | 74 | member __.ReturnFrom(RWSResult update : RWSResult<'R,'W,'S,'T,'F>) : RWSRInternal<'R,'W,'S,'T,'F> = 75 | update () 
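// Note on the builder above: Bind runs the first delayed computation against the
// current (state, read) pair, threads the resulting state into the second
// computation while sharing the same read environment, combines both write
// outputs with the injected IMonoid, and short-circuits on Failure.
//
// Illustrative usage only (the names below are a sketch, not part of this file):
//   let rwsr = RWSResultBuilder(Monoid.list)
//   let computation = rwsr { let! current = rwsGetState Monoid.list
//                            return! rwsPutState Monoid.list (current + 1) }
//   rwsRun (0, "some read environment") computation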
76 | 77 | // member this.Yield(value:'T) : RWSResult<'R,'W,'S,'T> = this.Return value 78 | // member this.YieldFrom(delayed) = this.ReturnFrom delayed 79 | 80 | member this.Zero() : RWSRInternal<'R,'W,'S, unit,'F> = this.Return () 81 | 82 | member __.Delay(f: RWSRDelayed<'R,'W,'S,'T,'F>) = f 83 | 84 | member __.Run(f:RWSRDelayed<'R,'W,'S,'T,'F>) = RWSResult f 85 | 86 | member this.Combine(rwsResult : RWSRInternal<'R,'W,'S,unit,'F>, delayed :RWSRDelayed<'R,'W,'S,'T,'F>) : RWSRInternal<'R,'W,'S,'T,'F> = 87 | this.Bind( 88 | this.Run( 89 | this.Delay( 90 | fun () -> rwsResult)), 91 | delayed) 92 | 93 | member this.TryFinally(body:RWSRDelayed<'R,'W,'S,'T,'F>, compensation) = 94 | try this.ReturnFrom(RWSResult body) 95 | finally compensation() 96 | 97 | member this.TryWith(body : RWSRDelayed<'R,'W,'S,'T,'F>, handler: 'F -> RWSRInternal<'R,'W,'S,'T,'F>) : RWSRInternal<'R,'W,'S,'T,'F> = 98 | 99 | let (RWSRInternal t) = this.ReturnFrom(RWSResult body) 100 | // let e = 101 | fun (read,state) -> 102 | let f = t (read,state) 103 | match f with 104 | | Success _ -> f 105 | | Failure failure -> 106 | try 107 | let (RWSRInternal t1) = handler failure 108 | t1 (read,state) 109 | with 110 | | :? System.Runtime.CompilerServices.RuntimeWrappedException -> 111 | printfn "\n\n\n\n RuntimeWrappedException caught" 112 | f 113 | |> RWSRInternal 114 | 115 | 116 | 117 | member this.Using(disposable:#System.IDisposable, body : 'a -> RWSRInternal<'R,'W,'S,'T,'F>) = 118 | let body' = fun () -> body disposable 119 | this.TryFinally(body', fun () -> 120 | match disposable with 121 | | null -> () 122 | | disp -> disp.Dispose()) 123 | 124 | member this.While(guard: unit -> bool, body: RWSRDelayed<'R,'W,'S,unit,'F>) : RWSRInternal<'R,'W,'S,unit,'F> = 125 | let rec whileLoop guard body = 126 | if guard() then 127 | this.Bind (RWSResult( body ), fun _ -> whileLoop guard body) 128 | else 129 | this.Zero() 130 | 131 | whileLoop guard body 132 | 133 | member this.For(sequence:seq<'a>, body: 'a -> RWSRInternal<'R,'W,'S,unit,'F>) : RWSRInternal<'R,'W,'S,unit,'F> = 134 | this.Using(sequence.GetEnumerator(),fun enum -> 135 | this.While(enum.MoveNext, 136 | this.Delay(fun () -> body enum.Current))) 137 | 138 | 139 | 140 | 141 | let rwsGetState (monoid : IMonoid<'W>) = 142 | RWSResult(fun () -> RWSRInternal(fun (state,read) -> Success (read,monoid.Zero(),state,state) ) ) 143 | 144 | let rwsPutState (monoid : IMonoid<'W>) (state: 'S) = 145 | RWSResult(fun () -> RWSRInternal(fun (_,read) -> Success (read,monoid.Zero(),state,()) ) ) 146 | 147 | let rwsRead (monoid : IMonoid<'W>) = 148 | RWSResult(fun () -> RWSRInternal(fun (state,read) -> Success (read,monoid.Zero(),state,read) ) ) 149 | 150 | let rwsWrite (value : 'W) = 151 | RWSResult(fun () -> RWSRInternal(fun (state,read) -> Success (read,value,state,())) ) 152 | 153 | let rwsRun (state,read) (RWSResult delayed: RWSResult<'R,'W,'S,'T,'F>) = 154 | let (RWSRInternal rwsResult) = delayed () 155 | rwsResult (state,read) 156 | 157 | 158 | 159 | 160 | -------------------------------------------------------------------------------- /src/POC.ParsingLexing/Program.fs: -------------------------------------------------------------------------------- 1 | namespace Compiler 2 | open System 3 | open System.IO 4 | 5 | module AST = 6 | open LexerTokenization 7 | open Parser 8 | open Parsing 9 | // let t = 5 10 | // // ***************************************************************************************************** 11 | // // AST => might need to change but these Unions represent the AST 12 | 
// // ***************************************************************************************************** 13 | // type Name = Name of string 14 | // type VarName = VarName of string 15 | 16 | 17 | // type Literal = 18 | // | String of string 19 | // | Bool of bool 20 | // | Int of int 21 | // | Float of float 22 | 23 | // type Expr = 24 | // // For example : x in `f(x)` => Var "x" 25 | // | Var of VarName 26 | // | Literal of Literal 27 | // | Let of Name * Expr * Expr 28 | // | Lambda of Name * Expr 29 | // | Application of Expr * Expr 30 | 31 | 32 | // // ***************************************************************************************************** 33 | // // Type tree => to be used for type inference and type checking 34 | // // ***************************************************************************************************** 35 | // // For example : `∀ α. α -> int -> α` => `α` is a type variable : TypeVariable "α" BUT `int` is not a type variable, it is a type constant 36 | // type TypeVariable = TypeVariable of string 37 | // type TypeConstant = TypeConstant of string 38 | 39 | // type Type = 40 | // | TypeLambda of Type * Type 41 | // | TypeVariable of TypeVariable 42 | // | TypeConstant of TypeConstant 43 | 44 | // // Type Scheme represent polymorphic types : `id: ∀α. α -> α` for instance 45 | // // `id: ∀αβ. α -> β -> int -> list α * β ` is decomposed into 46 | // // `∀αβ` before the . which represents the bounded type variables `α and β` which are bounded to the quantifier `∀` 47 | // // The F# type system allow us to represent two part what is before the . which is a list of type variable and what is after which is the type itself 48 | // type Scheme = 49 | // { BoundedTypeVariables : Set // To ensure that each type variable is unique, we use a Set 50 | // EnclosedType : Type } 51 | 52 | // // The Environment is the container of all information (everything, type scheme, bounded variables, variable names ...) 53 | // // Each variable found in the Expr tree has an associated type scheme which could be generalized (with some type holes to fill) 54 | // // or instanciated (every hole is filled). 55 | // type Environment = Environment of Map 56 | 57 | // // Substitutions are just mapping from type variables to types that could be applied to a specific environment. 58 | // type Substitutions = Substitutions of Map 59 | 60 | type StreamWriter with 61 | member x.writeAsyncLine(input:string) = x.WriteLineAsync(input) |> Async.AwaitTask 62 | member x.writeAsyncNewLines(N:int) = 63 | async{ 64 | for _ in 1..N do 65 | return! 
x.writeAsyncLine("") 66 | } 67 | 68 | 69 | [] 70 | let main argv = 71 | 72 | 73 | let inputsTYPE = 74 | [ 75 | "int" 76 | "int -> float" 77 | "(int -> (float -> int))" 78 | "(int -> (float -> int)) -> None" 79 | "(float -> int) -> None" 80 | "int -> (float -> int)) -> None" 81 | "" 82 | "int -> float ->" 83 | ] 84 | 85 | let inputsPARAMS = 86 | [ 87 | "f" 88 | "(f)" 89 | "(f :int)" 90 | "(f :int ,g : float,h)" 91 | "(f :int ,g,h )" 92 | "(f,g,h)" 93 | "(f :int ,g : float,h : (int -> float -> (float -> None) ) )" 94 | "(f,g,(h)" 95 | "" 96 | ] 97 | 98 | 99 | let inputsEXPR = 100 | [ 101 | "let x = 5" 102 | "let x = 5 + 3" 103 | "let y = false" 104 | "let e = 5+3+4+(3+4*5)" 105 | ] 106 | 107 | 108 | let parseTest (writer:StreamWriter) (parser:unit -> Parser<'T>) (input:string) = 109 | async{ 110 | printfn "===================================" 111 | printfn "TEST ==> { %s }" input 112 | printfn "===================================" 113 | let tokens = lexing input 114 | let parsed = runParser tokens (parser()) 115 | 116 | match parsed with 117 | | Success (_,_,s,t) -> 118 | do! writer.writeAsyncLine(sprintf "SUCCESS ==> { %s }" input) 119 | do! writer.writeAsyncLine(sprintf "%A" s) 120 | do! writer.writeAsyncNewLines 2 121 | do! writer.writeAsyncLine(sprintf "%A" t) 122 | do! writer.writeAsyncNewLines 4 123 | | Failure failure -> 124 | do! writer.writeAsyncLine(sprintf "FAILURE => { %s }" input) 125 | do! writer.writeAsyncLine(sprintf "%A" failure) 126 | do! writer.writeAsyncNewLines 2 127 | do! writer.writeAsyncLine(sprintf "%s" (failure.Description)) 128 | do! writer.writeAsyncNewLines 4 129 | } 130 | 131 | let parseTests (writer:StreamWriter) (name:string) (parser:unit -> Parser<'T>) (inputs:string list) = 132 | async{ 133 | printfn "==========================================" 134 | printfn "TEST SUITE ==> { %s }" name 135 | printfn "==========================================" 136 | do! writer.writeAsyncLine(sprintf "==========================================" ) 137 | do! writer.writeAsyncLine(sprintf "TEST SUITE ==> { %s }" name) 138 | do! writer.writeAsyncLine(sprintf "==========================================" ) 139 | do! writer.writeAsyncNewLines 2 140 | 141 | for input in inputs do 142 | return! parseTest writer parser input 143 | 144 | do! writer.writeAsyncNewLines 10 145 | } 146 | 147 | let run () = 148 | async{ 149 | let path = @"C:\Users\Lleutch\Workspace\test.txt" 150 | use writer = new StreamWriter(File.Create(path)) 151 | 152 | 153 | do! parseTests writer "PARAMETERS" parsePTParam inputsPARAMS 154 | do! parseTests writer "TYPE DEFINITION" parsePTTypeDef inputsTYPE 155 | do! 
parseTests writer "EXPRESSIONS" parsePTExpr inputsEXPR 156 | } 157 | 158 | run () |> Async.RunSynchronously 159 | // Async.Sleep 5000 |> Async.RunSynchronously 160 | 0 // return an integer exit code 161 | -------------------------------------------------------------------------------- /src/POC.TypeInference/Ast3.fs: -------------------------------------------------------------------------------- 1 | module rec Ast3 2 | open System 3 | open System.Collections.Generic 4 | 5 | type name = string 6 | type label = string 7 | 8 | type exp = 9 | | EVar of name 10 | | EPrim of prim 11 | | EApp of exp * exp 12 | | EAbs of name * exp 13 | | ELet of name * exp * exp 14 | 15 | and prim = 16 | | Int of int 17 | | Bool of bool 18 | | Cond 19 | | RecordSelect of label 20 | | RecordExtend of label 21 | | RecordRestrict of label 22 | | RecordEmpty 23 | 24 | type Typ = 25 | | TVar of name 26 | | TInt 27 | | TBool 28 | | TFun of Typ * Typ 29 | | TRecord of Typ 30 | | TRowEmpty 31 | | TRowExtend of label * Typ * Typ 32 | 33 | type Scheme = Scheme of name list * Typ 34 | 35 | type TypeEnv = Map 36 | 37 | type Subst = Map 38 | 39 | module Typ = 40 | let rec ftv (typ: Typ) = 41 | match typ with 42 | | TVar name -> Set.singleton name 43 | | TInt -> Set.empty 44 | | TBool -> Set.empty 45 | | TFun(t1, t2) -> Set.union (ftv t1) (ftv t2) 46 | | TRecord typ -> ftv typ 47 | | TRowEmpty -> Set.empty 48 | | TRowExtend (_l, t, r) -> Set.union (ftv r) (ftv t) 49 | 50 | let rec apply s typ = 51 | match typ with 52 | | TVar n -> 53 | match Map.tryFind n s with 54 | | Some t -> t 55 | | None -> TVar n 56 | | TFun(t1, t2) -> 57 | TFun (apply s t1, apply s t2) 58 | | TRecord t -> 59 | TRecord (apply s t) 60 | | TRowExtend (l, t, r) -> 61 | TRowExtend(l, apply s t, apply s r) 62 | | TInt | TBool | TRowEmpty -> 63 | typ 64 | 65 | let parens s = 66 | sprintf "( %s )" s 67 | 68 | let braces s = 69 | sprintf "{ %s }" s 70 | let toString ty = 71 | let rec parenType ty = 72 | match ty with 73 | | TFun(_type1, _type2) -> parens (toString ty) 74 | | _ -> toString ty 75 | 76 | match ty with 77 | | TVar name -> name 78 | | TInt -> "int" 79 | | TBool -> "bool" 80 | | TFun(t, s) -> 81 | (parenType t) + " -> " + (toString s) 82 | | TRecord typ -> 83 | sprintf "{ %s }" ( toString typ) 84 | | TRowEmpty -> "{ }" 85 | | TRowExtend (label, typ, row) -> 86 | sprintf "%s = %s | %s" label (toString typ) (toString row) 87 | 88 | module Scheme = 89 | let ftv (scheme: Scheme) = 90 | match scheme with 91 | | Scheme(variables, typ) -> 92 | Set.difference (Typ.ftv typ) (Set.ofList variables) 93 | 94 | let apply (s: Subst) (scheme: Scheme) = 95 | match scheme with 96 | | Scheme(vars, t) -> 97 | let newSubst = List.foldBack (fun key currentSubst -> Map.remove key currentSubst ) vars s 98 | let newTyp = Typ.apply newSubst t 99 | Scheme(vars, newTyp) 100 | 101 | module TypeEnv = 102 | let remove (env: TypeEnv) (var : string)= 103 | Map.remove var env 104 | 105 | let ftv (typEnv: TypeEnv) = 106 | Seq.foldBack (fun (KeyValue(_key ,v)) state -> 107 | Set.union state (Scheme.ftv v)) typEnv Set.empty 108 | 109 | let apply (s : Subst) (env: TypeEnv) = 110 | Map.map (fun _k v -> Scheme.apply s v) env 111 | 112 | module Subst = 113 | /// Apply `s1` to `s2` then merge the results 114 | let compose s1 s2 = 115 | Map.union (Map.map (fun _k (v : Typ) -> Typ.apply s1 v) s2) s1 116 | 117 | ///generalize abstracts a type over all type variables which are free 118 | /// in the type but not free in the given type environment. 
119 | let generalize (env : TypeEnv) (t : Typ) = 120 | let variables = 121 | Set.difference (Typ.ftv t) (TypeEnv.ftv env) 122 | |> Set.toList 123 | Scheme(variables, t) 124 | 125 | let private currentId = ref 0 126 | 127 | let nextId() = 128 | let id = !currentId 129 | currentId := id + 1 130 | id 131 | 132 | let resetId() = currentId := 0 133 | 134 | let newTyVar prefix = 135 | TVar ( sprintf "%s%i" prefix (nextId ())) 136 | 137 | /// Replaces all bound type variables in a type scheme with fresh type variables. 138 | let instantiate (ts : Scheme) = 139 | match ts with 140 | | Scheme(vars, t) -> 141 | let nvars = vars |> List.map (fun name -> newTyVar (string name.[0]) ) 142 | let s = List.zip vars nvars |> Map.ofList 143 | Typ.apply s t 144 | 145 | let rewriteRow (row: Typ) newLabel = 146 | match row with 147 | | TRowEmpty -> failwithf "label %s cannot be inserted" newLabel 148 | | TRowExtend(label, fieldTy, rowTail) when newLabel = label -> 149 | (fieldTy, rowTail, Map.empty) //nothing to do 150 | | TRowExtend(label, fieldTy, rowTail) -> 151 | match rowTail with 152 | | TVar alpha -> 153 | let beta = newTyVar "r" 154 | let gamma = newTyVar "a" 155 | gamma, TRowExtend(label, fieldTy, beta), Map.singleton alpha (TRowExtend(newLabel, gamma, beta)) 156 | | _otherwise -> 157 | let (fieldTy', rowTail', s) = rewriteRow rowTail newLabel 158 | fieldTy', TRowExtend(label, fieldTy, rowTail'), s 159 | | _ -> failwithf "Unexpected type: %A" row 160 | 161 | let varBind u t = 162 | match t with 163 | | TVar name when name = u -> Map.empty 164 | | _ when Set.contains u (Typ.ftv t) -> 165 | failwithf "Occur check fails: %s vs %A" u t 166 | | _ -> Map.singleton u t 167 | 168 | 169 | let rec unify (t1 : Typ) (t2 : Typ) : Subst = 170 | match t1, t2 with 171 | | TFun (l, r), TFun (l', r') -> 172 | let s1 = unify l l' 173 | let s2 = unify (Typ.apply s1 r) (Typ.apply s1 r') 174 | Subst.compose s2 s1 175 | | TVar u, t 176 | | t, TVar u -> varBind u t 177 | | TInt, TInt -> Map.empty 178 | | TBool, TBool -> Map.empty 179 | | TRecord row1, TRecord row2 -> 180 | unify row1 row2 181 | | TRowEmpty, TRowEmpty -> Map.empty 182 | | TRowExtend(label1, fieldTyp1, rowTail1), (TRowExtend(_,_,_) as row2) -> 183 | let fieldTy2, rowTail2, theta1 = rewriteRow row2 label1 184 | let rec toList ty = 185 | match ty with 186 | | TVar name -> [], Some name 187 | | TRowEmpty -> [], None 188 | | TRowExtend(l, t, r) -> 189 | let ls, mv = toList r 190 | (l, t) :: ls, mv 191 | | _ -> failwithf "invalid row tail %A" ty 192 | let result = toList rowTail1 193 | match snd result with 194 | | Some tv when theta1 |> Map.containsKey tv -> 195 | failwithf "recursive row type" 196 | | _ -> 197 | let theta2 = unify (Typ.apply theta1 fieldTyp1) (Typ.apply theta1 fieldTy2) 198 | let s = Subst.compose theta2 theta1 199 | let theta3 = unify (Typ.apply s rowTail1) (Typ.apply s rowTail2) 200 | Subst.compose theta3 s 201 | | _ -> failwithf "Types do not unify: %A vs %A" t1 t2 202 | 203 | let rec ti (env : TypeEnv) (exp : exp) : Subst * Typ = 204 | match exp with 205 | | EVar name -> 206 | match Map.tryFind name env with 207 | | None -> failwithf "Unbound variable: %s" name 208 | | Some sigma -> 209 | let t = instantiate sigma 210 | Map.empty, t 211 | | EPrim prim -> (Map.empty, tiPrim prim) 212 | | EAbs(n, e) -> 213 | let tv = newTyVar "a" 214 | let env1 = TypeEnv.remove env n 215 | let env2 = Map.union env1 (Map.singleton n (Scheme([], tv) )) 216 | let (s1, t1) = ti env2 e 217 | s1, TFun( Typ.apply s1 tv, t1) 218 | | EApp(e1, e2) -> 219 | let s1, t1 = ti 
env e1 220 | let s2, t2 = ti (TypeEnv.apply s1 env) e2 221 | let tv = newTyVar "a" 222 | let s3 = unify (Typ.apply s2 t1) (TFun(t2, tv)) 223 | Subst.compose (Subst.compose s3 s2) s1, Typ.apply s3 tv 224 | | ELet(x, e1, e2) -> 225 | let s1, t1 = ti env e1 226 | let env1 = TypeEnv.remove env x 227 | let scheme = generalize (TypeEnv.apply s1 env) t1 228 | let env2 = Map.add x scheme env1 229 | let s2, t2 = ti (TypeEnv.apply s1 env2 ) e2 230 | Subst.compose s2 s1, t2 231 | 232 | let tiPrim prim = 233 | match prim with 234 | | Int _ -> TInt 235 | | Bool _ -> TBool 236 | | Cond -> 237 | let a = newTyVar "a" 238 | TFun(TBool, TFun(a, TFun(a, a))) 239 | | RecordEmpty -> 240 | TRecord TRowEmpty 241 | | RecordSelect label -> 242 | let a = newTyVar "a" 243 | let r = newTyVar "r" 244 | TFun (TRecord (TRowExtend(label, a, r)) , a) 245 | | RecordExtend label -> 246 | let a = newTyVar "a" 247 | let r = newTyVar "r" 248 | TFun(a, TFun(TRecord r, TRecord(TRowExtend(label, a, r) ))) 249 | | RecordRestrict label -> 250 | let a = newTyVar "a" 251 | let r = newTyVar "r" 252 | TFun(TRecord(TRowExtend(label, a, r)), TRecord r) 253 | 254 | let typeInference env e = 255 | let s, t = ti env e 256 | Typ.apply s t 257 | 258 | let test1 = 259 | "r1: {y=2} :: {y = Int}", 260 | EApp (EApp(EPrim(RecordExtend "y"), (EPrim ( Int 2))), (EPrim RecordEmpty)) 261 | 262 | let test2 = 263 | "r2: {x=1, y=2 } :: {x = Int, y = Int}", 264 | EApp (EApp (EPrim(RecordExtend "x"), (EPrim (Int 1))), snd test1) 265 | 266 | let test3 = 267 | "r3: (_.y) {x=1, y=2 } :: Int", 268 | EApp (EPrim (RecordSelect "y"), snd test2) 269 | 270 | let test4 = 271 | "r4: let f = fun r -> (_.x) r in f \nexpecting :: {x = a4 | r5} -> a4", 272 | ELet("f", EAbs("r", EApp( EPrim(RecordSelect "x"), EVar "r")), EVar "f" ) 273 | 274 | let test5 = 275 | "r5: fun r -> (_.x) r\nexpecting :: {x = a3 | r2} -> a3", 276 | EAbs("r", EApp( EPrim(RecordSelect "x") , EVar "r") ) 277 | 278 | let b1 = 279 | "b1", 280 | ELet ("id", EAbs("x", EVar "x"), EVar "id") 281 | 282 | let b2 = 283 | "b2", 284 | ELet ("id", EAbs( "x", EVar "x"), EApp(EVar "id", EVar "id")) 285 | 286 | let b3 = 287 | "b3", 288 | ELet( "id", (EAbs("x", ELet( "y", EVar "x", EVar "y"))), EApp(EVar "id", EVar "id")) 289 | 290 | let b4 = 291 | "b4", 292 | ELet( "id", EAbs( "x", ELet("y", EVar "x", EVar "y")), EApp(EApp(EVar "id", EVar "id") ,EPrim (Int 2))) 293 | 294 | let b5 = 295 | "b5", 296 | ELet("id", EAbs( "x", EApp(EVar "x", EVar "x")), EVar "id") 297 | 298 | let b6 = 299 | "b6", 300 | EAbs("m", (ELet("y", EVar "m", ELet("x", (EApp(EVar "y", EPrim(Bool true))), EVar "x")))) 301 | 302 | let b7 = 303 | "b7: fun f -> (fun g -> (fun arg (f g arg)))", 304 | EAbs("f", EAbs("g", EAbs("arg", EApp(EVar("g"), EApp(EVar("f"), EVar("arg")))))) 305 | 306 | 307 | -------------------------------------------------------------------------------- /docs/syntax.md: -------------------------------------------------------------------------------- 1 | # Syntax Examples 2 | 3 | This document provide a list of code snippets in a structured manner in order to provide a basic overview of the language. It will also display a set of features that the language will have built-in that will solve specific issues related to low-level programming requirements. 4 | 5 | ## Values and Variables 6 | 7 | The language will provide basic features found in any general-purpose languages. Basic value types will built-in to represent simple to express data. 
8 | 9 | ### Mutable and Immutable values 10 | 11 | There is support for both immutable and mutable values. 12 | 13 | Values defined via `let`, like `let x = 5`, are **immutable**. `let` is **binding** a value of a certain type to a name. Because it is a binding, the value of a named binding can only be set once. 14 | 15 | ```Fsharp 16 | let x = 5 17 | let y = 6 18 | x <- 7 <- doesn't compile because x is immutable 19 | ``` 20 | 21 | Values defined via `var`, like `var x = 5`, are **mutable**. Because they can mutate over time, we call them variables. 22 | 23 | ```Javascript 24 | var e = 5 25 | e <- 7 26 | ``` 27 | 28 | ### Numbers 29 | 30 | Support for primitive numbers is defined as follows: 31 | 32 | - byte, int8 33 | - uint16, int16 34 | - uint32, int32 35 | - uint64, int64 36 | - float, double, fixed-point 37 | 38 | _Note: byte is unsigned_ 39 | 40 | ```Fsharp 41 | let (a:byte) = 17ub 42 | let (b:int8) = 0xA1b 43 | 44 | let x = 5 45 | let y = 6 46 | let z = x + y + 2*x*y <- support for infix operators 47 | 48 | let (x:float) = 7.2 <- support for shadowing 49 | let (fp:fixed[4,12]) = 0xA.E15 <- support for some refinement types 50 | 51 | 52 | // Doesn't compile because we have defined a fixed[4,12] and not a fixed[8,8] 53 | let (fpFailed:fixed[8,8]) = 0xA.E1 54 | ``` 55 | 56 | ### Strings 57 | 58 | Support for string values: 59 | 60 | - string 61 | - char 62 | 63 | ```Fsharp 64 | let (s1:string) = "Hello" 65 | let (s2:char) = 'a' 66 | 67 | let (s1':char []) = ['H';'e';'l';'l';'o'] 68 | s1' = s1 <- internally a string is a char[] 69 | // val it: true 70 | ``` 71 | 72 | ## Functions 73 | 74 | Functions are basic elements of the language that contain a behavior to apply over a set of input data. They take elements from an input set and return an element of an output set. We consider them first-class citizens of the language, and thus they are as easily manipulated as simple values. They are the basic element of composition, and by composing them we build up more complex behavior. 75 | 76 | ### Function definition 77 | 78 | The concept of a function is defined in such a way that functions have first-class support. 79 | 80 | Functions are **curried** by default, which allows us to **partially apply** them in order to generate a new function with some parameters fixed. A function has a type associated with it that we call a **signature**. 81 | 82 | ```elm 83 | add : int -> int -> int 84 | add x y = 85 | x + y 86 | 87 | add3 : int -> int -> int -> int 88 | add3 x y z = 89 | let tmp = x + y 90 | tmp + z 91 | 92 | // Functions can be passed as parameters, just like values 93 | addWithOperation : (int -> int) -> int -> int -> int 94 | addWithOperation fn x y = 95 | (fn x) + (fn y) 96 | ``` 97 | 98 | ### Anonymous functions 99 | 100 | It is often the case that users want to pass a function to another function on the fly, without ever formally defining it. We call these kinds of functions **anonymous** functions.
101 | 102 | ```elm 103 | addWithOperation : (int -> int) -> int -> int -> int 104 | addWithOperation fn x y = 105 | (fn x) + (fn y) 106 | 107 | addWithSquare : int -> int -> int 108 | addWithSquare x y = 109 | addWithOperation (fun z -> z * z) x y 110 | ``` 111 | 112 | ## Tuples 113 | 114 | TBD: 115 | 116 | ```FSharp 117 | let (t: int * float * string) = (5, 2.0, "hello") 118 | ``` 119 | 120 | or 121 | 122 | ```FSharp 123 | let (t: int, float, string) = (5, 2.0, "hello") 124 | ``` 125 | 126 | ## Algebraic Data Types 127 | 128 | The support for ADTs allows the user to manipulate and represent data in a typed manner, where the type models real-world data in a very clear and concise way. 129 | 130 | To represent any data, a logic-like semantics is needed, with the two basic constructs being: 131 | 132 | - a *Product* -> **Records** ***OR*** **Tuples** 133 | - a *Sum* -> **Unions** 134 | 135 | ```FSharp 136 | type Boolean = 137 | | True 138 | | False 139 | 140 | type DivisionResult = Result (result:int) (rest:int) 141 | 142 | //concrete record type 143 | type Being = 144 | { Age: int } 145 | 146 | //creates an anonymous record type with the fields x, y, z 147 | let point = 148 | { x = 3, y = 4, z = 5 } 149 | 150 | //field access 151 | let xCoord = point.x 152 | 153 | isOlderThan: int -> Being -> bool 154 | isOlderThan n being = 155 | being.Age > n 156 | 157 | type Person = 158 | { Being with 159 | Name : string 160 | } 161 | 162 | let person = 163 | { Age = 5 164 | Name = "A Name" } 165 | 166 | let result = isOlderThan 10 person 167 | //val result : false 168 | ``` 169 | 170 | Being is a record and so is Person; however, the Person record is composed from the Being record. Therefore, any function defined for Being can be applied to it, as well as functions specific to Person. 171 | 172 | Two new keywords are defined, *`union`* and *`record`*, to be explicit about the kind of ADT we are using. 173 | 174 | *Records have structural inheritance*; therefore functions can be used for records other than the one specified in the function signature, if that record is built as the composition of the specified record and other fields. 175 | 176 | Records also expose property functions so that a simple property extraction does not require defining an anonymous function: 177 | 178 | ```FSharp 179 | let mappedRecord = 180 | [point,{x=0, y=0}] 181 | |> List.map .x 182 | ``` 183 | 184 | Rather than the verbose anonymous function syntax to extract a value: 185 | 186 | ```FSharp 187 | let mappedRecord = 188 | [point,{x=0, y=0}] 189 | |> List.map (fun recordElement -> recordElement.x) 190 | ``` 191 | 192 | ### Partial application of unions 193 | 194 | Partial application can also be applied to union types: 195 | 196 | ```FSharp 197 | type Visibility = 198 | | All 199 | | Active 200 | | Completed int int 201 | 202 | //partial application 203 | let foo = Completed 20 204 | val: int -> Visibility 205 | 206 | //fully applied Visibility 207 | let foo2 = Completed 20 20 208 | val: Visibility 209 | ``` 210 | 211 | ## Extended Data Types 212 | 213 | **TODO** : This part might need some updates in the future. We should add: 214 | 215 | - list -> `[]` 216 | - array -> `[| |]` 217 | - seq 218 | - set 219 | - map 220 | - dictionary 221 | 222 | ### Array type 223 | 224 | An array type is a type that describes a collection of elements of the same type. These elements are accessible via indexes (identifying keys) that allow the stored values to be manipulated. 225 | The array type is dependent on the type of the elements inside it.
If we were to have an array of ints, then we would have the following types: 226 | 227 | ```FSharp 228 | // the type of Array could be theoretically defined as an inductive list // via the use of unions : 229 | type Array a = 230 | | Empty 231 | | Cons a (Array a) 232 | 233 | type ArrayOfInts = Array int <- type aliasing 234 | ``` 235 | 236 | As we can see, Array is dependent on the type 'a, which can be anything. 237 | In fact, we took the opportunity to show the usage of **generics** as being equivalent to defining a *single case union* with a generic parameter on that union case (whose constructor would internally be a function at the type level, allowing partial application over union cases). 238 | 239 | ### Construction 240 | 241 | Extended data types can be defined in any of the following ways: 242 | 243 | ```FSharp 244 | let list1 = [1,2,3,4] 245 | let list2 = 1 :: [2,3,4] 246 | let list3 = 1 :: 2 :: 3 :: 4 :: [] 247 | ``` 248 | 249 | ### Contracts (***to be defined later : based on traits and protocols***) 250 | 251 | ### Session types + linear types + typestate definitions (***to be defined later***) 252 | 253 | Session types + linear types + typestate definitions, or how to provide **first-class support for concurrency** in the language at the same level as functions. I will describe this in the future if we get to it, but it would be a unique feature to have! 254 | 255 | ## Control Flow 256 | 257 | ### Pattern Matching 258 | 259 | #### Type matches 260 | 261 | #### Union matches 262 | 263 | #### Active matches 264 | 265 | F# has a notion of [Active patterns](https://docs.microsoft.com/en-us/dotnet/fsharp/language-reference/active-patterns). 266 | 267 | We can define what is called an active match: a function that expects the target type followed by any other parameters and returns a boolean result. 268 | 269 | ```fsharp 270 | type Soldier = 271 | { hp: int 272 | agility: int 273 | strength: int } 274 | 275 | hp: Soldier -> int -> bool 276 | hp value pattern = 277 | pattern = value.hp 278 | 279 | let soldier = Soldier(hp: 99, agility: 10, strength: 10) 280 | 281 | match soldier with 282 | .hp 0: -> print "dead soldier" 283 | _: -> print "alive soldier" 284 | 285 | val : "alive soldier" 286 | ``` 287 | 288 | ### If Then Else 289 | 290 | ### Loops 291 | 292 | #### While loops 293 | 294 | #### For loops (maybe opt for purely iterations? maybe not?) 295 | 296 | #### Iteration loops over collections (maybe?) 297 | 298 | ## Modules and Namespaces 299 | 300 | ## Notation extension 301 | 302 | See: http://docs.idris-lang.org/en/latest/tutorial/syntax.html 303 | 304 | ## Constraints 305 | 306 | Constraints can be applied to all primitives; let's take string as an example: 307 | 308 | string< P > 309 | 310 | `string < P >` is a kind of string that is defined via a predicate `P : string -> bool` over the string. This is part of the refinement type system, which will be developed by adding a mechanism of constraints over values. 311 | 312 | ## Reference counting/tracking, allocation, deallocation 313 | 314 | ## Overall syntax history 315 | 316 | Explain that it is an ML-based syntax, lightweight with no curly braces and the like. Maybe put this at the beginning? 317 | 318 | ## TODO 319 | 320 | What to do in this document.
321 | 322 | - Extract the part that explain features, to be detailed in an other document 323 | - Detail **row-poly** = **structural inheritance** 324 | - Detail **Contracts** 325 | - Detail **structural equality** + referential equality + shared operations within types 326 | - Detail **Quotations** 327 | - Detail **session types** + **linear types** + **typestate oriented programing** 328 | - Detail the mechanism of predicate evaluated at compile-time to constraint types. 329 | - Might be other questions to solve here. -------------------------------------------------------------------------------- /src/POC.ParsingLexing/Lexer.fs: -------------------------------------------------------------------------------- 1 | namespace Compiler 2 | 3 | 4 | module LexerTypes = 5 | 6 | // ************************************* 7 | // Steps 8 | // ************************************* 9 | // v 1) Get lines from string file 10 | // v 2) Tokenize each line 11 | // v 2.1) Create all AP Tokenizer from matchToken function representing the lexical grammar of of each token (regex) 12 | // v 2.2) Create a function which defines the overall lexical grammar, rule priority and longest match invariants 13 | // v 2.3) Loop over that function and generate a token with current state of tokenization ... 14 | 15 | type Ident = Ident of string 16 | 17 | type Keyword = 18 | | Let 19 | // | If | Then | Else 20 | // | For | In | Do 21 | 22 | type Separator = 23 | | LParentheses | RParentheses 24 | // | LBracket | RBracket 25 | // | LBraces | RBraces 26 | | Comma | Colon 27 | | Arrow 28 | 29 | 30 | 31 | type Operator = 32 | | Add | Mult | Div | Subs | And | Or 33 | | Inferior | Superior | Equal 34 | 35 | type Literal = 36 | | Unit 37 | | Bool of bool 38 | | Int of int | Float of float 39 | | String of string 40 | 41 | type Comment = 42 | | LineComment of string 43 | | BlockComment of string 44 | 45 | type Indentation = 46 | | Indent of int 47 | | Dedent of int 48 | | NoDent // No Indentation and no Dedentation 49 | 50 | /// Represent the different possible tokens generated from the lexing 51 | type TokenType = 52 | | Identifier of Ident 53 | | Keyword of Keyword 54 | | Separator of Separator 55 | | Operator of Operator 56 | | Literal of Literal 57 | | Comment of Comment 58 | | Indentation of Indentation 59 | | EOF 60 | 61 | /// Represent the starting location of a particular element 62 | type Column = Column of int 63 | type Line = Line of int 64 | type Location = 65 | { 66 | Line : Line 67 | Column : Column 68 | } 69 | 70 | /// Represents a single Token, we need to know it's name and value (Token Type) and the associated location 71 | type Token = 72 | { 73 | TokenType : TokenType 74 | Location : Location 75 | } 76 | 77 | 78 | module LexerMappings = 79 | open LexerTypes 80 | 81 | let mapOps = 82 | [ "+" , Add 83 | "*" , Mult 84 | "/" , Div 85 | "-" , Subs 86 | "&&" , And 87 | "||" , Or 88 | "<" , Inferior 89 | ">" , Superior 90 | "=" , Equal 91 | ] 92 | |> Map.ofList 93 | 94 | 95 | let mapSeps = 96 | [ "->", Arrow 97 | "(" , LParentheses 98 | ")" , RParentheses 99 | "," , Comma 100 | ":" , Colon 101 | ] 102 | |> Map.ofList 103 | 104 | let mapKeywords = 105 | [ "let" , Let 106 | ] 107 | |> Map.ofList 108 | 109 | module LexerRegexDefinition = 110 | 111 | // Basic Regex 112 | let char = "[a-zA-Z]" 113 | let digit = "[0-9]" 114 | 115 | 116 | // Literals 117 | let int = sprintf "-?%s+" digit 118 | let float = sprintf "-?%s+\.%s+" digit digit 119 | let unit = sprintf "\([ ]*\)" 120 | let string = sprintf "\".*\"" 121 | let bool = 
sprintf "true|false" 122 | 123 | // Comments 124 | let lineComment = sprintf "//[^\n\r]*" 125 | let blockComment= sprintf "\(\*[^\*\)]*\*\)" 126 | 127 | 128 | // WhiteSpaces 129 | let whitespace = " " 130 | 131 | // End Of Files 132 | let eof = "\z" 133 | 134 | // NewLines 135 | let newline = "\n\r|\n|\r" 136 | 137 | // Identifiers or keywords 138 | let identifierOrKeyword = sprintf "[\'_]?%s[a-zA-Z0-9]*" char 139 | 140 | // Operators 141 | let operators = ["<\-";"\+";"\*";"/";"\-";"&&";"\|\|";"<";">";"="] |> String.concat("|") 142 | 143 | // Separators 144 | let separators = ["\->";"\(";"\)";"\[";"\]";"\{";"\}";",";":"] |> String.concat("|") 145 | 146 | module LexerActivePatterns = 147 | open LexerRegexDefinition 148 | open LexerTypes 149 | open LexerMappings 150 | open System.Text.RegularExpressions 151 | open System 152 | 153 | let matchToken patternToken input = 154 | let genericPattern = sprintf "\A(%s)((?s).*)" 155 | Regex.Match(input, genericPattern patternToken, RegexOptions.Multiline) 156 | |> fun mtch -> 157 | if mtch.Success then 158 | let token = mtch.Groups.[1].Value 159 | let leftOver = mtch.Groups.[2].Value 160 | let length = token.Length |> Column 161 | (token, leftOver, length) |> Some 162 | else 163 | None 164 | 165 | let (|Whitespace|_|) input = 166 | match matchToken whitespace input with 167 | | None -> None 168 | | Some (_ ,leftOver,length) -> Some (leftOver,length) 169 | 170 | let (|Newline|_|) input = 171 | match matchToken newline input with 172 | | None -> None 173 | | Some (_ ,leftOver,length) -> Some (leftOver,length) 174 | 175 | 176 | let (|Comment|_|) input = 177 | 178 | let (|LineComment|_|) input = 179 | match matchToken lineComment input with 180 | | None -> None 181 | | Some (comment,leftOver,length) -> 182 | Some (comment |> Comment.LineComment |> TokenType.Comment,leftOver,length) 183 | 184 | let (|BlockComment|_|) input = 185 | match matchToken blockComment input with 186 | | None -> None 187 | | Some (comment,leftOver,_) -> 188 | 189 | // we split the comment block such that we can handle the issue of the number of line the 190 | // comment block takes, but also the new column value 191 | let commentSplitted = comment.Split( [| "\r\n"; "\r" ; "\n" |] ,StringSplitOptions.None) 192 | let numberOfLines = commentSplitted.Length - 1 193 | let lastPieceOfComment = commentSplitted.[numberOfLines] 194 | let column = lastPieceOfComment.Length |> Column 195 | let line = numberOfLines |> Line 196 | 197 | Some 198 | (comment |> Comment.BlockComment |> TokenType.Comment, leftOver, line, column ) 199 | 200 | 201 | match input with 202 | | LineComment (comment,leftOver,length) -> Some (comment,leftOver, Line 0, length) 203 | | BlockComment (comment, leftOver, line, column) -> Some (comment, leftOver, line, column) 204 | | _ -> None 205 | 206 | 207 | let (|Separator|_|) input = 208 | match matchToken separators input with 209 | | None -> None 210 | | Some (separator,leftOver,length) -> 211 | Some (mapSeps.[separator] |> TokenType.Separator ,leftOver,length) 212 | 213 | 214 | let (|Operator|_|) input = 215 | match matchToken operators input with 216 | | None -> None 217 | | Some (operator,leftOver,length) -> 218 | Some (mapOps.[operator] |> TokenType.Operator ,leftOver,length) 219 | 220 | 221 | let (|IdentifierOrKeyword|_|) input = 222 | match matchToken identifierOrKeyword input with 223 | | None -> None 224 | | Some (idOrKey,leftOver,length) -> 225 | match mapKeywords.TryFind(idOrKey) with 226 | | Some keyword -> keyword |> TokenType.Keyword 227 | | None -> idOrKey 
|> Ident |> TokenType.Identifier 228 | |> fun data -> Some (data,leftOver,length) 229 | 230 | 231 | let (|Literal|_|) input = 232 | 233 | let (|Int|_|) input = 234 | match matchToken int input with 235 | | None -> None 236 | | Some (dataInt,leftOver,length) -> 237 | Some (Int32.Parse(dataInt) |> Literal.Int ,leftOver,length) 238 | 239 | let (|Float|_|) input = 240 | match matchToken float input with 241 | | None -> None 242 | | Some (dataFloat,leftOver,length) -> 243 | Some (Double.Parse(dataFloat) |> Literal.Float ,leftOver,length) 244 | 245 | let (|Unit|_|) input = 246 | match matchToken unit input with 247 | | None -> None 248 | | Some (_ ,leftOver,length) -> 249 | Some (Literal.Unit, leftOver,length) 250 | 251 | let (|String|_|) input = 252 | match matchToken string input with 253 | | None -> None 254 | | Some (dataString,leftOver,length) -> 255 | Some (Literal.String dataString ,leftOver,length) 256 | 257 | let (|Bool|_|) input = 258 | match matchToken bool input with 259 | | None -> None 260 | | Some (dataBool,leftOver,length) -> 261 | Some ( Boolean.Parse(dataBool) |> Literal.Bool ,leftOver,length) 262 | 263 | match input with 264 | | Int (data,leftOver,length) 265 | | Float (data,leftOver,length) 266 | | Unit (data,leftOver,length) 267 | | String (data,leftOver,length) 268 | | Bool (data,leftOver,length) -> Some (data |> TokenType.Literal,leftOver,length) 269 | | _ -> None 270 | 271 | 272 | let (|EndOfFile|_|) input = 273 | match matchToken eof input with 274 | | None -> None 275 | | Some (_ ,leftOver,length) -> Some (leftOver,length) // Might want to just return Some () 276 | 277 | 278 | module LexerTokenization = 279 | open LexerTypes 280 | open LexerActivePatterns 281 | 282 | let sumColumn (Column c1) (Column c2) = c1 + c2 |> Column 283 | let sumLine (Line l1) (Line l2) = l1 + l2 |> Line 284 | 285 | 286 | type LexerState = 287 | { 288 | InversedTokens : Token list 289 | CurrentLocation : Location 290 | } 291 | /// Order is important as the following rules are applied : 292 | /// 1) Longest match first 293 | /// 2) for a longest match the first regex in pattern matching that can match determines the token type 294 | 295 | let lexing input = 296 | let rec lexToken input state = 297 | match input with 298 | | Whitespace (leftOver,length) -> 299 | let newState = 300 | { state with 301 | CurrentLocation = 302 | { state.CurrentLocation with 303 | Column = sumColumn (state.CurrentLocation.Column) length 304 | } 305 | } 306 | 307 | lexToken leftOver newState 308 | 309 | | Newline (leftOver,_) -> 310 | let newState = 311 | { state with 312 | CurrentLocation = 313 | { state.CurrentLocation with 314 | Line = sumLine (state.CurrentLocation.Line) (Line 1) 315 | Column = Column 1 316 | } 317 | } 318 | lexToken leftOver newState 319 | 320 | 321 | 322 | | Comment (token,leftOver,line , column) -> 323 | let newColumn = 324 | let (Line line) = line 325 | if line = 0 then 326 | sumColumn (state.CurrentLocation.Column) column 327 | else 328 | column 329 | 330 | let token = 331 | { 332 | TokenType = token 333 | Location = state.CurrentLocation 334 | } 335 | let newState = 336 | { state with 337 | InversedTokens = token::state.InversedTokens 338 | CurrentLocation = 339 | { state.CurrentLocation with 340 | Line = sumLine (state.CurrentLocation.Line) line 341 | Column = newColumn 342 | } 343 | } 344 | lexToken leftOver newState 345 | 346 | 347 | | Separator (token,leftOver,length) 348 | | Operator (token,leftOver,length) 349 | | Literal (token,leftOver,length) 350 | | IdentifierOrKeyword 
(token,leftOver,length) -> 351 | let token = 352 | { 353 | TokenType = token 354 | Location = state.CurrentLocation 355 | } 356 | let newState = 357 | { state with 358 | InversedTokens = token::state.InversedTokens 359 | CurrentLocation = 360 | { state.CurrentLocation with 361 | Column = sumColumn (state.CurrentLocation.Column) length 362 | } 363 | } 364 | lexToken leftOver newState 365 | 366 | | EndOfFile _ -> 367 | let token = 368 | { 369 | TokenType = EOF 370 | Location = state.CurrentLocation 371 | } 372 | let state = { state with InversedTokens = token::state.InversedTokens } 373 | state.InversedTokens |> List.rev 374 | 375 | | _ -> failwith "unexpected stuff" 376 | 377 | 378 | let initState = 379 | { 380 | CurrentLocation = 381 | { 382 | Line = Line 1 383 | Column = Column 1 384 | } 385 | InversedTokens = [] 386 | } 387 | 388 | lexToken input initState 389 | 390 | 391 | let addIndentationTokens (tokens:Token list) = 392 | let rec aux (tokens:Token list) previousLineNumber previousColumnNumber tokensToReturn = 393 | match tokens with 394 | | [] -> List.rev tokensToReturn 395 | | token::tokens -> 396 | let location = token.Location 397 | if location.Line > previousLineNumber then 398 | let indentToken = 399 | let tokenType = 400 | if previousColumnNumber = location.Column then 401 | NoDent 402 | elif previousColumnNumber < location.Column then 403 | let (Column column) = location.Column 404 | let (Column prevColumn) = previousColumnNumber 405 | Indent (column - prevColumn) 406 | else 407 | let (Column column) = location.Column 408 | let (Column prevColumn) = previousColumnNumber 409 | Dedent (prevColumn - column) 410 | |> Indentation 411 | { 412 | TokenType = tokenType 413 | Location = 414 | { Line = location.Line 415 | Column = Column 0 416 | } 417 | } 418 | 419 | 420 | aux tokens (location.Line) (location.Column) (token::indentToken::tokensToReturn) 421 | else 422 | aux tokens (previousLineNumber) (previousColumnNumber) (token::tokensToReturn) 423 | 424 | aux tokens (tokens.Head.Location.Line) (tokens.Head.Location.Column) [] 425 | 426 | 427 | let lexicalAnalysis = lexing >> addIndentationTokens 428 | 429 | 430 | 431 | let input = 432 | "add: int -> int -> int 433 | add a b = 434 | a + b" 435 | 436 | let res = lexicalAnalysis input 437 | 438 | 439 | 440 | 441 | 442 | 443 | -------------------------------------------------------------------------------- /.paket/Paket.Restore.targets: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | $(MSBuildAllProjects);$(MSBuildThisFileFullPath) 8 | 9 | true 10 | $(MSBuildThisFileDirectory) 11 | $(MSBuildThisFileDirectory)..\ 12 | $(PaketRootPath)paket-files\paket.restore.cached 13 | $(PaketRootPath)paket.lock 14 | /Library/Frameworks/Mono.framework/Commands/mono 15 | mono 16 | 17 | $(PaketRootPath)paket.exe 18 | $(PaketToolsPath)paket.exe 19 | "$(PaketExePath)" 20 | $(MonoPath) --runtime=v4.0.30319 "$(PaketExePath)" 21 | 22 | 23 | <_PaketExeExtension>$([System.IO.Path]::GetExtension("$(PaketExePath)")) 24 | dotnet "$(PaketExePath)" 25 | 26 | 27 | "$(PaketExePath)" 28 | 29 | $(PaketRootPath)paket.bootstrapper.exe 30 | $(PaketToolsPath)paket.bootstrapper.exe 31 | "$(PaketBootStrapperExePath)" 32 | $(MonoPath) --runtime=v4.0.30319 "$(PaketBootStrapperExePath)" 33 | 34 | 35 | 36 | 37 | true 38 | true 39 | 40 | 41 | 42 | 43 | 44 | 45 | true 46 | $(NoWarn);NU1603 47 | 48 | 49 | 50 | 51 | /usr/bin/shasum $(PaketRestoreCacheFile) | /usr/bin/awk '{ print $1 }' 52 | /usr/bin/shasum 
$(PaketLockFilePath) | /usr/bin/awk '{ print $1 }' 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | $([System.IO.File]::ReadAllText('$(PaketRestoreCacheFile)')) 66 | $([System.IO.File]::ReadAllText('$(PaketLockFilePath)')) 67 | true 68 | false 69 | true 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | $(MSBuildProjectDirectory)\obj\$(MSBuildProjectFile).paket.references.cached 79 | 80 | $(MSBuildProjectFullPath).paket.references 81 | 82 | $(MSBuildProjectDirectory)\$(MSBuildProjectName).paket.references 83 | 84 | $(MSBuildProjectDirectory)\paket.references 85 | $(MSBuildProjectDirectory)\obj\$(MSBuildProjectFile).$(TargetFramework).paket.resolved 86 | true 87 | references-file-or-cache-not-found 88 | 89 | 90 | 91 | 92 | $([System.IO.File]::ReadAllText('$(PaketReferencesCachedFilePath)')) 93 | $([System.IO.File]::ReadAllText('$(PaketOriginalReferencesFilePath)')) 94 | references-file 95 | false 96 | 97 | 98 | 99 | 100 | false 101 | 102 | 103 | 104 | 105 | true 106 | target-framework '$(TargetFramework)' 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[0]) 124 | $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[1]) 125 | $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[4]) 126 | 127 | 128 | %(PaketReferencesFileLinesInfo.PackageVersion) 129 | All 130 | 131 | 132 | 133 | 134 | $(MSBuildProjectDirectory)/obj/$(MSBuildProjectFile).paket.clitools 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | $([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[0]) 144 | $([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[1]) 145 | 146 | 147 | %(PaketCliToolFileLinesInfo.PackageVersion) 148 | 149 | 150 | 151 | 155 | 156 | 157 | 158 | 159 | 160 | false 161 | 162 | 163 | 164 | 165 | 166 | <_NuspecFilesNewLocation Include="$(BaseIntermediateOutputPath)$(Configuration)\*.nuspec"/> 167 | 168 | 169 | 170 | $(MSBuildProjectDirectory)/$(MSBuildProjectFile) 171 | true 172 | false 173 | true 174 | $(BaseIntermediateOutputPath)$(Configuration) 175 | $(BaseIntermediateOutputPath) 176 | 177 | 178 | 179 | <_NuspecFiles Include="$(AdjustedNuspecOutputPath)\*.nuspec"/> 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 232 | 233 | 274 | 275 | 276 | 277 | -------------------------------------------------------------------------------- /src/POC.ParsingLexing/Parser.fs: -------------------------------------------------------------------------------- 1 | namespace Compiler 2 | open LexerTypes 3 | open LexerTokenization 4 | open System 5 | open System.Collections.Generic 6 | open System.Reflection.Metadata 7 | open System.Runtime.Versioning 8 | 9 | 10 | // module AST = 11 | 12 | // // ***************************************************************************************************** 13 | // // AST => might need to change but these Unions represent the AST 14 | // // ***************************************************************************************************** 15 | // type Name = Name of string 16 | // type VarName = VarName of string 17 | 18 | 19 | // type Literal = 20 | // | String of string 21 | // | Bool of bool 22 | // | Int of int 23 | // | Float of float 24 | 25 | // type Expr = 26 | // // For example : x in `f(x)` => Var "x" 27 | // | Var of VarName 28 | // | Literal of Literal 29 | // | Let of Name * Expr * Expr 30 | // | Lambda of Name * Expr 31 | // | Application of 
Expr * Expr 32 | // | If of Expr * Expr * Expr 33 | // open AST 34 | 35 | 36 | // module ParserTypes = 37 | // open AST 38 | 39 | // let defaultIndent = 4 40 | // type IInvokeIndentRule = 41 | // abstract member Invoke : Location 42 | 43 | // type IndentLevel = IndentLevel of int 44 | 45 | // type IndentRule = 46 | // | IndentRule of (Location -> Location -> IndentLevel -> unit) 47 | // member x.Invoke l1 l2 indentLevel = 48 | // let (IndentRule fn) = x 49 | // fn l1 l2 indentLevel 50 | 51 | // let checkIRLevel = 52 | // fun l1 l2 (IndentLevel indent) -> 53 | // if sumColumn (defaultIndent * indent |> Column ) l1.Column = l2.Column then 54 | // () 55 | // else 56 | // failwith "not properly indented" 57 | // |> IndentRule 58 | 59 | 60 | // let sameLine l1 l2 = 61 | // if l1.Line = l2.Line then 62 | // () 63 | // else 64 | // failwith "not same line" 65 | // let lowerLine l1 l2 = 66 | // if l1.Line < l2.Line then 67 | // () 68 | // else 69 | // failwith "not same line" 70 | // let higherLine l1 l2 = 71 | // if l1.Line > l2.Line then 72 | // () 73 | // else 74 | // failwith "not same line" 75 | 76 | // let sameColumn l1 l2 = 77 | // if l1.Column = l2.Column then 78 | // () 79 | // else 80 | // failwith "not same line" 81 | // let lowerColumn l1 l2 = 82 | // if l1.Column < l2.Column then 83 | // () 84 | // else 85 | // failwith "not same line" 86 | // let higherColumn l1 l2 = 87 | // if l1.Column > l2.Column then 88 | // () 89 | // else 90 | // failwith "not same line" 91 | 92 | 93 | // type IfThenElse_IndentRule = 94 | // { IfLocation : Location 95 | // ThenLocation : Location 96 | // ElseLocation : Location 97 | // ExprIfLocation : Location 98 | // ExprElseLocation : Location 99 | // } 100 | // member x.Invoke() = 101 | // do sameLine x.IfLocation x.ThenLocation 102 | // do checkIRLevel.Invoke x.IfLocation x.ElseLocation (IndentLevel 0) 103 | // do checkIRLevel.Invoke x.ExprIfLocation x.IfLocation (IndentLevel 1) 104 | // do checkIRLevel.Invoke x.ExprIfLocation x.ExprElseLocation (IndentLevel 0) 105 | 106 | // let upToNextToken (condition: Token -> bool) (tokens: Token list) = 107 | // let rec aux (tokens:Token list) (tokensConsumed) = 108 | // match tokens with 109 | // | [] -> failwith "No Next Token" 110 | // | token::left -> 111 | // if condition token then 112 | // (List.rev tokensConsumed,token,left) 113 | // else 114 | // aux left (token::tokensConsumed) 115 | 116 | // aux tokens [] 117 | 118 | 119 | // let nextKeywordTokenIs (keyword:Keyword) (tokens:Token list) = 120 | // let condition (token:Token) = 121 | // match token.TokenType with 122 | // | TokenType.Keyword _ -> true 123 | // | _ -> false 124 | 125 | // let (consumed,nextToken,left) = upToNextToken condition tokens 126 | // if Keyword keyword <> nextToken.TokenType then 127 | // failwith "Unexpected Keyword" 128 | 129 | // (consumed,nextToken,left) 130 | 131 | // let nextTokenBlock (location:Location) (tokens:Token list) = 132 | // let condition (token:Token) = token.Location.Column <= location.Column 133 | // let (consumed,nextToken,left) = upToNextToken condition tokens 134 | // (consumed,nextToken::left) 135 | 136 | 137 | // let (|IfThenElse|_|) (tokens:Token list) = 138 | // let ifToken = tokens.Head 139 | // match ifToken.TokenType with 140 | // | TokenType.Keyword(Keyword.If) -> 141 | // let (condition,thenToken,tokens) = nextKeywordTokenIs Keyword.Then tokens 142 | // // do call for condition 143 | // let (expr1,elseToken,tokens) = nextKeywordTokenIs Keyword.Else tokens 144 | // let (expr2,tokens) = nextTokenBlock 
ifToken.Location tokens 145 | // // do call for expr1 and expr2 146 | // let (exprIfLocation:Location) = getLocation expr1 147 | // let (ExprElseLocation:Location) = getLocation expr2 148 | 149 | // let rule = 150 | // { IfLocation = ifToken.Location 151 | // ThenLocation = thenToken.Location 152 | // ElseLocation = elseToken.Location 153 | // ExprIfLocation = exprIfLocation 154 | // ExprElseLocation = ExprElseLocation 155 | // } 156 | // rule.Invoke() 157 | 158 | 159 | // None 160 | 161 | 162 | 163 | // // let x = 164 | // // Some Block 165 | // // 166 | 167 | // let ( [|a|]) = [|10|] 168 | 169 | // type SubBlock = SubBlock of SubBlock list 170 | 171 | // type Block = 172 | // { 173 | // Expr : Expr 174 | // SubBlocks : Block 175 | // } 176 | 177 | // ************************************* 178 | // Steps 179 | // ************************************* 180 | // 1) Get token list 181 | // 2) Define set of rules (recursive) to generate each possible union case 182 | // 2.1) Create all AP Tokenizer from matchToken function representing the lexical grammar of of each token (regex) 183 | // 2.2) Create a function which defines the overall lexical grammar, rule priority and longest match invariants 184 | // 2.3) Loop over that function and generate a token with current state of tokenization ... 185 | 186 | // add: int -> int -> int 187 | // add a b = 188 | // a + b 189 | 190 | 191 | // quetzalProg := list of function 192 | 193 | // function := signature? function_def 194 | // signature := identifier COLON type_def 195 | 196 | // identifier := IDENTIFIER 197 | 198 | // type_def := identifier EQUAL function_body 199 | 200 | // function_def := identifier {parameter}+ EQUAL function_body 201 | // function_body := expr 202 | 203 | // expr := LITERAL 204 | // | identifier 205 | // | LPar expr RPar 206 | // // | lookup expr 207 | // | expr expr 208 | // | expr BinOp expr 209 | // | UnaryOp expr 210 | // | expr {COMMA expr}+ 211 | // | expr {expr}+ 212 | // | LET let_binding expr 213 | 214 | // parameter := identifier {COMMA parameter}* 215 | // | LPar parameter RPar 216 | // | LPar parameter COLON type_def RPar 217 | 218 | // let_binding := parameter = expr 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | // type_def := LPar type_def RPar type_def_prime 227 | // | type_value type_def_prime 228 | // type_def_prime := type_def ARROW type_def type_def_prime 229 | // | epsilon 230 | // type_value := identifier 231 | 232 | // parameter := identifier {COMMA parameter}* 233 | // | LPar parameter RPar 234 | // | LPar parameter COLON type_def RPar 235 | 236 | // expr := LITERAL expr_prime 237 | // | identifier expr_prime 238 | // | LPar expr RPar expr_prime 239 | // | UnaryOp expr expr_prime 240 | // | LET let_binding expr expr_prime 241 | 242 | // expr_prime: = BinOp expr expr_prime 243 | // | {COMMA expr}+ expr_prime 244 | // | {expr}+ expr_prime 245 | // | epsilon 246 | 247 | 248 | // let_binding := parameter = expr 249 | 250 | // function_body := expr 251 | 252 | // function_def := identifier {parameter}+ EQUAL function_body 253 | 254 | // signature := identifier COLON type_def 255 | 256 | // function := signature? 
function_def 257 | 258 | 259 | // quetzalProg := (function)* EOF 260 | 261 | 262 | 263 | // module test = 264 | // open LexerTypes 265 | 266 | // // STRUCTURES =>> ANALYZER COMPONENTS 267 | 268 | // // string =>> LEXER 269 | // // =>> Token Stream =>> PARSER 270 | // // =>> Parse Tree =>> CONVERTER 271 | // // =>> AST 272 | 273 | 274 | 275 | // // type_def := LPar type_def RPar type_def_prime 276 | // // | type_value type_def_prime 277 | // // type_value := identifier 278 | // type PTTypeDef = 279 | // | PTTypeDefBlock of PTTypeDef * PTTypeDefPrime 280 | // | PTTypeValue of Ident * PTTypeDefPrime 281 | 282 | // // type_def_prime := ARROW type_def type_def_prime 283 | // // | epsilon 284 | // and PTTypeDefPrime = 285 | // | PTArrow of PTTypeDef * PTTypeDefPrime 286 | // | PTEpsilon 287 | 288 | // // parameter := identifier {COMMA parameter}* 289 | // // | LPar parameter RPar 290 | // // | LPar parameter COLON type_def RPar 291 | // type PTParam = 292 | // | PTMultParam of Ident * PTParam list 293 | // | PTParamBlock of PTParam 294 | // | PTParamTyped of PTParam * PTTypeDef 295 | 296 | // // expr := LITERAL expr_prime 297 | // // | identifier expr_prime 298 | // // | LPar expr RPar expr_prime 299 | // // | UnaryOp expr expr_prime 300 | // // | LET let_binding expr expr_prime 301 | // type PTExpr = 302 | // | PTLit of Literal * PTExprPrime 303 | // | PTIdent of Ident * PTExprPrime 304 | // | PTBlock of PTExpr * PTExprPrime 305 | // | PTUnaryOp of Operator * PTExpr * PTExprPrime 306 | // | PTLet of PTLetBinding * PTExpr * PTExprPrime 307 | 308 | // // expr_prime: = BinOp expr expr_prime 309 | // // | {COMMA expr}+ expr_prime 310 | // // | {expr}+ expr_prime 311 | // // | epsilon 312 | // and PTExprPrime = 313 | // | PTBinaryOp of Operator * PTExpr * PTExprPrime 314 | // | PTTuple of PTExpr list * PTExprPrime 315 | // | PTAppli of PTExpr list * PTExprPrime 316 | // | PTEpsilon 317 | 318 | // // let_binding := parameter = expr 319 | // and PTLetBinding = PTLetBinding of PTParam * PTExpr 320 | 321 | // // function_body := expr 322 | // type PTFunctionBody = PTExpr 323 | 324 | // // function_def := identifier {parameter}+ EQUAL function_body 325 | // type PTFunctionDef = 326 | // PTFunctionDef of Ident * PTParam list * PTFunctionBody 327 | 328 | // // signature := identifier COLON type_def 329 | // type PTSignature = PTSignature of Ident * PTTypeDef 330 | 331 | // // function := signature? 
function_def 332 | // type PTFunction = PTFunction of PTSignature option * PTFunctionDef 333 | 334 | // // quetzalProg := (function)* EOF 335 | // type PTQuetzalProg = PTQuetzalProg of PTFunction list 336 | 337 | // let rec parsePTTypeDef (tokens : Token list) = 338 | // let paramToken,tokens = tokens.Head,tokens.Tail 339 | // match paramToken.TokenType with 340 | // | Separator LParentheses -> 341 | // let (ptTypeDef1,tokens):PTTypeDef * Token list = parsePTTypeDef tokens 342 | // let paramToken,tokens = tokens.Head,tokens.Tail 343 | // match paramToken.TokenType with 344 | // | Separator RParentheses -> 345 | // let ptTypeDef2,tokens = parsePTTypeDefPrime tokens 346 | // PTTypeDefBlock (ptTypeDef1,ptTypeDef2),tokens 347 | // | _ -> failwith "unexpected token" 348 | // | Identifier ident -> 349 | // let ptTypeDef,tokens = parsePTTypeDefPrime tokens 350 | // PTTypeValue (ident,ptTypeDef),tokens 351 | // | _ -> failwith "unexpected token" 352 | 353 | // and parsePTTypeDefPrime (tokens : Token list) = 354 | // try 355 | // let paramToken,tokens = tokens.Head,tokens.Tail 356 | // match paramToken.TokenType with 357 | // | Separator Arrow -> 358 | // let ptTypeDef2,tokens = parsePTTypeDef tokens 359 | // let ptTypeDefPrime,tokens = parsePTTypeDefPrime tokens 360 | // PTArrow (ptTypeDef2,ptTypeDefPrime),tokens 361 | // | _ -> failwith "unexpected stuff" 362 | 363 | // with 364 | // | _ -> PTTypeDefPrime.PTEpsilon,tokens 365 | 366 | 367 | 368 | // let rec parsePTParam (tokens : Token list) = 369 | // let paramToken,tokens = tokens.Head,tokens.Tail 370 | // match paramToken.TokenType with 371 | // | Identifier ident -> 372 | // let rec auxParams (tokens : Token list) (parameters : PTParam list) = 373 | // try 374 | // let ptParam,tokens = parsePTParam tokens 375 | // auxParams tokens (ptParam::parameters) 376 | // with 377 | // | _ -> 378 | // PTMultParam (ident,List.rev parameters),tokens 379 | 380 | // auxParams tokens [] 381 | // | Separator LParentheses -> 382 | // let ptParam,tokens = parsePTParam tokens 383 | // let paramToken,tokens = tokens.Head,tokens.Tail 384 | // match paramToken.TokenType with 385 | // | Separator RParenthese -> 386 | // PTParamBlock ptParam , tokens 387 | // | Separator Colon -> 388 | // let ptTypeDef, tokens = parsePTTypeDef tokens 389 | // let paramToken,tokens = tokens.Head,tokens.Tail 390 | // match paramToken.TokenType with 391 | // | Separator RParenthese -> 392 | // PTParamTyped (ptParam,ptTypeDef) , tokens 393 | // | _ -> failwith "unexpected token" 394 | // | _ -> failwith "unexpected token" 395 | // | _ -> failwith "unexpected token" 396 | 397 | 398 | // let rec parsePTExpr (tokens : Token list) = 399 | // let exprToken,tokens = tokens.Head,tokens.Tail 400 | // match exprToken.TokenType with 401 | // | Literal literal -> 402 | // let ptExprPrime, tokens = parsePTExprPrime tokens 403 | // PTLit (literal , ptExprPrime) , tokens 404 | // | Identifier ident -> 405 | // let ptExprPrime, tokens = parsePTExprPrime tokens 406 | // PTIdent (ident , ptExprPrime) , tokens 407 | // | Separator LParentheses -> 408 | // let (ptExpr,tokens):PTExpr * Token list = parsePTExpr tokens 409 | // let rPar,tokens = tokens.Head,tokens.Tail 410 | // match rPar.TokenType with 411 | // | Separator RParentheses -> 412 | // let ptExprPrime, tokens = parsePTExprPrime tokens 413 | // PTBlock (ptExpr , ptExprPrime) , tokens 414 | // | _ -> failwith "unexpected token, expected a ) separator" 415 | // | Operator Operator.Subs -> 416 | // let ptExpr,tokens = parsePTExpr tokens 417 | // let 
ptExprPrime,tokens = parsePTExprPrime tokens 418 | // PTUnaryOp (Operator.Subs , ptExpr , ptExprPrime) , tokens 419 | // | Keyword Let -> 420 | // let ptLetBinding,tokens = parsePTLetBinding tokens 421 | // let ptExpr,tokens = parsePTExpr tokens 422 | // let ptExprPrime,tokens = parsePTExprPrime tokens 423 | // PTLet (ptLetBinding , ptExpr, ptExprPrime) , tokens 424 | // | _ -> failwith "unexpected token" 425 | 426 | 427 | // and parsePTExprPrime (tokens : Token list) = 428 | // let (|ParsePTBinaryOp|_|) (tokens : Token list) = 429 | // match tokens with 430 | // | [] -> None 431 | // | [token] when token.TokenType = EOF -> Some (PTEpsilon,tokens) 432 | // | token :: tokens -> 433 | // match token.TokenType with 434 | // | Operator operator -> 435 | // let ptExpr,tokens = parsePTExpr tokens 436 | // let ptExprPrime,tokens = parsePTExprPrime tokens 437 | // (PTBinaryOp (operator,ptExpr,ptExprPrime) , tokens) 438 | // |> Some 439 | // | _ -> None 440 | 441 | // let (|ParsePTTuple|_|) (tokens : Token list) = 442 | // let rec aux (tokens : Token list) (tuple : PTExpr list) = 443 | // match tokens with 444 | // | [] -> None 445 | // | [token] when token.TokenType = EOF -> Some (PTEpsilon,tokens) 446 | // | token :: tokens -> 447 | // match token.TokenType with 448 | // | Separator Comma -> 449 | // let ptExpr,tokens = parsePTExpr tokens 450 | // aux tokens (ptExpr::tuple) 451 | // | _ -> 452 | // let ptExprPrime,tokens = parsePTExprPrime tokens 453 | // (PTTuple (List.rev tuple,ptExprPrime) , tokens) 454 | // |> Some 455 | // aux tokens [] 456 | 457 | // let (|ParsePTAppli|_|) (tokens : Token list) = 458 | // let rec aux (tokens : Token list) (exprs : PTExpr list) = 459 | // try 460 | // let ptExpr,left = parsePTExpr tokens 461 | // aux left (ptExpr::exprs) 462 | // with 463 | // | _ -> 464 | // let ptExprPrime,tokens = parsePTExprPrime tokens 465 | // (PTAppli (List.rev exprs,ptExprPrime) , tokens) 466 | // |> Some 467 | // aux tokens [] 468 | 469 | // match tokens with 470 | // | ParsePTBinaryOp (ptExprPrime,tokens) 471 | // | ParsePTTuple (ptExprPrime,tokens) 472 | // | ParsePTAppli (ptExprPrime,tokens) -> ptExprPrime , tokens 473 | // | _ -> 474 | // match tokens with 475 | // | [token] when token.TokenType = EOF -> PTEpsilon , tokens.Tail 476 | // | _ -> failwith "unexpected token" 477 | 478 | 479 | // and parsePTLetBinding (tokens : Token list) = 480 | // let ptParam,tokens = parsePTParam tokens 481 | // let exprEqual,tokens = tokens.Head,tokens.Tail 482 | // match exprEqual.TokenType with 483 | // | Operator Equal -> 484 | // let ptExpr,tokens = parsePTExpr tokens 485 | // (PTLetBinding (ptParam,ptExpr),tokens) 486 | // | _ -> failwith "unexpected token" 487 | 488 | 489 | // let parsePTFunctionBody = parsePTExpr 490 | 491 | // let parsePTParams (tokens:Token list) = 492 | // let rec aux (tokens:Token list) (iCan:bool) (ptParams:PTParam list) = 493 | // if iCan then 494 | // let iCan,ptParams,rest = 495 | // try 496 | // let ptParam,rest = parsePTParam tokens 497 | // true,ptParam::ptParams,rest 498 | // with 499 | // | _ -> 500 | // false,ptParams,tokens 501 | // aux rest iCan ptParams 502 | // else 503 | // (List.rev ptParams,tokens) 504 | // aux tokens true [] 505 | 506 | // let parsePTFunctionDef (tokens : Token list) = 507 | // let identifier,tokens = tokens.Head,tokens.Tail 508 | // match identifier.TokenType with 509 | // | Identifier ident -> 510 | // let ptParams,tokens = parsePTParams tokens 511 | // let equalToken,tokens = tokens.Head,tokens.Tail 512 | // match 
equalToken.TokenType with 513 | // | Operator Equal -> 514 | // let ptFunctionBody,tokens = parsePTFunctionBody tokens 515 | // PTFunctionDef (ident, ptParams, ptFunctionBody) , tokens 516 | // | _ -> failwith "unexpected token, expected an = operator " 517 | // | _ -> failwith "unexpected token, expected an identifier" 518 | 519 | 520 | // let parsePTSignature (tokens : Token list) = 521 | // let identifier,tokens = tokens.Head,tokens.Tail 522 | // match identifier.TokenType with 523 | // | Identifier ident -> 524 | // let colon,tokens = tokens.Head,tokens.Tail 525 | // match colon.TokenType with 526 | // | Separator Colon -> 527 | // let ptTypeDef,tokens = parsePTTypeDef tokens 528 | // Some (PTSignature (ident, ptTypeDef)) , tokens 529 | // | _ -> None , tokens 530 | // | _ -> failwith "unexpected token, expected an identifier" 531 | 532 | // let parsePTFunction (tokens : Token list) = 533 | // // let rec aux (tokens : Token list) 534 | // let ptSignature,tokens = parsePTSignature tokens 535 | // let ptFunctionDef,tokens = parsePTFunctionDef tokens 536 | // PTFunction (ptSignature,ptFunctionDef) , tokens 537 | 538 | 539 | // let parsePTFunctions (tokens:Token list) = 540 | // let rec aux (tokens:Token list) (iCan:bool) (ptFunctions:PTFunction list) = 541 | // if iCan then 542 | // let iCan,ptFunctions,rest = 543 | // try 544 | // let ptFunction,rest = parsePTFunction tokens 545 | // true,ptFunction::ptFunctions,rest 546 | // with 547 | // | _ -> 548 | // false,ptFunctions,tokens 549 | // aux rest iCan ptFunctions 550 | // else 551 | // (List.rev ptFunctions,tokens) 552 | // aux tokens true [] 553 | 554 | // let parsePTQuetzalProg (tokens:Token list) = 555 | // let ptFunctions,rest = parsePTFunctions(tokens) 556 | // match rest with 557 | // | [token] when token.TokenType = EOF -> PTQuetzalProg ptFunctions 558 | // | _ -> failwith "parse error, expected EOF" 559 | 560 | 561 | // [] 562 | // let main argv = 563 | 564 | // let input = 565 | // "add: int -> int -> int 566 | // add a b = 567 | // a + b 568 | 569 | // t x = 570 | // let y = x + 5 571 | // y + 6" 572 | 573 | 574 | 575 | // let res = 576 | // lexing 577 | // >> parsePTQuetzalProg 578 | 579 | // printfn "START" 580 | 581 | // printfn "%A" (res input) 582 | 583 | // Async.Sleep 50000 |> Async.RunSynchronously 584 | 585 | // 0 586 | 587 | 588 | 589 | // type Bin = 590 | // | Plus 591 | // | Less 592 | // | Times 593 | // | Div 594 | // | Pow 595 | 596 | // type Un = 597 | // | Minus 598 | 599 | // type P = 600 | // | Lit of int 601 | // | Par of Arithmetic 602 | // | Un of Un * P 603 | 604 | // and Arithmetic = 605 | // | Arithm of P * (Bin * P) list 606 | 607 | 608 | 609 | 610 | 611 | // let t = """ 612 | // 5 613 | // 5 + 3 614 | // -5 + -3 615 | // 5 + 3 * 2 + 5 616 | // 5 + 3 / 2 + 5 617 | // 5 ^ 3 * 2 + 5 618 | // (5 + 3) * 2 + 5 619 | // 5 ^ (3 * 2) + 5 620 | // """ 621 | 622 | // let binPrec = 623 | // [ "+" , 1 624 | // "-" , 1 625 | // "*" , 2 626 | // "/" , 2 627 | // "^" , 3 628 | // ] |> Map.ofList 629 | 630 | 631 | 632 | 633 | 634 | 635 | 636 | 637 | // [] 638 | // module Result = 639 | 640 | // type IFailure = 641 | // abstract member Reason : unit -> string 642 | // abstract member GetStackTrace : unit -> string 643 | // abstract member GetStackTraceUpToLevel : int -> string 644 | 645 | // [] 646 | // [] 647 | // type Result<'a> = 648 | // | Success of 'a 649 | // | Failure of IFailure 650 | 651 | // [] 652 | // [] 653 | // type Delayed<'a> = Delayed of (unit -> Result<'a>) 654 | 655 | // type ResultBuilder() = 656 
| // member __.Bind(Delayed m : Delayed<'a>, f : 'a -> Result<'b>) : Result<'b> = 657 | // match m() with 658 | // |Success elem -> f elem 659 | // |Failure s -> Failure s 660 | 661 | // member __.Return (x:'a) : Result<'a> = Success x 662 | // member __.ReturnFrom (Delayed m:Delayed<'a>) : Result<'a> = m() 663 | // member __.Zero () = Success () 664 | 665 | // member __.Combine (a:Result<'a>,b:unit -> Result<'a>) : Result<'a> = 666 | // let runnedB = b() 667 | // match a,runnedB with 668 | // | Success _ , Success b1 -> Success b1 669 | // | Success _ , Failure b1 -> Failure b1 670 | // | Failure a1, Success _ -> Failure a1 671 | // | Failure a1, Failure _ -> Failure a1 672 | 673 | // member __.Delay(f:unit -> Result<'a>) : (unit -> Result<'a>) = f 674 | // member __.Run(f:unit -> Result<'a>) = Delayed f 675 | 676 | // let result = ResultBuilder() 677 | 678 | // let runResult (Delayed res) = res() 679 | 680 | 681 | 682 | // module State = 683 | 684 | // type IInternal = interface end 685 | 686 | // type State<'t, 'state> = 687 | // | State of ('state -> 't * 'state) 688 | 689 | 690 | // type Delayed<'t,'state> = Delayed of (unit -> State<'t,'state>) 691 | 692 | // let bind (f:'t -> State<'u,'state>) (Delayed m:Delayed<'t,'state>) : State<'u,'state> = 693 | // fun s -> 694 | // let (State stateFunction) = m() 695 | // let (a, s') = stateFunction s 696 | // let (State stateFunction') = (f a) 697 | // stateFunction' s' 698 | // |> State 699 | 700 | 701 | // type StateBuilder() = 702 | // member __.Bind(m:Delayed<'t,'state>, k:'t -> State<'t,'state>) : State<'t,'state> = bind k m 703 | 704 | // member __.Return(a:'t) : State<'t,'state> = State (fun s -> (a,s)) 705 | // member __.ReturnFrom(Delayed m:Delayed<'t,'state>) : State<'t,'state> = m() 706 | // member this.Zero() = this.Return () 707 | 708 | // member this.Combine(s1 : State<'t,'state>, f2 : unit -> State<'t,'state>) : State<'t,'state> = 709 | // this.Bind(Delayed (fun () -> s1), fun _ -> f2()) 710 | 711 | // member __.Delay(f: unit -> State<'t,'state>) = f 712 | // member __.Run(f: unit -> State<'t,'state>) = Delayed f 713 | 714 | 715 | 716 | // // type StateBuilder< ^I when ^I :> IInternal>() = 717 | // // member inline __.Bind(m:Delayed<'t,'state>, k:'t -> State<'t,'state>) : State<'t,'state> = bind k m 718 | 719 | // // member __.Return(a:'t) : State<'t,'state> = State (fun s -> (a,s)) 720 | // // member __.ReturnFrom(Delayed m:Delayed<'t,'state>) : State<'t,'state> = m() 721 | // // member this.Zero() = this.Return () 722 | 723 | // // member this.Combine(s1 : State<'t,'state>, f2 : unit -> State<'t,'state>) : State<'t,'state> = 724 | // // this.Bind(Delayed (fun () -> s1), fun _ -> f2()) 725 | 726 | // // member __.Delay(f: unit -> State<'t,'state>) = f 727 | // // member __.Run(f: unit -> State<'t,'state>) = Delayed f 728 | 729 | // // let state = new StateBuilder() 730 | 731 | // let getState = 732 | // Delayed (fun () -> (State (fun s -> (s,s)))) 733 | // let putState s = 734 | // Delayed (fun () -> (State (fun _ -> ((),s)))) 735 | 736 | // let eval (Delayed m) state = 737 | // let (State d) = m() 738 | // d state |> fst 739 | // let exec (Delayed m) state = 740 | // let (State d) = m() 741 | // d state |> snd 742 | 743 | // let empty = fun s -> ((), s) 744 | 745 | 746 | 747 | // let state = new StateBuilder() 748 | 749 | 750 | // let t = 751 | // state{ 752 | // do! putState 3.2 753 | // let st = getState 754 | // return 5 755 | // } 756 | 757 | // let r = eval t 5.0 758 | // let s = exec t 2. 
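// // Expected behaviour of the sketch above, assuming Bind is generalized to accept ('T -> State<'U,'State>)
// // (the underlying `bind` already is): eval t 5.0 returns the computation's value, 5, while
// // exec t 2. returns the final state, 3.2, written by putState.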
759 | 760 | 761 | 762 | 763 | 764 | 765 | 766 | 767 | 768 | 769 | 770 | 771 | 772 | 773 | 774 | 775 | 776 | 777 | 778 | 779 | 780 | 781 | 782 | 783 | 784 | 785 | 786 | 787 | 788 | // module IMonad = 789 | // type IInternal<'a> = interface end 790 | 791 | // type IDelayed<'a> = interface end 792 | 793 | // type IMonadBuilder = 794 | // abstract member Bind< 'a, 't when 't :> IDelayed<'a> > : 't * ('a -> 't) -> 't 795 | // // IDelayed<'a> * ('a -> IDelayed<'b>) -> IDelayed<'b> 796 | // abstract member Return : 'a -> IDelayed<'a> 797 | // abstract member ReturnFrom : IDelayed<'a> -> IInternal<'a> 798 | // abstract member Zero : unit -> IInternal 799 | // abstract member Combine : IInternal<'a> * (unit -> IInternal<'a>) -> IInternal<'a> 800 | // abstract member Delay : (unit -> IInternal<'a>) -> (unit -> IInternal<'a>) 801 | // abstract member Combine : IInternal<'a> * (unit -> IInternal<'a>) -> IInternal<'a> 802 | // abstract member Run : (unit -> IInternal<'a>) -> IDelayed<'a> 803 | 804 | 805 | 806 | 807 | // module Result = 808 | // open IMonad 809 | 810 | // type IFailure = 811 | // abstract member Reason : unit -> string 812 | // abstract member GetStackTrace : unit -> string 813 | // abstract member GetStackTraceUpToLevel : int -> string 814 | 815 | // [] 816 | // [] 817 | // type Result<'a> = 818 | // | Success of 'a 819 | // | Failure of IFailure 820 | // interface IInternal<'a> 821 | 822 | // [] 823 | // [] 824 | // type Delayed<'a> = 825 | // | Delayed of (unit -> Result<'a>) 826 | // interface IDelayed<'a> 827 | // // static member inline Unit<'a>() = () 828 | 829 | // type ResultBuilder() = 830 | // interface IMonadBuilder with 831 | 832 | // member __.Bind(Delayed m:Delayed<'a>, f) = 833 | // match m() with 834 | // |Success elem -> f elem 835 | // |Failure s -> Failure s 836 | 837 | // member __.Return x = Success x 838 | // member __.ReturnFrom (Delayed m) = m() 839 | // member __.Zero () = Success () 840 | 841 | // member __.Combine (a,b)= 842 | // let runnedB = b() 843 | // match a,runnedB with 844 | // | Success _ , Success b1 -> Success b1 845 | // | Success _ , Failure b1 -> Failure b1 846 | // | Failure a1, Success _ -> Failure a1 847 | // | Failure _ , Failure b1 -> Failure b1 848 | 849 | // member __.Delay(f) = f 850 | // member __.Run(funToRun) = 851 | // Delayed funToRun 852 | 853 | // let result = ResultBuilder() 854 | 855 | // let runResult (Delayed result) = result() 856 | 857 | 858 | // module State = 859 | // open Result 860 | 861 | 862 | // type State<'T, 'State> = State of ('State -> IMonad<'T * 'State>) 863 | // type Delayed<'s, 'd> = Delayed of (unit -> State<'s, 'd>) 864 | 865 | 866 | // type StateBuilder<'State>() = 867 | // let bind ( k:'T -> State<'U,'State> ) (Delayed delayed:Delayed<'T,'State>) = 868 | // fun s -> 869 | // let (State m) = delayed() 870 | // match m s with 871 | // | Success (a, s') -> 872 | // // (a, s') 873 | // let (State state) = (k a) 874 | // (state s') 875 | // | Failure failure -> Failure failure 876 | // |> State 877 | 878 | // member __.Bind(m:Delayed<'T,'State>, k:'T -> State<'U,'State>) : State<'U,'State> = bind k m 879 | // member __.Return(a) : State<'T,'State> = State(fun s -> Success(a,s)) 880 | // member __.ReturnFrom(Delayed m:Delayed<'T,'State>) = m() 881 | // member this.Zero() = this.Return () 882 | // member __.Combine(state1 : State<'a,'State>, delayedState2 : unit -> State<'a,'State>) = 883 | // let state2 = delayedState2() 884 | // bind (fun _ -> state2) (Delayed (fun _ -> state1)) 885 | // member __.Delay(f) = f 
886 | // member __.Run(delayed) = Delayed delayed 887 | 888 | // let getState = 889 | // let state = State(fun s -> Success(s,s)) 890 | // (fun () -> state) |> Delayed 891 | 892 | // let putState s = 893 | // let state = State(fun _ -> ((),s)) 894 | // (fun () -> state) |> Delayed 895 | 896 | // let runData (Delayed delayed) state = 897 | // let (State m) = delayed() 898 | // m state |> fst 899 | 900 | // let runState (Delayed delayed) state = 901 | // let (State m) = delayed() 902 | // m state |> snd 903 | // let empty = fun s -> ((), s) 904 | 905 | 906 | 907 | // let parse = StateBuilder() 908 | 909 | module ParserErrors = 910 | 911 | 912 | type ParserFailure = 913 | // basic parsing failure 914 | | ExpectedIdentifier of Token 915 | | ExpectedLiteral of Token 916 | | ExpectedBinaryOp of Token 917 | | ExpectedKeyword of Keyword * Token 918 | | ExpectedSeparator of Separator * Token 919 | | ExpectedOperator of Operator * Token 920 | | CannotConsumeEmptyState 921 | // wrapped failures with context 922 | | ParsingPTTypeDefFailure of ParserFailure 923 | | ParsingPTTypeDefPrimeFailure of ParserFailure 924 | | ParsingPTParamFailure of ParserFailure 925 | | ParsingPTExprFailure of ParserFailure 926 | | ParsingPTExprPrimeFailure of ParserFailure 927 | member this.Description = 928 | let identation = " " 929 | let (tokenError,location) = 930 | 931 | let appendIndentedLine description level toAppend = 932 | description + "\n" + String.replicate level identation + toAppend 933 | 934 | let rec getDescription (level:int) (description:string) (failure:ParserFailure) = 935 | match failure with 936 | 937 | | ExpectedIdentifier token -> 938 | let description = 939 | appendIndentedLine description level 940 | (sprintf "Expected an Identifier but received the Token : { %A }" token) 941 | (description, Some token.Location) 942 | | ExpectedLiteral token -> 943 | let description = 944 | appendIndentedLine description level 945 | (sprintf "Expected a Literal but received the Token : { %A }" token ) 946 | (description, Some token.Location) 947 | | ExpectedBinaryOp token -> 948 | let description = 949 | appendIndentedLine description level 950 | (sprintf "Expected a Binary Operator but received the Token : { %A }" token) 951 | (description, Some token.Location) 952 | | CannotConsumeEmptyState -> 953 | let description = 954 | appendIndentedLine description level 955 | (sprintf "Cannot consume a token as the state is empty") 956 | (description, None) 957 | | ExpectedKeyword (keyword , token) -> 958 | let description = 959 | appendIndentedLine description level 960 | (sprintf "Expected a { Keyword %A } but received the Token : { %A }" keyword (token.TokenType)) 961 | (description, Some token.Location) 962 | | ExpectedSeparator (separator , token) -> 963 | let description = 964 | appendIndentedLine description level 965 | (sprintf "Expected a { Separator %A } but received the Token : { %A }" separator (token.TokenType) ) 966 | (description, Some token.Location) 967 | | ExpectedOperator (operator , token) -> 968 | let description = 969 | appendIndentedLine description level 970 | (sprintf "Expected an { Operator %A } but received the Token : { %A }" operator (token.TokenType) ) 971 | (description, Some token.Location) 972 | 973 | 974 | | ParsingPTTypeDefFailure parserFailure -> 975 | ("Failed to Parse type definition" ,parserFailure) 976 | |> fun (description,parserFailure) -> getDescription (level+1) description parserFailure 977 | | ParsingPTTypeDefPrimeFailure parserFailure -> 978 | ("Failed to Parse type 
definition Prime",parserFailure) 979 | |> fun (description,parserFailure) -> getDescription (level+1) description parserFailure 980 | | ParsingPTParamFailure parserFailure -> 981 | ("Failed to Parse parameters" ,parserFailure) 982 | |> fun (description,parserFailure) -> getDescription (level+1) description parserFailure 983 | | ParsingPTExprFailure parserFailure -> 984 | ("Failed to Parse Expression" ,parserFailure) 985 | |> fun (description,parserFailure) -> getDescription (level+1) description parserFailure 986 | | ParsingPTExprPrimeFailure parserFailure -> 987 | ("Failed to Parse Expression Prime" ,parserFailure) 988 | |> fun (description,parserFailure) -> getDescription (level+1) description parserFailure 989 | 990 | getDescription 0 "" this 991 | 992 | 993 | match location with 994 | | None -> sprintf "Parsing Error : \n%s" tokenError 995 | | Some location -> 996 | let (Line line) = location.Line 997 | let (Column column) = location.Column 998 | sprintf "Parsing Error : Line %i Column %i \n%s" line column tokenError 999 | 1000 | 1001 | 1002 | module Parser = 1003 | open ParserErrors 1004 | 1005 | type Parser<'T> = RWSResult 1006 | let parser = RWSResultBuilder(Monoid.list) 1007 | 1008 | let getState : Parser= rwsGetState Monoid.list 1009 | let putState state : Parser= rwsPutState Monoid.list state 1010 | let read : Parser = rwsRead Monoid.list 1011 | let write value : Parser = rwsWrite value 1012 | let runParser state (computation : Parser<'T>) : Result= rwsRun (state,()) computation 1013 | 1014 | let error (failure : ParserFailure) : Parser<'T> = RWSResult (fun() -> RWSRInternal (fun _ -> Failure failure)) 1015 | 1016 | let catchError (handler : ParserFailure -> Parser<'T>) (computation : Parser<'T>) : Parser<'T> = 1017 | parser{ 1018 | let! state = getState 1019 | let result = runParser state computation 1020 | match result with 1021 | | Success _ -> return! computation 1022 | | Failure failure -> return! handler failure 1023 | } 1024 | 1025 | let parserNop: Parser = parser {return ()} 1026 | 1027 | let fromList (computation: Parser<'T> list) : Parser<'T list>= 1028 | let rec aux (list:Parser<'T> list) output = 1029 | parser{ 1030 | match list with 1031 | | [] -> return List.rev output 1032 | | hd::tl -> 1033 | let! data = hd 1034 | return! aux tl (data::output) 1035 | 1036 | } 1037 | aux computation [] 1038 | 1039 | 1040 | let tryWhileSeparator (failure:Separator) (action:Parser<'T>) : Parser<'T list> = 1041 | parser{ 1042 | let mutable acc = [] 1043 | let mutable guard = true 1044 | while guard do 1045 | try 1046 | let! 
result = action 1047 | acc <- result::acc 1048 | with 1049 | | ExpectedSeparator(f,_) when f = failure -> 1050 | guard <- false 1051 | return () 1052 | 1053 | return List.rev acc 1054 | } 1055 | 1056 | let private getParserFailureArgs<'a> (withSubArg:bool) (failure : ParserFailure) : Parser<'a> = 1057 | parser{ 1058 | try 1059 | let arguments = 1060 | let rec getArgs (withSubArg:bool) (parserFailure:ParserFailure) = 1061 | match parserFailure with 1062 | | ExpectedIdentifier token -> box token 1063 | | ExpectedLiteral token -> box token 1064 | | ExpectedBinaryOp token -> box token 1065 | | CannotConsumeEmptyState -> box () 1066 | | ExpectedKeyword (keyword , token) -> box (keyword , token) 1067 | | ExpectedSeparator (separator , token) -> box (separator , token) 1068 | | ExpectedOperator (operator , token) -> box (operator , token) 1069 | 1070 | | ParsingPTTypeDefFailure parserFailure 1071 | | ParsingPTTypeDefPrimeFailure parserFailure 1072 | | ParsingPTParamFailure parserFailure 1073 | | ParsingPTExprFailure parserFailure 1074 | | ParsingPTExprPrimeFailure parserFailure -> 1075 | if withSubArg then 1076 | getArgs withSubArg parserFailure 1077 | else 1078 | box parserFailure 1079 | getArgs withSubArg failure |> unbox<'a> 1080 | return arguments 1081 | with 1082 | | _ -> 1083 | return! error failure 1084 | } 1085 | 1086 | let t = fun token -> ExpectedSeparator(Comma,token) |> ParsingPTExprFailure 1087 | 1088 | 1089 | let private parseUntilWithSubArg 1090 | (failureCase: 'args -> ParserFailure) 1091 | (withSubArg:bool) 1092 | (isSubArg:'args -> bool) 1093 | (computation:Parser<'T>) : Parser<'T list> = 1094 | let rec aux count acc = 1095 | parser{ 1096 | try 1097 | let! result = computation 1098 | return! aux (count + 1) (result::acc) 1099 | with 1100 | | catchedFailure -> 1101 | let! args = getParserFailureArgs<'args> withSubArg catchedFailure 1102 | if failureCase args = catchedFailure && isSubArg args && count > 0 then 1103 | return acc 1104 | else 1105 | return! error catchedFailure 1106 | } 1107 | 1108 | aux 0 [] 1109 | 1110 | let parseUntilWithArg (failureCase: 'args -> ParserFailure) (isSubArg:'args -> bool) (computation:Parser<'T>) : Parser<'T list> = 1111 | parseUntilWithSubArg failureCase true isSubArg computation 1112 | 1113 | 1114 | 1115 | let parseUntil (failureCase: 'args -> ParserFailure) (computation:Parser<'T>) : Parser<'T list> = 1116 | parseUntilWithSubArg failureCase false (fun _ -> true) computation 1117 | 1118 | 1119 | let wrapFailure (failureCase : ParserFailure -> ParserFailure) (computation:Parser<'T> ) :Parser<'T> = 1120 | parser{ 1121 | try 1122 | return! computation 1123 | with 1124 | | failure -> return! error (failureCase failure) 1125 | } 1126 | 1127 | // An exception of type 'System.InvalidCastException' occurred in Compiler.dll 1128 | // but was not handled in user code: 'Unable to cast object of 1129 | // type 'System.Tuple`2[Compiler.LexerTypes+Keyword,Compiler.LexerTypes+Token]' to type 'ParserFailure'.' 1130 | 1131 | module ParserHelper = 1132 | open Parser 1133 | open ParserErrors 1134 | 1135 | let consumeToken : Parser = 1136 | parser{ 1137 | let! tokens = getState 1138 | match tokens with 1139 | | [] -> return! Parser.error CannotConsumeEmptyState 1140 | | token::tokens -> 1141 | do! putState tokens 1142 | return token 1143 | } 1144 | let restoreTokens (tokens : Token list) : Parser = 1145 | parser{ 1146 | let! stateTokens = getState 1147 | do! 
putState (tokens @ stateTokens) 1148 | } 1149 | 1150 | let restoreToken (token : Token) : Parser = 1151 | parser{ 1152 | let! tokens = getState 1153 | do! putState (token::tokens) 1154 | } 1155 | 1156 | 1157 | let parseIdentifier : Parser = 1158 | parser{ 1159 | let! token = consumeToken 1160 | match token.TokenType with 1161 | | Identifier ident -> return ident 1162 | | _ -> return! Parser.error (ExpectedIdentifier token) 1163 | } 1164 | 1165 | let private parseTokenType 1166 | (tokenType : 'a -> TokenType) 1167 | (failure : ('a * Token) -> ParserFailure) 1168 | (tokenParameter : 'a) 1169 | : Parser = 1170 | parser{ 1171 | let! token = consumeToken 1172 | if token.TokenType = tokenType tokenParameter then 1173 | return () 1174 | else 1175 | return! Parser.error (failure (tokenParameter , token)) 1176 | } 1177 | 1178 | 1179 | let parseOperator = parseTokenType Operator ExpectedOperator 1180 | let parseSeparator = parseTokenType Separator ExpectedSeparator 1181 | let parseKeyword = parseTokenType Keyword ExpectedKeyword 1182 | 1183 | // TODO : CHANGE TO HANDLE ONLY UNARY OPERATORS 1184 | let parseUnaryOperator = parseOperator 1185 | let parseBinaryOp : Parser = 1186 | parser{ 1187 | let! token = consumeToken 1188 | match token.TokenType with 1189 | | Operator op -> return op 1190 | | _ -> return! Parser.error (ExpectedBinaryOp token) 1191 | } 1192 | 1193 | 1194 | let parseLiteral : Parser = 1195 | parser{ 1196 | let! token = consumeToken 1197 | match token.TokenType with 1198 | | Literal lit -> return lit 1199 | | _ -> return! Parser.error (ExpectedLiteral token) 1200 | } 1201 | 1202 | 1203 | 1204 | module Parsing = 1205 | open Parser 1206 | open ParserErrors 1207 | open ParserHelper 1208 | open Error 1209 | open Error.Helper 1210 | 1211 | 1212 | 1213 | // STRUCTURES =>> ANALYZER COMPONENTS 1214 | 1215 | // string =>> LEXER 1216 | // =>> Token Stream =>> PARSER 1217 | // =>> Parse Tree =>> CONVERTER 1218 | // =>> AST 1219 | 1220 | 1221 | 1222 | // // type_def := LPar type_def RPar type_def_prime 1223 | // // | type_value type_def_prime 1224 | // // type_value := identifier 1225 | // type PTTypeDef = 1226 | // | PTTypeDefBlock of PTTypeDef * PTTypeDefPrime 1227 | // | PTTypeValue of Ident * PTTypeDefPrime 1228 | 1229 | // // type_def_prime := ARROW type_def type_def_prime 1230 | // // | epsilon 1231 | // and PTTypeDefPrime = 1232 | // | PTArrow of PTTypeDef * PTTypeDefPrime 1233 | // | PTEpsilon 1234 | 1235 | // // parameter := identifier {COMMA parameter}* 1236 | // // | LPar parameter RPar 1237 | // // | LPar parameter COLON type_def RPar 1238 | // type PTParam = 1239 | // | PTMultParam of Ident * PTParam list 1240 | // | PTParamBlock of PTParam 1241 | // | PTParamTyped of PTParam * PTTypeDef 1242 | 1243 | // // expr := LITERAL expr_prime 1244 | // // | identifier expr_prime 1245 | // // | LPar expr RPar expr_prime 1246 | // // | UnaryOp expr expr_prime 1247 | // // | LET let_binding expr expr_prime 1248 | // type PTExpr = 1249 | // | PTLit of Literal * PTExprPrime 1250 | // | PTIdent of Ident * PTExprPrime 1251 | // | PTBlock of PTExpr * PTExprPrime 1252 | // | PTUnaryOp of Operator * PTExpr * PTExprPrime 1253 | // | PTLet of PTLetBinding * PTExpr * PTExprPrime 1254 | 1255 | // // expr_prime: = BinOp expr expr_prime 1256 | // // | {COMMA expr}+ expr_prime 1257 | // // | {expr}+ expr_prime 1258 | // // | epsilon 1259 | // and PTExprPrime = 1260 | // | PTBinaryOp of Operator * PTExpr * PTExprPrime 1261 | // | PTTuple of PTExpr list * PTExprPrime 1262 | // | PTAppli of PTExpr list * 
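// Added illustration (not from the original grammar comments), based on the active
// PTExpr/PTExprPrime definitions and grammar notes further down: the *_prime types factor
// out the left recursion of the expression grammar, so the head of an expression carries
// any trailing operator, application or tuple in its prime slot. For example, with `f` and
// `x` standing for lexed identifiers, the application `f x` is intended to parse as
//   PTIdent (f, PTAppli ([PTIdent (x, PTEpsilon)], PTEpsilon))
// rather than as a left-recursive application node.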
PTExprPrime 1263 | // | PTEpsilon 1264 | 1265 | // // let_binding := parameter = expr 1266 | // and PTLetBinding = PTLetBinding of PTParam * PTExpr 1267 | 1268 | // // function_body := expr 1269 | // type PTFunctionBody = PTExpr 1270 | 1271 | // // function_def := identifier {parameter}+ EQUAL function_body 1272 | // type PTFunctionDef = 1273 | // | PTFunctionDef of Ident * PTParam list * PTFunctionBody 1274 | 1275 | // // signature := identifier COLON type_def 1276 | // type PTSignature = 1277 | // // | PTSignature of Ident * PTTypeDef 1278 | // | PTSignature of Ident // TO REMOVE 1279 | // | SigNot of Ident // TO REMOVE 1280 | // // function := signature? function_def 1281 | // type PTFunction = PTFunction of PTSignature option * PTFunctionDef 1282 | 1283 | // // quetzalProg := (function)* EOF 1284 | // type PTQuetzalProg = PTQuetzalProg of PTFunction list 1285 | 1286 | 1287 | // let parsePTSignature = 1288 | // parser{ 1289 | // let! ident = parseIdentifier 1290 | // try 1291 | // do! parseSeparator Colon 1292 | // return PTSignature ident 1293 | // with 1294 | // | ExpectedSeparator (Colon,_) -> return (SigNot ident) 1295 | // } 1296 | 1297 | // let parsePTFunctionDef (tokens:Token list) = 1298 | // let (ident,tokens) = parseIdentifier tokens 1299 | // let tokens = parseOperator Equal tokens 1300 | // (FnNot ident, tokens) 1301 | 1302 | // let parseFunction (tokens : Token list) = 1303 | // let (ptSignature,tokens) = parsePTSignature tokens 1304 | // let (ptFunctionDef,tokens) = parsePTFunctionDef tokens 1305 | // (PTFunction (ptSignature,ptFunctionDef) , tokens) 1306 | 1307 | 1308 | 1309 | // let parsePTQuetzalProg (tokens : Token list) = 1310 | // let rec auxParser (tokens : Token list) (ptFunctions: PTFunction list) = 1311 | // match tokens with 1312 | // | [token] when token.TokenType = EOF -> PTQuetzalProg (ptFunctions |> List.rev) 1313 | // | [] -> failwith "No Possible" 1314 | // | _ -> 1315 | // let (ptFunction,tokensLeft) = parseFunction tokens 1316 | // auxParser tokensLeft (ptFunction::ptFunctions) 1317 | 1318 | // auxParser tokens [] 1319 | 1320 | 1321 | 1322 | // type_def := LPar type_def RPar type_def_prime 1323 | // | type_value type_def_prime 1324 | // type_value := identifier 1325 | type PTTypeDef = 1326 | | PTTypeDefBlock of PTTypeDef * PTTypeDefPrime 1327 | | PTTypeValue of Ident * PTTypeDefPrime 1328 | 1329 | // type_def_prime := ARROW type_def type_def_prime 1330 | // | epsilon 1331 | and PTTypeDefPrime = 1332 | | PTArrow of PTTypeDef * PTTypeDefPrime 1333 | | PTEpsilon 1334 | 1335 | 1336 | 1337 | let rec parsePTTypeDef () = 1338 | parser{ 1339 | printfn "parsePTTypeDef" 1340 | try 1341 | let! ident = parseIdentifier 1342 | let! ptTypeDefPrime = parsePTTypeDefPrime () 1343 | return PTTypeValue (ident,ptTypeDefPrime) 1344 | with 1345 | | ExpectedIdentifier _ -> 1346 | do! parseSeparator LParentheses 1347 | let! ptTypeDef = parsePTTypeDef() 1348 | do! parseSeparator RParentheses 1349 | let! ptTypeDefPrime = parsePTTypeDefPrime () 1350 | return PTTypeDefBlock (ptTypeDef,ptTypeDefPrime) 1351 | } 1352 | |> wrapFailure ParsingPTTypeDefFailure 1353 | 1354 | and parsePTTypeDefPrime () = 1355 | parser{ 1356 | printfn "parsePTTypeDefPrime" 1357 | try 1358 | do! parseSeparator Arrow 1359 | let! ptTypeDef = parsePTTypeDef() 1360 | let! 
ptTypeDefPrime = parsePTTypeDefPrime () 1361 | return PTArrow (ptTypeDef,ptTypeDefPrime) 1362 | with 1363 | | ExpectedSeparator (Arrow,_) -> 1364 | return PTEpsilon 1365 | } 1366 | |> wrapFailure ParsingPTTypeDefPrimeFailure 1367 | 1368 | 1369 | 1370 | 1371 | // parameter := 1372 | // | LPar parameter RPar 1373 | // | identifier COMMA parameter 1374 | // | identifier COLON type_def {COMMA parameter}? 1375 | // | identifier 1376 | 1377 | type PTParam = 1378 | | PTIdent of Ident 1379 | | PTParamBlock of PTParam 1380 | | PTIdentSeparated of Ident * PTParam 1381 | | PTParamTyped of Ident * PTTypeDef * PTParam option 1382 | 1383 | let rec parsePTParam () = 1384 | parser{ 1385 | try 1386 | printfn "parsePTParam" 1387 | do! parseSeparator LParentheses 1388 | let! ptParam = parsePTParam() 1389 | do! parseSeparator RParentheses 1390 | return PTParamBlock ptParam 1391 | with 1392 | | ExpectedSeparator (LParentheses,_) -> 1393 | try 1394 | let! ident = parseIdentifier 1395 | do! parseSeparator Comma 1396 | let! ptParam = parsePTParam() 1397 | return PTIdentSeparated (ident,ptParam) 1398 | with 1399 | | ExpectedSeparator (Comma,_) -> 1400 | try 1401 | let! ident = parseIdentifier 1402 | do! parseSeparator Colon 1403 | let! ptTypeDef = parsePTTypeDef () 1404 | try 1405 | do! parseSeparator Comma 1406 | let! ptParam = parsePTParam() 1407 | return PTParamTyped (ident,ptTypeDef,Some ptParam) 1408 | with 1409 | | ExpectedSeparator (Comma,_) -> 1410 | return PTParamTyped (ident,ptTypeDef,None) 1411 | with 1412 | | ExpectedSeparator (Colon,_) -> 1413 | let! ident = parseIdentifier 1414 | return PTIdent ident 1415 | } 1416 | |> wrapFailure ParsingPTParamFailure 1417 | 1418 | // expr := | LPar expr RPar expr_prime 1419 | // | LITERAL expr_prime 1420 | // | identifier expr_prime 1421 | // | UnaryOp expr expr_prime 1422 | // | LET let_binding expr_prime 1423 | type PTExpr = 1424 | | PTBlock of PTExpr * PTExprPrime 1425 | | PTLit of Literal * PTExprPrime 1426 | | PTIdent of Ident * PTExprPrime 1427 | | PTUnaryOp of Operator * PTExpr * PTExprPrime 1428 | | PTLet of PTLetBinding * PTExprPrime 1429 | 1430 | // expr_prime: = BinOp expr expr_prime 1431 | // | {expr}+ expr_prime 1432 | // | {COMMA expr}+ expr_prime 1433 | // | epsilon 1434 | and PTExprPrime = 1435 | | PTBinaryOp of Operator * PTExpr * PTExprPrime 1436 | | PTAppli of PTExpr list * PTExprPrime 1437 | | PTTuple of PTExpr list * PTExprPrime 1438 | | PTEpsilon 1439 | 1440 | // let_binding := parameter = expr 1441 | and PTLetBinding = PTLetBinding of PTParam * PTExpr 1442 | 1443 | let rec parsePTExpr () = 1444 | parser{ 1445 | try 1446 | printfn "parsePTExpr" 1447 | do! parseSeparator LParentheses 1448 | let! ptExpr = parsePTExpr() 1449 | do! parseSeparator RParentheses 1450 | let! ptExprPrime = parsePTExprPrime () 1451 | return PTBlock (ptExpr,ptExprPrime) 1452 | with 1453 | | ExpectedSeparator (LParentheses,_) -> 1454 | try 1455 | let! literal = parseLiteral 1456 | let! ptExprPrime = parsePTExprPrime () 1457 | return PTLit (literal,ptExprPrime) 1458 | with 1459 | | ExpectedLiteral _ -> 1460 | try 1461 | let! ident = parseIdentifier 1462 | let! ptExprPrime = parsePTExprPrime () 1463 | return PTIdent (ident,ptExprPrime) 1464 | with 1465 | | ExpectedIdentifier _ -> 1466 | try 1467 | do! parseUnaryOperator Subs 1468 | let! ptExpr = parsePTExpr() 1469 | let! ptExprPrime = parsePTExprPrime () 1470 | return PTUnaryOp (Subs,ptExpr,ptExprPrime) 1471 | with 1472 | | ExpectedOperator (Subs,_) -> 1473 | do! parseKeyword Let 1474 | let! 
ptLetBinding = parsePTLetBinding () 1475 | let! ptExprPrime = parsePTExprPrime () 1476 | return PTLet (ptLetBinding,ptExprPrime) 1477 | } 1478 | |> wrapFailure ParsingPTExprFailure 1479 | 1480 | and parsePTExprPrime () = 1481 | parser{ 1482 | try 1483 | printfn "parsePTExprPrime" 1484 | let! binaryOp = parseBinaryOp 1485 | printfn "ici" 1486 | let! ptExpr = parsePTExpr() 1487 | printfn "la" 1488 | let! ptExprPrime = parsePTExprPrime() 1489 | printfn "fin" 1490 | return PTBinaryOp(binaryOp,ptExpr,ptExprPrime) 1491 | with 1492 | | ExpectedBinaryOp _ -> 1493 | try 1494 | let! ptExprs = 1495 | parser{ 1496 | printfn "expr list" 1497 | return! parsePTExpr () 1498 | } 1499 | |> parseUntil ParsingPTExprFailure 1500 | 1501 | printfn "exprPrimesss" 1502 | let! ptExprPrime = parsePTExprPrime() 1503 | printfn "end 2" 1504 | return PTAppli (ptExprs,ptExprPrime) 1505 | with 1506 | | f -> 1507 | let! s = getState 1508 | printfn "----" 1509 | printfn "[%A]" s 1510 | printfn "----" 1511 | try 1512 | let! ptTuple = 1513 | parser{ 1514 | do! parseSeparator Comma 1515 | return! parsePTExpr () 1516 | } 1517 | |> parseUntilWithArg 1518 | (ExpectedSeparator >> ParsingPTExprFailure) 1519 | (fun (op,_) -> op = Comma) 1520 | 1521 | let! ptExprPrime = parsePTExprPrime() 1522 | return PTTuple (ptTuple,ptExprPrime) 1523 | with 1524 | | f -> 1525 | printfn "----" 1526 | printfn "[%A]" f 1527 | printfn "----" 1528 | return PTEpsilon 1529 | } 1530 | |> wrapFailure ParsingPTExprPrimeFailure 1531 | 1532 | and parsePTLetBinding() = 1533 | parser{ 1534 | printfn "parsePTLetBinding" 1535 | let! ptParam = parsePTParam () 1536 | do! parseOperator Equal 1537 | let! ptExpr = parsePTExpr () 1538 | return PTLetBinding (ptParam,ptExpr) 1539 | } 1540 | 1541 | // let rec parsePTParam () = 1542 | // parser{ 1543 | // try 1544 | // do! parseSeparator LParentheses 1545 | // let! pTParam = parsePTParam() 1546 | // do! parseSeparator RParentheses 1547 | // return PTParamBlock pTParam 1548 | 1549 | // with 1550 | // | ExpectedSeparator (LParentheses,_) -> 1551 | // try 1552 | // let! pTParam = parsePTParam() 1553 | // do! parseSeparator Colon 1554 | // let! ptTypeDef = parsePTTypeDef () 1555 | // return PTParamTyped (pTParam,ptTypeDef) 1556 | // with 1557 | // | ExpectedSeparator (Colon,_) -> 1558 | // let! ident = parseIdentifier 1559 | // let! paramList = 1560 | // Parser.tryWhileSeparator Comma 1561 | // (parser{ 1562 | // do! parseSeparator Comma 1563 | // return! parsePTParam() 1564 | // }) 1565 | 1566 | // return PTMultParam (ident,paramList) 1567 | // } 1568 | 1569 | 1570 | // let input = 1571 | // "ot >" 1572 | // let tokens = lexing input 1573 | 1574 | 1575 | // let parsed = runParser tokens parsePTSignature 1576 | // match parsed with 1577 | // | Success (_,_,s,_) -> printfn " ==> { %A }" s 1578 | // | Failure failure -> () 1579 | 1580 | // let parsePTSignature = 1581 | // parser{ 1582 | // let! ident = parseIdentifier 1583 | // do! parseSeparator Colon 1584 | // |> catchError( fun failure -> parser{ 1585 | // match failure with 1586 | // | ExpectedTokenType _ -> 1587 | // do! restoreToken 1588 | // return (SigNot ident) 1589 | // | _ -> 1590 | // return! Parser.error failure 1591 | // } 1592 | // ) 1593 | // return PTSignature ident 1594 | 1595 | // } 1596 | 1597 | 1598 | // let parsePTSignature = 1599 | // parser{ 1600 | // let! ident = parseIdentifier 1601 | 1602 | // return! 1603 | // parser{ 1604 | // do! 
parseSeparator Colon 1605 | // return PTSignature ident 1606 | // } 1607 | // |> ParseFailure.OnFailure ( 1608 | // <@ ExpectedTokenType @> , 1609 | // fun ( _ , consumedToken) -> parser{ 1610 | // do! restoreToken consumedToken 1611 | // return (SigNot ident) 1612 | // } 1613 | // ) 1614 | // } 1615 | 1616 | 1617 | // let parsePTSignature = 1618 | // parser{ 1619 | // let! ident = parseIdentifier 1620 | // return! parseSeparator Colon 1621 | // |> ParseFailure.OnFailure( 1622 | 1623 | // <@ ExpectedTokenType @>, 1624 | 1625 | // (fun ( _ , consumedToken) -> parser{ 1626 | // do! restoreToken consumedToken 1627 | // return (SigNot ident) 1628 | // }), 1629 | 1630 | // parser{return PTSignature ident} 1631 | // ) 1632 | 1633 | 1634 | // } 1635 | 1636 | 1637 | // let parsePTFunctionDef (tokens:Token list) = 1638 | // let (ident,tokens) = parseIdentifier tokens 1639 | // let tokens = parseOperator Equal tokens 1640 | // (FnNot ident, tokens) 1641 | 1642 | // let parseFunction (tokens : Token list) = 1643 | // let (ptSignature,tokens) = parsePTSignature tokens 1644 | // let (ptFunctionDef,tokens) = parsePTFunctionDef tokens 1645 | // (PTFunction (ptSignature,ptFunctionDef) , tokens) 1646 | 1647 | 1648 | 1649 | // let parsePTQuetzalProg (tokens : Token list) = 1650 | // let rec auxParser (tokens : Token list) (ptFunctions: PTFunction list) = 1651 | // match tokens with 1652 | // | [token] when token.TokenType = EOF -> PTQuetzalProg (ptFunctions |> List.rev) 1653 | // | [] -> failwith "No Possible" 1654 | // | _ -> 1655 | // let (ptFunction,tokensLeft) = parseFunction tokens 1656 | // auxParser tokensLeft (ptFunction::ptFunctions) 1657 | 1658 | // auxParser tokens [] 1659 | 1660 | 1661 | 1662 | // // type_def := LPar type_def RPar type_def_prime 1663 | // // | type_value type_def_prime 1664 | // // type_value := identifier 1665 | // type PTTypeDef = 1666 | // | PTTypeDefBlock of PTTypeDef * PTTypeDefPrime 1667 | // | PTTypeValue of Ident * PTTypeDefPrime 1668 | 1669 | // // type_def_prime := ARROW type_def type_def_prime 1670 | // // | epsilon 1671 | // and PTTypeDefPrime = 1672 | // | PTArrow of PTTypeDef * PTTypeDefPrime 1673 | // | PTEpsilon 1674 | 1675 | 1676 | 1677 | // let rec parsePTTypeDef (tokens : Token List) = 1678 | // let parsePTTypeDefBlock (tokens : Token List) = 1679 | // let tokens = parseSeparator LParentheses tokens 1680 | // let ptTypeDef,tokens = parsePTTypeDef tokens 1681 | // let tokens = parseSeparator RParentheses tokens 1682 | // let ptTypeDefPrime,tokens = parsePTTypeDefPrime tokens 1683 | // PTTypeDefBlock (ptTypeDef,ptTypeDefPrime),tokens 1684 | 1685 | // let parsePTTypeValue (tokens : Token List) = 1686 | // let ident,tokens = parseIdentifier tokens 1687 | // let ptTypeDefPrime,tokens = parsePTTypeDefPrime tokens 1688 | // PTTypeValue (ident,ptTypeDefPrime) , tokens 1689 | 1690 | // try 1691 | // parsePTTypeDefBlock tokens 1692 | // with 1693 | // | _ -> parsePTTypeValue tokens 1694 | 1695 | 1696 | // and parsePTTypeDefPrime (tokens : Token List) = 1697 | // let parsePTArrow (tokens : Token List) = 1698 | // let tokens = parseSeparator Arrow tokens 1699 | // let ptTypeDef,tokens = parsePTTypeDef tokens 1700 | // let ptTypeDefPrime,tokens = parsePTTypeDefPrime tokens 1701 | // PTArrow (ptTypeDef,ptTypeDefPrime) , tokens 1702 | 1703 | // try 1704 | // parsePTArrow tokens 1705 | // with 1706 | // | _ -> PTEpsilon , tokens 1707 | 1708 | 1709 | 1710 | 1711 | // let input = 1712 | // "ot -> int -> float -> (int -> float) -> Test" 1713 | 1714 | 1715 | // let input = 1716 | 
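// Hypothetical usage sketch (an addition; it only reuses names defined in this file and in
// the lexer referenced by this commented test harness): the arrow-type sample above targets
// the type-definition production, e.g.
//   let tokens = lexing "ot -> int -> float -> (int -> float) -> Test"
//   runParser tokens (parsePTTypeDef ())
// while the multi-line sample below pairs signature lines (`add:`, `Test:`) with definition
// lines (`add =`, `Toz =`, `Test =`), matching the `signature := identifier COLON type_def`
// and `function_def := identifier {parameter}+ EQUAL function_body` productions listed
// further down in this file.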
// "add: 1717 | // add = 1718 | 1719 | // Toz = 1720 | 1721 | // Toz = 1722 | // Toz = 1723 | // Test: 1724 | // Test = " 1725 | 1726 | 1727 | // let res = 1728 | // lexing 1729 | // >> parsePTTypeDef 1730 | 1731 | // printfn "START" 1732 | 1733 | // printfn "%A" (res input) 1734 | 1735 | 1736 | 1737 | 1738 | // module TokenParser = 1739 | 1740 | // type IError = 1741 | // abstract member Description : string 1742 | 1743 | // type ParsingOutcome<'TOutcome> = 1744 | // | Result of 'TOutcome 1745 | // | Error of IError 1746 | 1747 | // type IParseable = 1748 | // abstract member 1749 | 1750 | 1751 | // type TokenParser<'TOutcome> = TokenParser of (Token list -> Token list * ParsingOutcome<'TOutcome> ) 1752 | 1753 | // let runParser (TokenParser parser) = parser 1754 | 1755 | // // Parser a single token 1756 | // let pToken (tokenToMatch:Token) = 1757 | // let innerParser (tokens:Token list) = 1758 | // match tokens with 1759 | // | [] -> (tokens,Error "It's done!!") // TODO : Add a case that's it's done 1760 | // | token::left -> 1761 | // if token = tokenToMatch then 1762 | // (left,Result token) 1763 | // else 1764 | // (tokens,Error (sprintf "Expected %A but got %A" tokenToMatch token) ) 1765 | 1766 | // TokenParser innerParser 1767 | 1768 | // let pAnd parser1 parser2 = 1769 | 1770 | // let innerParser (tokens:Token list) = 1771 | // let (tokens,result1) = runParser parser1 tokens 1772 | // match result1 with 1773 | // | Result outcome1 -> 1774 | // let (tokens,result2) = runParser parser2 tokens 1775 | // match result2 with 1776 | // | Result outcome2 -> (tokens, Result (outcome1,outcome2)) 1777 | // | Error err -> (tokens) 1778 | // match tokens with 1779 | // | [] -> (tokens,Error "It's done!!") // TODO : Add a case that's it's done 1780 | // | token::tokens -> 1781 | // if token = tokenToMatch then 1782 | // (tokens,Result token) 1783 | // else 1784 | // (tokens,Error (sprintf "Expected %A but got %A" tokenToMatch token) ) 1785 | 1786 | // TokenParser innerParser 1787 | 1788 | 1789 | 1790 | // let parseExpr, parseExprRef : PTExprParser * PTExprParser ref = createParserForwardedToRef() 1791 | // let parseTypeDef (tokens : Token list) = 1792 | // let parseTypeDef, parseTypeDefRef : Parser * Parser ref = 1793 | // createParserForwardedToRef() 1794 | 1795 | // let parsePTArrow () = 1796 | // parseTypeDef .>> 1797 | 1798 | 1799 | // let (|IsTypeValue|_|) (token:Token) = 1800 | // match token.TokenType with 1801 | // | Identifier ident -> 1802 | 1803 | // let rec aux tokens acc = 1804 | // match tokens with 1805 | // | [] -> acc 1806 | // | token::tokens -> 1807 | 1808 | 1809 | 1810 | // let parseExpr (tokens : Token list) = 1811 | // let rec aux tokens acc = 1812 | // match tokens with 1813 | // | [] -> acc 1814 | // | token::tokens -> 1815 | 1816 | 1817 | 1818 | 1819 | 1820 | // v quetzalProg := list of function 1821 | 1822 | // v function := signature? function_def 1823 | // v signature := identifier COLON type_def 1824 | 1825 | // v? 
identifier := IDENTIFIER 1826 | 1827 | // v type_def := type_def ARROW type_def 1828 | // | LPar type_def RPar 1829 | 1830 | // v function_def := identifier {parameter}+ EQUAL function_body 1831 | // v function_body := expr 1832 | 1833 | // v expr := LITERAL 1834 | // | identifier 1835 | // | LPar expr RPar 1836 | // | expr BinOp expr 1837 | // | UnaryOp expr 1838 | // | expr {COMMA expr}+ 1839 | // | expr {expr}+ 1840 | // | LET let_binding expr 1841 | 1842 | // v parameter := identifier {COMMA parameter}* 1843 | // | LPar parameter RPar 1844 | // | LPar parameter COLON type_def RPar 1845 | 1846 | // v let_binding := parameter = expr 1847 | 1848 | 1849 | 1850 | 1851 | 1852 | 1853 | 1854 | 1855 | 1856 | 1857 | 1858 | 1859 | 1860 | 1861 | 1862 | 1863 | 1864 | 1865 | 1866 | --------------------------------------------------------------------------------
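End-to-end usage sketch (an addition, not part of the repository): it combines only names that appear in Parser.fs above and in the lexer referenced by its commented test harness (`lexing`, `runParser`, `parsePTTypeDef`, the four-element `Success` result and `ParserFailure.Description`), so the module opens and exact result shape may need adjusting against the real Lexer.fs and Monads.fs.

// Assumed module opens; adjust to the actual namespaces in this project.
open Parser
open Parsing

let demoParseTypeDef () =
    // Lex a type signature and run the type-definition production over the token stream.
    let tokens = lexing "ot -> int -> (float -> bool) -> Test"
    match runParser tokens (parsePTTypeDef ()) with
    | Success (_, _, typeDef, _) ->
        // The third element of the Success tuple carries the parsed value,
        // following the commented `Success (_,_,s,_)` example in Parser.fs.
        printfn "Parsed type definition: %A" typeDef
    | Failure failure ->
        // ParserFailure.Description renders the nested failure chain with its source location.
        printfn "%s" failure.Description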