├── .gitattributes
├── .github
└── workflows
│ └── dotnetcore.yml
├── .gitignore
├── PythonCoreFramework.sln
├── PythonCoreFramework
├── PythonCoreFramework.fsproj
└── PythonCoreParser.fs
├── README.md
├── TestPythonCoreFramework
├── MockTokenizer.fs
├── Program.fs
├── TestPythonCoreFramework.fsproj
├── TestPythonCoreParser.fs
├── TestPythonCoreParserSyntaxErrorHandling.fs
└── TestPythonCoreTokenizer.fs
└── _config.yml
/.gitattributes:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # Set default behavior to automatically normalize line endings.
3 | ###############################################################################
4 | * text=auto
5 |
6 | ###############################################################################
7 | # Set default behavior for command prompt diff.
8 | #
9 | # This is need for earlier builds of msysgit that does not have it on by
10 | # default for csharp files.
11 | # Note: This is only used by command line
12 | ###############################################################################
13 | #*.cs diff=csharp
14 |
15 | ###############################################################################
16 | # Set the merge driver for project and solution files
17 | #
18 | # Merging from the command prompt will add diff markers to the files if there
19 | # are conflicts (Merging from VS is not affected by the settings below, in VS
20 | # the diff markers are never inserted). Diff markers may cause the following
21 | # file extensions to fail to load in VS. An alternative would be to treat
22 | # these files as binary and thus will always conflict and require user
23 | # intervention with every merge. To do so, just uncomment the entries below
24 | ###############################################################################
25 | #*.sln merge=binary
26 | #*.csproj merge=binary
27 | #*.vbproj merge=binary
28 | #*.vcxproj merge=binary
29 | #*.vcproj merge=binary
30 | #*.dbproj merge=binary
31 | #*.fsproj merge=binary
32 | #*.lsproj merge=binary
33 | #*.wixproj merge=binary
34 | #*.modelproj merge=binary
35 | #*.sqlproj merge=binary
36 | #*.wwaproj merge=binary
37 |
38 | ###############################################################################
39 | # behavior for image files
40 | #
41 | # image files are treated as binary by default.
42 | ###############################################################################
43 | #*.jpg binary
44 | #*.png binary
45 | #*.gif binary
46 |
47 | ###############################################################################
48 | # diff behavior for common document formats
49 | #
50 | # Convert binary document formats to text before diffing them. This feature
51 | # is only available from the command line. Turn it on by uncommenting the
52 | # entries below.
53 | ###############################################################################
54 | #*.doc diff=astextplain
55 | #*.DOC diff=astextplain
56 | #*.docx diff=astextplain
57 | #*.DOCX diff=astextplain
58 | #*.dot diff=astextplain
59 | #*.DOT diff=astextplain
60 | #*.pdf diff=astextplain
61 | #*.PDF diff=astextplain
62 | #*.rtf diff=astextplain
63 | #*.RTF diff=astextplain
64 |
--------------------------------------------------------------------------------
/.github/workflows/dotnetcore.yml:
--------------------------------------------------------------------------------
1 | name: .NET Core
2 |
3 | on:
4 | push:
5 | branches: [ master ]
6 | pull_request:
7 | branches: [ master ]
8 |
9 | jobs:
10 | build:
11 |
12 | runs-on: ubuntu-latest
13 |
14 | steps:
15 | - uses: actions/checkout@v2
16 | - name: Setup .NET Core
17 | uses: actions/setup-dotnet@v1
18 | with:
19 | dotnet-version: 3.1.101
20 | - name: Install dependencies
21 | run: dotnet restore
22 | - name: Build
23 | run: dotnet build --configuration Release --no-restore
24 | - name: Test
25 | run: dotnet test --no-restore --verbosity normal
26 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ## Ignore Visual Studio temporary files, build results, and
2 | ## files generated by popular Visual Studio add-ons.
3 | ##
4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
5 |
6 | # User-specific files
7 | *.rsuser
8 | *.suo
9 | *.user
10 | *.userosscache
11 | *.sln.docstates
12 |
13 | # User-specific files (MonoDevelop/Xamarin Studio)
14 | *.userprefs
15 |
16 | # Build results
17 | [Dd]ebug/
18 | [Dd]ebugPublic/
19 | [Rr]elease/
20 | [Rr]eleases/
21 | x64/
22 | x86/
23 | [Aa][Rr][Mm]/
24 | [Aa][Rr][Mm]64/
25 | bld/
26 | [Bb]in/
27 | [Oo]bj/
28 | [Ll]og/
29 |
30 | # Visual Studio 2015/2017 cache/options directory
31 | .vs/
32 | # Uncomment if you have tasks that create the project's static files in wwwroot
33 | #wwwroot/
34 |
35 | # Visual Studio 2017 auto generated files
36 | Generated\ Files/
37 |
38 | # MSTest test Results
39 | [Tt]est[Rr]esult*/
40 | [Bb]uild[Ll]og.*
41 |
42 | # NUNIT
43 | *.VisualState.xml
44 | TestResult.xml
45 |
46 | # Build Results of an ATL Project
47 | [Dd]ebugPS/
48 | [Rr]eleasePS/
49 | dlldata.c
50 |
51 | # Benchmark Results
52 | BenchmarkDotNet.Artifacts/
53 |
54 | # .NET Core
55 | project.lock.json
56 | project.fragment.lock.json
57 | artifacts/
58 |
59 | # StyleCop
60 | StyleCopReport.xml
61 |
62 | # Files built by Visual Studio
63 | *_i.c
64 | *_p.c
65 | *_h.h
66 | *.ilk
67 | *.meta
68 | *.obj
69 | *.iobj
70 | *.pch
71 | *.pdb
72 | *.ipdb
73 | *.pgc
74 | *.pgd
75 | *.rsp
76 | *.sbr
77 | *.tlb
78 | *.tli
79 | *.tlh
80 | *.tmp
81 | *.tmp_proj
82 | *_wpftmp.csproj
83 | *.log
84 | *.vspscc
85 | *.vssscc
86 | .builds
87 | *.pidb
88 | *.svclog
89 | *.scc
90 |
91 | # Chutzpah Test files
92 | _Chutzpah*
93 |
94 | # Visual C++ cache files
95 | ipch/
96 | *.aps
97 | *.ncb
98 | *.opendb
99 | *.opensdf
100 | *.sdf
101 | *.cachefile
102 | *.VC.db
103 | *.VC.VC.opendb
104 |
105 | # Visual Studio profiler
106 | *.psess
107 | *.vsp
108 | *.vspx
109 | *.sap
110 |
111 | # Visual Studio Trace Files
112 | *.e2e
113 |
114 | # TFS 2012 Local Workspace
115 | $tf/
116 |
117 | # Guidance Automation Toolkit
118 | *.gpState
119 |
120 | # ReSharper is a .NET coding add-in
121 | _ReSharper*/
122 | *.[Rr]e[Ss]harper
123 | *.DotSettings.user
124 |
125 | # JustCode is a .NET coding add-in
126 | .JustCode
127 |
128 | # TeamCity is a build add-in
129 | _TeamCity*
130 |
131 | # DotCover is a Code Coverage Tool
132 | *.dotCover
133 |
134 | # AxoCover is a Code Coverage Tool
135 | .axoCover/*
136 | !.axoCover/settings.json
137 |
138 | # Visual Studio code coverage results
139 | *.coverage
140 | *.coveragexml
141 |
142 | # NCrunch
143 | _NCrunch_*
144 | .*crunch*.local.xml
145 | nCrunchTemp_*
146 |
147 | # MightyMoose
148 | *.mm.*
149 | AutoTest.Net/
150 |
151 | # Web workbench (sass)
152 | .sass-cache/
153 |
154 | # Installshield output folder
155 | [Ee]xpress/
156 |
157 | # DocProject is a documentation generator add-in
158 | DocProject/buildhelp/
159 | DocProject/Help/*.HxT
160 | DocProject/Help/*.HxC
161 | DocProject/Help/*.hhc
162 | DocProject/Help/*.hhk
163 | DocProject/Help/*.hhp
164 | DocProject/Help/Html2
165 | DocProject/Help/html
166 |
167 | # Click-Once directory
168 | publish/
169 |
170 | # Publish Web Output
171 | *.[Pp]ublish.xml
172 | *.azurePubxml
173 | # Note: Comment the next line if you want to checkin your web deploy settings,
174 | # but database connection strings (with potential passwords) will be unencrypted
175 | *.pubxml
176 | *.publishproj
177 |
178 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
179 | # checkin your Azure Web App publish settings, but sensitive information contained
180 | # in these scripts will be unencrypted
181 | PublishScripts/
182 |
183 | # NuGet Packages
184 | *.nupkg
185 | # The packages folder can be ignored because of Package Restore
186 | **/[Pp]ackages/*
187 | # except build/, which is used as an MSBuild target.
188 | !**/[Pp]ackages/build/
189 | # Uncomment if necessary however generally it will be regenerated when needed
190 | #!**/[Pp]ackages/repositories.config
191 | # NuGet v3's project.json files produces more ignorable files
192 | *.nuget.props
193 | *.nuget.targets
194 |
195 | # Microsoft Azure Build Output
196 | csx/
197 | *.build.csdef
198 |
199 | # Microsoft Azure Emulator
200 | ecf/
201 | rcf/
202 |
203 | # Windows Store app package directories and files
204 | AppPackages/
205 | BundleArtifacts/
206 | Package.StoreAssociation.xml
207 | _pkginfo.txt
208 | *.appx
209 |
210 | # Visual Studio cache files
211 | # files ending in .cache can be ignored
212 | *.[Cc]ache
213 | # but keep track of directories ending in .cache
214 | !?*.[Cc]ache/
215 |
216 | # Others
217 | ClientBin/
218 | ~$*
219 | *~
220 | *.dbmdl
221 | *.dbproj.schemaview
222 | *.jfm
223 | *.pfx
224 | *.publishsettings
225 | orleans.codegen.cs
226 |
227 | # Including strong name files can present a security risk
228 | # (https://github.com/github/gitignore/pull/2483#issue-259490424)
229 | #*.snk
230 |
231 | # Since there are multiple workflows, uncomment next line to ignore bower_components
232 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
233 | #bower_components/
234 |
235 | # RIA/Silverlight projects
236 | Generated_Code/
237 |
238 | # Backup & report files from converting an old project file
239 | # to a newer Visual Studio version. Backup files are not needed,
240 | # because we have git ;-)
241 | _UpgradeReport_Files/
242 | Backup*/
243 | UpgradeLog*.XML
244 | UpgradeLog*.htm
245 | ServiceFabricBackup/
246 | *.rptproj.bak
247 |
248 | # SQL Server files
249 | *.mdf
250 | *.ldf
251 | *.ndf
252 |
253 | # Business Intelligence projects
254 | *.rdl.data
255 | *.bim.layout
256 | *.bim_*.settings
257 | *.rptproj.rsuser
258 | *- Backup*.rdl
259 |
260 | # Microsoft Fakes
261 | FakesAssemblies/
262 |
263 | # GhostDoc plugin setting file
264 | *.GhostDoc.xml
265 |
266 | # Node.js Tools for Visual Studio
267 | .ntvs_analysis.dat
268 | node_modules/
269 |
270 | # Visual Studio 6 build log
271 | *.plg
272 |
273 | # Visual Studio 6 workspace options file
274 | *.opt
275 |
276 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
277 | *.vbw
278 |
279 | # Visual Studio LightSwitch build output
280 | **/*.HTMLClient/GeneratedArtifacts
281 | **/*.DesktopClient/GeneratedArtifacts
282 | **/*.DesktopClient/ModelManifest.xml
283 | **/*.Server/GeneratedArtifacts
284 | **/*.Server/ModelManifest.xml
285 | _Pvt_Extensions
286 |
287 | # Paket dependency manager
288 | .paket/paket.exe
289 | paket-files/
290 |
291 | # FAKE - F# Make
292 | .fake/
293 |
294 | # JetBrains Rider
295 | .idea/
296 | *.sln.iml
297 |
298 | # CodeRush personal settings
299 | .cr/personal
300 |
301 | # Python Tools for Visual Studio (PTVS)
302 | __pycache__/
303 | *.pyc
304 |
305 | # Cake - Uncomment if you are using it
306 | # tools/**
307 | # !tools/packages.config
308 |
309 | # Tabs Studio
310 | *.tss
311 |
312 | # Telerik's JustMock configuration file
313 | *.jmconfig
314 |
315 | # BizTalk build output
316 | *.btp.cs
317 | *.btm.cs
318 | *.odx.cs
319 | *.xsd.cs
320 |
321 | # OpenCover UI analysis results
322 | OpenCover/
323 |
324 | # Azure Stream Analytics local run output
325 | ASALocalRun/
326 |
327 | # MSBuild Binary and Structured Log
328 | *.binlog
329 |
330 | # NVidia Nsight GPU debugger configuration file
331 | *.nvuser
332 |
333 | # MFractors (Xamarin productivity tool) working folder
334 | .mfractor/
335 |
336 | # Local History for Visual Studio
337 | .localhistory/
338 |
339 | # BeatPulse healthcheck temp database
340 | healthchecksdb
--------------------------------------------------------------------------------
/PythonCoreFramework.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio Version 16
4 | VisualStudioVersion = 16.0.29926.136
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{6EC3EE1D-3C4E-46DD-8F32-0CC8E7565705}") = "PythonCoreFramework", "PythonCoreFramework\PythonCoreFramework.fsproj", "{601F58FA-073F-47F8-B350-07AE8CD994B7}"
7 | EndProject
8 | Project("{6EC3EE1D-3C4E-46DD-8F32-0CC8E7565705}") = "TestPythonCoreFramework", "TestPythonCoreFramework\TestPythonCoreFramework.fsproj", "{F91DD791-9DBB-4191-9286-8A6A058320F8}"
9 | EndProject
10 | Global
11 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
12 | Debug|Any CPU = Debug|Any CPU
13 | Release|Any CPU = Release|Any CPU
14 | EndGlobalSection
15 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
16 | {601F58FA-073F-47F8-B350-07AE8CD994B7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
17 | {601F58FA-073F-47F8-B350-07AE8CD994B7}.Debug|Any CPU.Build.0 = Debug|Any CPU
18 | {601F58FA-073F-47F8-B350-07AE8CD994B7}.Release|Any CPU.ActiveCfg = Release|Any CPU
19 | {601F58FA-073F-47F8-B350-07AE8CD994B7}.Release|Any CPU.Build.0 = Release|Any CPU
20 | {F91DD791-9DBB-4191-9286-8A6A058320F8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
21 | {F91DD791-9DBB-4191-9286-8A6A058320F8}.Debug|Any CPU.Build.0 = Debug|Any CPU
22 | {F91DD791-9DBB-4191-9286-8A6A058320F8}.Release|Any CPU.ActiveCfg = Release|Any CPU
23 | {F91DD791-9DBB-4191-9286-8A6A058320F8}.Release|Any CPU.Build.0 = Release|Any CPU
24 | EndGlobalSection
25 | GlobalSection(SolutionProperties) = preSolution
26 | HideSolutionNode = FALSE
27 | EndGlobalSection
28 | GlobalSection(ExtensibilityGlobals) = postSolution
29 | SolutionGuid = {67C16483-FE24-4080-B760-40379DE6D52D}
30 | EndGlobalSection
31 | EndGlobal
32 |
--------------------------------------------------------------------------------
/PythonCoreFramework/PythonCoreFramework.fsproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | netcoreapp3.1
5 | true
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # PythonCoreFramework
2 | Python core implemented in F# language and under dotnetcore 3.1 or later.
3 |
4 | As of April 2020, I have started on the parser for Python 3.9 grammar and will create all needed unit tests to make sure the
5 | parser is doing its job correctly. Then I will implement the full Tokenizer with its unit tests.
6 |
7 | This is the first part of what I hope will be a core implementation of a Python interpreter / compiler for use under .NET Core 3.1
8 | or later on several different operating systems and machines, including the Raspberry Pi 4 or later.
9 |
10 | The idea for the parser subsystem is to implement Roslyn-like functions for Python source code analysis and modification, based on
11 | the framework I am hoping will be useful some time in the future. It is really early in the development process, and some time remains before
12 | it can be used for analyzing source code as the first step.
13 |
--------------------------------------------------------------------------------
/TestPythonCoreFramework/MockTokenizer.fs:
--------------------------------------------------------------------------------
1 |
2 | // PythonCoreFramework.Net - Mock Tokenizer for pure parse rule UnitTests.
3 | // Written by Richard Magnor Stenbro.
4 |
5 | namespace PythonCoreFramework.UnitTests
6 |
7 | open PythonCoreFramework
8 |
9 |
10 | // MOCK of Tokenizer for pure parser testing //////////////////////////////////////////////////////
11 | type MockTokenizer( nodes : ( Token * int ) list ) =
12 |
13 | let mutable nodeList = nodes
14 |
15 | member this.Next() =
16 | match nodeList with
17 | | head :: tail ->
18 | let a, b = head
19 | (this :> ITokenizer).Symbol <- a
20 | (this :> ITokenizer).Position <- b
21 | nodeList <- tail
22 | | [] ->
23 | (this :> ITokenizer).Symbol <- Token.EOF( [| |] )
24 |
25 | interface ITokenizer with
26 |
27 | member val Symbol : Token = Token.Empty with get, set
28 |
29 | member val Position : int = 0 with get, set
30 |
31 | member this.Advance() =
32 | this.Next()
33 |
--------------------------------------------------------------------------------
/TestPythonCoreFramework/Program.fs:
--------------------------------------------------------------------------------
1 | module Program = let [<EntryPoint>] main _ = 0
2 |
--------------------------------------------------------------------------------
/TestPythonCoreFramework/TestPythonCoreFramework.fsproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | netcoreapp3.1
5 |
6 | false
7 | false
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 | all
23 | runtime; build; native; contentfiles; analyzers; buildtransitive
24 |
25 |
26 | all
27 | runtime; build; native; contentfiles; analyzers; buildtransitive
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/TestPythonCoreFramework/TestPythonCoreParserSyntaxErrorHandling.fs:
--------------------------------------------------------------------------------
1 |
2 | // PythonCoreFramework.Net - UnitTests for correct handling of SyntaxError during parsing of Python Grammar.
3 | // Written by Richard Magnor Stenbro.
4 |
5 | namespace PythonCoreFramework.UnitTests
6 |
7 | open Xunit
8 | open PythonCoreFramework
9 |
10 |
11 | module TestPythonCoreParserSyntaxErrorHandling =
12 |
13 | [<Fact>]
14 | let ``Illegal atom UnitTest`` () =
15 | try
16 | let lex = new MockTokenizer( [ ( Token.Comma(0, 1, [| |]), 0 ); ( Token.EOF([| |]), 2 ); ] )
17 | lex.Next()
18 | let parser = new Parser(lex)
19 | parser.ParseAtom() |> ignore
20 | with
21 | | :? SyntaxError as ex ->
22 | Assert.Equal( Token.Comma(0, 1, [| |]), ex.Data0)
23 | Assert.Equal( "Illegal literal!", ex.Data1)
24 | | _ ->
25 | Assert.False(false)
26 |
27 | [<Fact>]
28 | let ``star expr UnitTest`` () =
29 | try
30 | let lex = new MockTokenizer( [ ( Token.Comma(0, 1, [| |]), 0 ); ( Token.EOF([| |]), 2 ); ] )
31 | lex.Next()
32 | let parser = new Parser(lex)
33 | parser.ParseStarExpr() |> ignore
34 | with
35 | | :? SyntaxError as ex ->
36 | Assert.Equal( Token.Comma(0, 1, [| |]), ex.Data0)
37 | Assert.Equal( "Expecting '*' in expression!", ex.Data1)
38 | | _ ->
39 | Assert.False(false)
40 |
41 | [<Fact>]
42 | let ``not missing in UnitTest`` () =
43 | try
44 | let lex = new MockTokenizer( [ ( Token.Name(0, 1, "a", [| |]), 0 ); ( Token.Not(2, 5, [| |]), 2 ); ( Token.Name(6, 7, "b", [| |]), 6 ); ( Token.EOF([| |]), 8 ); ] )
45 | lex.Next()
46 | let parser = new Parser(lex)
47 | parser.ParseComparison() |> ignore
48 | with
49 | | :? SyntaxError as ex ->
50 | Assert.Equal( Token.Name(6, 7, "b", [| |]), ex.Data0)
51 | Assert.Equal( "Missing 'in' in 'not in ' expression!", ex.Data1)
52 | | _ ->
53 | Assert.False(false)
54 |
55 | [<Fact>]
56 | let ``lambda UnitTest`` () =
57 | try
58 | let lex = new MockTokenizer( [ ( Token.Comma(0, 1, [| |]), 0 ); ( Token.EOF([| |]), 2 ); ] )
59 | lex.Next()
60 | let parser = new Parser(lex)
61 | parser.ParseLambda() |> ignore
62 | with
63 | | :? SyntaxError as ex ->
64 | Assert.Equal( Token.Comma(0, 1, [| |]), ex.Data0)
65 | Assert.Equal( "Expected 'lambda' in lambda expression!", ex.Data1)
66 | | _ ->
67 | Assert.False(false)
68 |
69 | [<Fact>]
70 | let ``lambda mising colon UnitTest`` () =
71 | try
72 | let lex = new MockTokenizer( [ ( Token.Lambda(0, 6, [| |]), 0 ); ( Token.Name(7, 8, "a", [| |]), 7 ); ( Token.Name(9, 10, "b", [| |]), 9 ); ( Token.EOF([| |]), 11 ); ] )
73 | lex.Next()
74 | let parser = new Parser(lex)
75 | parser.ParseLambda() |> ignore
76 | with
77 | | :? SyntaxError as ex ->
78 | Assert.Equal( Token.Name(9, 10, "b", [| |]), ex.Data0)
79 | Assert.Equal( "Expected ':' in lambda expression!", ex.Data1)
80 | | _ ->
81 | Assert.False(false)
82 |
83 | [<Fact>]
84 | let ``Test missing else part UnitTest`` () =
85 | try
86 | let lex = new MockTokenizer( [ ( Token.Name(0, 1, "a", [| |]), 0 ); ( Token.If(2, 4, [| |]), 2 ); ( Token.Name(5, 6, "b", [| |]), 5 ); ( Token.Name(7, 8, "c", [| |]), 7 ); ( Token.EOF([| |]), 9 ); ] )
87 | lex.Next()
88 | let parser = new Parser(lex)
89 | parser.ParseTest() |> ignore
90 | with
91 | | :? SyntaxError as ex ->
92 | Assert.Equal( Token.Name(7, 8, "c", [| |]), ex.Data0)
93 | Assert.Equal( "Expected 'else' in test expression!", ex.Data1)
94 | | _ ->
95 | Assert.False(false)
96 |
97 | [<Fact>]
98 | let ``Tuple missing trailing ')' UnitTest`` () =
99 | try
100 | let lex = new MockTokenizer( [ ( Token.LeftParen(0, 1, [| |]), 0 ); ( Token.Name(2, 3, "a", [| |]), 2 ); ( Token.Name(4, 5, "b", [| |]), 4 ); ( Token.EOF([| |]), 6 ); ] )
101 | lex.Next()
102 | let parser = new Parser(lex)
103 | parser.ParseTest() |> ignore
104 | with
105 | | :? SyntaxError as ex ->
106 | Assert.Equal( Token.Name(4, 5, "b", [| |]), ex.Data0)
107 | Assert.Equal( "Missing ')' in expression!", ex.Data1)
108 | | _ ->
109 | Assert.False(false)
110 |
111 | [<Fact>]
112 | let ``List missing trailing ']' UnitTest`` () =
113 | try
114 | let lex = new MockTokenizer( [ ( Token.LeftBracket(0, 1, [| |]), 0 ); ( Token.Name(2, 3, "a", [| |]), 2 ); ( Token.Name(4, 5, "b", [| |]), 4 ); ( Token.EOF([| |]), 6 ); ] )
115 | lex.Next()
116 | let parser = new Parser(lex)
117 | parser.ParseTest() |> ignore
118 | with
119 | | :? SyntaxError as ex ->
120 | Assert.Equal( Token.Name(4, 5, "b", [| |]), ex.Data0)
121 | Assert.Equal( "Missing ']' in expression!", ex.Data1)
122 | | _ ->
123 | Assert.False(false)
124 |
125 | [<Fact>]
126 | let ``List missing trailing '}' UnitTest`` () =
127 | try
128 | let lex = new MockTokenizer( [ ( Token.LeftCurly(0, 1, [| |]), 0 ); ( Token.Name(2, 3, "a", [| |]), 2 ); ( Token.Name(4, 5, "b", [| |]), 4 ); ( Token.EOF([| |]), 6 ); ] )
129 | lex.Next()
130 | let parser = new Parser(lex)
131 | parser.ParseTest() |> ignore
132 | with
133 | | :? SyntaxError as ex ->
134 | Assert.Equal( Token.Name(4, 5, "b", [| |]), ex.Data0)
135 | Assert.Equal( "Missing '}' in dictionary!", ex.Data1)
136 | | _ ->
137 | Assert.False(false)
138 |
139 | [<Fact>]
140 | let ``Missing trailing ')' in call UnitTest`` () =
141 | try
142 | let lex = new MockTokenizer( [ ( Token.LeftParen(0, 1, [| |]), 0 ); ( Token.Name(2, 3, "a", [| |]), 2 ); ( Token.Name(4, 5, "b", [| |]), 4 ); ( Token.EOF([| |]), 6 ); ] )
143 | lex.Next()
144 | let parser = new Parser(lex)
145 | parser.ParseTrailer() |> ignore
146 | with
147 | | :? SyntaxError as ex ->
148 | Assert.Equal( Token.Name(4, 5, "b", [| |]), ex.Data0)
149 | Assert.Equal( "Missing ')' in call expression!", ex.Data1)
150 | | _ ->
151 | Assert.False(false)
152 |
153 | [<Fact>]
154 | let ``Missing trailing ']' in index UnitTest`` () =
155 | try
156 | let lex = new MockTokenizer( [ ( Token.LeftBracket(0, 1, [| |]), 0 ); ( Token.Name(2, 3, "a", [| |]), 2 ); ( Token.Name(4, 5, "b", [| |]), 4 ); ( Token.EOF([| |]), 6 ); ] )
157 | lex.Next()
158 | let parser = new Parser(lex)
159 | parser.ParseTrailer() |> ignore
160 | with
161 | | :? SyntaxError as ex ->
162 | Assert.Equal( Token.Name(4, 5, "b", [| |]), ex.Data0)
163 | Assert.Equal( "Missing ']' in index expression!", ex.Data1)
164 | | _ ->
165 | Assert.False(false)
166 |
167 | [<Fact>]
168 | let ``Missing name after dot in dot name UnitTest`` () =
169 | try
170 | let lex = new MockTokenizer( [ ( Token.Dot(0, 1, [| |]), 0 ); ( Token.Dot(1, 2, [| |]), 1 ); ( Token.Name(3, 4, "b", [| |]), 3 ); ( Token.EOF([| |]), 5 ); ] )
171 | lex.Next()
172 | let parser = new Parser(lex)
173 | parser.ParseTrailer() |> ignore
174 | with
175 | | :? SyntaxError as ex ->
176 | Assert.Equal( Token.Dot(1, 2, [| |]), ex.Data0)
177 | Assert.Equal( "Expecting name literal after '.'", ex.Data1)
178 | | _ ->
179 | Assert.False(false)
180 |
181 | [<Fact>]
182 | let ``Illegal trailer UnitTest`` () =
183 | try
184 | let lex = new MockTokenizer( [ ( Token.Comma(0, 1, [| |]), 0 ); ( Token.EOF([| |]), 2 ); ] )
185 | lex.Next()
186 | let parser = new Parser(lex)
187 | parser.ParseTrailer() |> ignore
188 | with
189 | | :? SyntaxError as ex ->
190 | Assert.Equal( Token.Comma(0, 1, [| |]), ex.Data0)
191 | Assert.Equal( "Expected '(', '[' or '.' in trailer expression!", ex.Data1)
192 | | _ ->
193 | Assert.False(false)
194 |
195 | [<Fact>]
196 | let ``subscript missing item with comma UnitTest`` () =
197 | try
198 | let lex = new MockTokenizer( [ ( Token.Comma(0, 1, [| |]), 0 ); ( Token.EOF([| |]), 2 ); ] )
199 | lex.Next()
200 | let parser = new Parser(lex)
201 | parser.ParseSubscript() |> ignore
202 | with
203 | | :? SyntaxError as ex ->
204 | Assert.Equal( Token.Comma(0, 1, [| |]), ex.Data0)
205 | Assert.Equal( "Missing subscript item!", ex.Data1)
206 | | _ ->
207 | Assert.False(false)
208 |
209 | [<Fact>]
210 | let ``subscript missing item with ']' UnitTest`` () =
211 | try
212 | let lex = new MockTokenizer( [ ( Token.RightBracket(0, 1, [| |]), 0 ); ( Token.EOF([| |]), 2 ); ] )
213 | lex.Next()
214 | let parser = new Parser(lex)
215 | parser.ParseSubscript() |> ignore
216 | with
217 | | :? SyntaxError as ex ->
218 | Assert.Equal( Token.RightBracket(0, 1, [| |]), ex.Data0)
219 | Assert.Equal( "Missing subscript item!", ex.Data1)
220 | | _ ->
221 | Assert.False(false)
222 |
223 | [<Fact>]
224 | let ``dictionary entry missing item with ':' UnitTest`` () =
225 | try
226 | let lex = new MockTokenizer( [ ( Token.Name(0, 1, "a", [| |]), 0 ); ( Token.Name(2, 3, "b", [| |]), 2 ); ( Token.EOF([| |]), 4 ); ] )
227 | lex.Next()
228 | let parser = new Parser(lex)
229 | parser.ParseSubscript() |> ignore
230 | with
231 | | :? SyntaxError as ex ->
232 | Assert.Equal( Token.Name(2, 3, "b", [| |]), ex.Data0)
233 | Assert.Equal( "Missing ':' in dictionary entry!", ex.Data1)
234 | | _ ->
235 | Assert.False(false)
236 |
237 | [<Fact>]
238 | let ``argument not allowed UnitTest`` () =
239 | try
240 | let lex = new MockTokenizer( [ ( Token.Comma(0, 1, [| |]), 0 ); ( Token.EOF([| |]), 2 ); ] )
241 | lex.Next()
242 | let parser = new Parser(lex)
243 | parser.ParseArgument() |> ignore
244 | with
245 | | :? SyntaxError as ex ->
246 | Assert.Equal( Token.Comma(0, 1, [| |]), ex.Data0)
247 | Assert.Equal( "Missing argument!", ex.Data1)
248 | | _ ->
249 | Assert.False(false)
250 |
251 | [<Fact>]
252 | let ``argument * mising name UnitTest`` () =
253 | try
254 | let lex = new MockTokenizer( [ ( Token.Mul(0, 1, [| |]), 0 ); ( Token.Comma(2, 3, [| |]), 2 ); ( Token.EOF([| |]), 4 ); ] )
255 | lex.Next()
256 | let parser = new Parser(lex)
257 | parser.ParseArgument() |> ignore
258 | with
259 | | :? SyntaxError as ex ->
260 | Assert.Equal( Token.Comma(2, 3, [| |]), ex.Data0)
261 | Assert.Equal( "Missing argument!", ex.Data1)
262 | | _ ->
263 | Assert.False(false)
264 |
265 | [<Fact>]
266 | let ``argument ** mising name UnitTest`` () =
267 | try
268 | let lex = new MockTokenizer( [ ( Token.Power(0, 2, [| |]), 0 ); ( Token.Comma(2, 3, [| |]), 2 ); ( Token.EOF([| |]), 4 ); ] )
269 | lex.Next()
270 | let parser = new Parser(lex)
271 | parser.ParseArgument() |> ignore
272 | with
273 | | :? SyntaxError as ex ->
274 | Assert.Equal( Token.Comma(2, 3, [| |]), ex.Data0)
275 | Assert.Equal( "Missing argument!", ex.Data1)
276 | | _ ->
277 | Assert.False(false)
278 |
279 | [<Fact>]
280 | let ``comp for missing for UnitTest`` () =
281 | try
282 | let lex = new MockTokenizer( [ ( Token.Comma(0, 1, [| |]), 0 ); ( Token.EOF([| |]), 2 ); ] )
283 | lex.Next()
284 | let parser = new Parser(lex)
285 | parser.ParseSyncCompFor() |> ignore
286 | with
287 | | :? SyntaxError as ex ->
288 | Assert.Equal( Token.Comma(0, 1, [| |]), ex.Data0)
289 | Assert.Equal( "Missing 'for' in comprehension expression!", ex.Data1)
290 | | _ ->
291 | Assert.False(false)
292 |
293 | [<Fact>]
294 | let ``comp for missing in UnitTest`` () =
295 | try
296 | let lex = new MockTokenizer( [ ( Token.For(0, 3, [| |]), 0 ); ( Token.Name(4, 5, "a", [| |]), 4 ); ( Token.Name(7, 8, "b", [| |]), 7 ); ( Token.EOF([| |]), 9 ); ] )
297 | lex.Next()
298 | let parser = new Parser(lex)
299 | parser.ParseSyncCompFor() |> ignore
300 | with
301 | | :? SyntaxError as ex ->
302 | Assert.Equal( Token.Name(7, 8, "b", [| |]), ex.Data0)
303 | Assert.Equal( "Missing 'in' in for comprehension expression!", ex.Data1)
304 | | _ ->
305 | Assert.False(false)
306 |
307 | [<Fact>]
308 | let ``comp if missing if UnitTest`` () =
309 | try
310 | let lex = new MockTokenizer( [ ( Token.Comma(0, 1, [| |]), 0 ); ( Token.EOF([| |]), 2 ); ] )
311 | lex.Next()
312 | let parser = new Parser(lex)
313 | parser.ParseCompIf() |> ignore
314 | with
315 | | :? SyntaxError as ex ->
316 | Assert.Equal( Token.Comma(0, 1, [| |]), ex.Data0)
317 | Assert.Equal( "Missing 'if' in comprehension expression!", ex.Data1)
318 | | _ ->
319 | Assert.False(false)
320 |
321 | [<Fact>]
322 | let ``yield expr missing yield UnitTest`` () =
323 | try
324 | let lex = new MockTokenizer( [ ( Token.Comma(0, 1, [| |]), 0 ); ( Token.EOF([| |]), 2 ); ] )
325 | lex.Next()
326 | let parser = new Parser(lex)
327 | parser.ParseYieldExpr() |> ignore
328 | with
329 | | :? SyntaxError as ex ->
330 | Assert.Equal( Token.Comma(0, 1, [| |]), ex.Data0)
331 | Assert.Equal( "Missing 'yield' in yield expression!", ex.Data1)
332 | | _ ->
333 | Assert.False(false)
334 |
335 | [<Fact>]
336 | let ``function suite missing indent UnitTest`` () =
337 | try
338 | let lex = new MockTokenizer( [ ( Token.Newline(0, 2, [| |]), 0 ); ( Token.Pass(3, 7, [| |]), 3 ); ( Token.EOF([| |]), 8 ); ] )
339 | lex.Next()
340 | let parser = new Parser(lex)
341 | parser.ParseFuncBodySuite() |> ignore
342 | with
343 | | :? SyntaxError as ex ->
344 | Assert.Equal( Token.Pass(3, 7, [| |]), ex.Data0)
345 | Assert.Equal( "Expecting indentation in function block statement!", ex.Data1)
346 | | _ ->
347 | Assert.False(false)
348 |
349 | [<Fact>]
350 | let ``function suite missing newline after typecomment UnitTest`` () =
351 | try
352 | let lex = new MockTokenizer( [ ( Token.Newline(0, 2, [| |]), 0 ); ( Token.Indent([| |]), 2 ); ( Token.TypeComment(3, 13, "#type: int"), 3 ); ( Token.Pass(14, 18, [| |]), 14 ); ( Token.EOF([| |]), 19 ); ] )
353 | lex.Next()
354 | let parser = new Parser(lex)
355 | parser.ParseFuncBodySuite() |> ignore
356 | with
357 | | :? SyntaxError as ex ->
358 | Assert.Equal( Token.Pass(14, 18, [| |]), ex.Data0)
359 | Assert.Equal( "Expecting newline after type comment!", ex.Data1)
360 | | _ ->
361 | Assert.False(false)
362 |
363 | [<Fact>]
364 | let ``functype missing ')' UnitTest`` () =
365 | try
366 | let lex = new MockTokenizer( [ ( Token.LeftParen(0, 1, [| |]), 1 ); ( Token.Name(2, 3, "a", [| |]), 2 ); ( Token.Name(4, 5, "b", [| |]), 4 ); ( Token.EOF([| |]), 6 ); ] )
367 | lex.Next()
368 | let parser = new Parser(lex)
369 | parser.ParseFuncType() |> ignore
370 | with
371 | | :? SyntaxError as ex ->
372 | Assert.Equal( Token.Name(4, 5, "b", [| |]), ex.Data0)
373 | Assert.Equal( "Expecting ')' in func definition!", ex.Data1)
374 | | _ ->
375 | Assert.False(false)
376 |
377 | [<Fact>]
378 | let ``functype missing '->' UnitTest`` () =
379 | try
380 | let lex = new MockTokenizer( [ ( Token.LeftParen(0, 1, [| |]), 1 ); ( Token.Name(2, 3, "a", [| |]), 2 ); ( Token.RightParen(4, 5, [| |]), 4 ); ( Token.Name(6, 7, "b", [| |]), 6 ); ( Token.EOF([| |]), 8 ); ] )
381 | lex.Next()
382 | let parser = new Parser(lex)
383 | parser.ParseFuncType() |> ignore
384 | with
385 | | :? SyntaxError as ex ->
386 | Assert.Equal( Token.Name(6, 7, "b", [| |]), ex.Data0)
387 | Assert.Equal( "Expecting '->' in func definition!", ex.Data1)
388 | | _ ->
389 | Assert.False(false)
390 |
391 | [<Fact>]
392 | let ``functype missing '(' UnitTest`` () =
393 | try
394 | let lex = new MockTokenizer( [ ( Token.Name(0, 1, "a", [| |]), 0 ); ( Token.RightParen(2, 3, [| |]), 2 ); ( Token.Name(4, 5, "b", [| |]), 4 ); ( Token.EOF([| |]), 6 ); ] )
395 | lex.Next()
396 | let parser = new Parser(lex)
397 | parser.ParseFuncType() |> ignore
398 | with
399 | | :? SyntaxError as ex ->
400 | Assert.Equal( Token.Name(0, 1, "a", [| |]), ex.Data0)
401 | Assert.Equal( "Expecting '(' in func definition!", ex.Data1)
402 | | _ ->
403 | Assert.False(false)
404 |
405 | [<Fact>]  // restored: attribute was garbled to "[]" in the dump
406 | let ``if statement missing 'if' UnitTest`` () =
407 | try
408 | let lex = new MockTokenizer( [ ( Token.Name(0, 1, "a", [| |]), 0 ); ( Token.Colon(2, 3, [| |]), 2 ); ( Token.Pass(4, 8, [| |]), 4 ); ( Token.Newline(9, 11, [| |]), 9 ); ( Token.EOF([| |]), 12 ); ] )
409 | lex.Next()
410 | let parser = new Parser(lex)
411 | parser.ParseIfStmt() |> ignore
412 | with
413 | | :? SyntaxError as ex ->
414 | Assert.Equal( Token.Name(0, 1, "a", [| |]), ex.Data0)
415 | Assert.Equal( "Expecting 'if' statement!", ex.Data1)
416 | | _ ->
417 | Assert.True(false)  // fix: Assert.False(false) always passed, hiding unexpected exception types
418 |
419 | [<Fact>]
420 | let ``if statement missing ':' UnitTest`` () =
421 | try
422 | let lex = new MockTokenizer( [ ( Token.If(0, 2, [| |]), 0 ); ( Token.Name(3, 4, "a", [| |]), 3 ); ( Token.Pass(5, 9, [| |]), 5 ); ( Token.Newline(9, 11, [| |]), 9 ); ( Token.EOF([| |]), 12 ); ] )
423 | lex.Next()
424 | let parser = new Parser(lex)
425 | parser.ParseIfStmt() |> ignore
426 | with
427 | | :? SyntaxError as ex ->
428 | Assert.Equal( Token.Pass(5, 9, [| |]), ex.Data0)
429 | Assert.Equal( "Expecting ':' in if statement!", ex.Data1)
430 | | _ ->
431 | Assert.True(false)
432 |
433 | [<Fact>]
434 | let ``elif statement missing ':' UnitTest`` () =
435 | try
436 | let lex = new MockTokenizer( [ ( Token.If(0, 2, [| |]), 0 ); ( Token.Name(3, 4, "a", [| |]), 3 ); ( Token.Colon(5, 6, [| |]), 5 ); ( Token.Pass(7, 11, [| |]), 7 ); ( Token.Newline(11, 13, [| |]), 11 );
437 | ( Token.Elif(14, 18, [| |]), 14 ); ( Token.Name(19, 20, "b", [| |]), 19 ); ( Token.Pass(21, 25, [| |]), 21 ); ( Token.Newline(26, 28, [| |]), 26 );
438 | ( Token.EOF([| |]), 26 ); ] )
439 | lex.Next()
440 | let parser = new Parser(lex)
441 | parser.ParseIfStmt() |> ignore
442 | with
443 | | :? SyntaxError as ex ->
444 | Assert.Equal( Token.Pass(21, 25, [| |]), ex.Data0)
445 | Assert.Equal( "Expecting ':' in elif statement!", ex.Data1)
446 | | _ ->
447 | Assert.True(false)
448 |
449 | [<Fact>]
450 | let ``else statement after if statement missing ':' UnitTest`` () =
451 | try
452 | let lex = new MockTokenizer( [ ( Token.If(0, 2, [| |]), 0 ); ( Token.Name(3, 4, "a", [| |]), 3 ); ( Token.Colon(5, 6, [| |]), 5 ); ( Token.Pass(7, 11, [| |]), 7 ); ( Token.Newline(11, 13, [| |]), 11 );
453 | ( Token.Else(14, 18, [| |]), 14 ); ( Token.Pass(20, 24, [| |]), 20 ); ( Token.Newline(25, 27, [| |]), 25 );
454 | ( Token.EOF([| |]), 28 ); ] )
455 | lex.Next()
456 | let parser = new Parser(lex)
457 | parser.ParseIfStmt() |> ignore
458 | with
459 | | :? SyntaxError as ex ->
460 | Assert.Equal( Token.Pass(20, 24, [| |]), ex.Data0)
461 | Assert.Equal( "Expecting ':' in else statement!", ex.Data1)
462 | | _ ->
463 | Assert.True(false)
464 |
465 | [<Fact>]
466 | let ``while statement missing 'while' UnitTest`` () =
467 | try
468 | let lex = new MockTokenizer( [ ( Token.Name(0, 1, "a", [| |]), 0 ); ( Token.Colon(2, 3, [| |]), 2 ); ( Token.Pass(4, 8, [| |]), 4 ); ( Token.Newline(9, 11, [| |]), 9 ); ( Token.EOF([| |]), 12 ); ] )
469 | lex.Next()
470 | let parser = new Parser(lex)
471 | parser.ParseWhileStmt() |> ignore
472 | with
473 | | :? SyntaxError as ex ->
474 | Assert.Equal( Token.Name(0, 1, "a", [| |]), ex.Data0)
475 | Assert.Equal( "Expecting 'while' statement!", ex.Data1)
476 | | _ ->
477 | Assert.True(false)
478 |
479 | [<Fact>]
480 | let ``while statement missing ':' UnitTest`` () =
481 | try
482 | let lex = new MockTokenizer( [ ( Token.While(0, 5, [| |]), 0 ); ( Token.Name(6, 7, "a", [| |]), 6 ); ( Token.Pass(8, 12, [| |]), 8 ); ( Token.Newline(13, 15, [| |]), 13 ); ( Token.EOF([| |]), 16 ); ] )
483 | lex.Next()
484 | let parser = new Parser(lex)
485 | parser.ParseWhileStmt() |> ignore
486 | with
487 | | :? SyntaxError as ex ->
488 | Assert.Equal( Token.Pass(8, 12, [| |]), ex.Data0)
489 | Assert.Equal( "Expecting ':' in while statement!", ex.Data1)
490 | | _ ->
491 | Assert.True(false)
492 |
493 | [<Fact>]
494 | let ``else statement after while statement missing ':' UnitTest`` () =
495 | try
496 | let lex = new MockTokenizer( [ ( Token.While(0, 5, [| |]), 0 ); ( Token.Name(6, 7, "a", [| |]), 6 ); ( Token.Colon(8, 9, [| |]), 8 ); ( Token.Pass(10, 14, [| |]), 10 ); ( Token.Newline(14, 16, [| |]), 12 );
497 | ( Token.Else(16, 20, [| |]), 16 ); ( Token.Pass(22, 26, [| |]), 22 ); ( Token.Newline(27, 29, [| |]), 27 );
498 | ( Token.EOF([| |]), 30 ); ] )
499 | lex.Next()
500 | let parser = new Parser(lex)
501 | parser.ParseWhileStmt() |> ignore
502 | with
503 | | :? SyntaxError as ex ->
504 | Assert.Equal( Token.Pass(22, 26, [| |]), ex.Data0)
505 | Assert.Equal( "Expecting ':' in else statement!", ex.Data1)
506 | | _ ->
507 | Assert.True(false)
508 |
509 | [<Fact>]
510 | let ``for statement missing 'for' UnitTest`` () =
511 | try
512 | let lex = new MockTokenizer( [ ( Token.Name(0, 1, "a", [| |]), 0 ); ( Token.Colon(2, 3, [| |]), 2 ); ( Token.Pass(4, 8, [| |]), 4 ); ( Token.Newline(9, 11, [| |]), 9 ); ( Token.EOF([| |]), 12 ); ] )
513 | lex.Next()
514 | let parser = new Parser(lex)
515 | parser.ParseForStmt() |> ignore
516 | with
517 | | :? SyntaxError as ex ->
518 | Assert.Equal( Token.Name(0, 1, "a", [| |]), ex.Data0)
519 | Assert.Equal( "Expecting 'for' statement!", ex.Data1)
520 | | _ ->
521 | Assert.True(false)
522 |
523 | [<Fact>]
524 | let ``for statement missing 'in' UnitTest`` () =
525 | try
526 | let lex = new MockTokenizer( [ ( Token.For(0, 3, [| |]), 0 ); ( Token.Name(6, 7, "a", [| |]), 6 ); ( Token.Pass(8, 12, [| |]), 8 ); ( Token.Newline(13, 15, [| |]), 13 ); ( Token.EOF([| |]), 16 ); ] )
527 | lex.Next()
528 | let parser = new Parser(lex)
529 | parser.ParseForStmt() |> ignore
530 | with
531 | | :? SyntaxError as ex ->
532 | Assert.Equal( Token.Pass(8, 12, [| |]), ex.Data0)
533 | Assert.Equal( "Expecting 'in' in for statement!", ex.Data1)
534 | | _ ->
535 | Assert.True(false)
536 |
537 | [<Fact>]
538 | let ``for statement missing ':' UnitTest`` () =
539 | try
540 | let lex = new MockTokenizer( [ ( Token.For(0, 3, [| |]), 0 ); ( Token.Name(6, 7, "a", [| |]), 6 ); ( Token.In(8, 10, [| |]), 8 ); ( Token.Name(11, 12, "b", [| |]), 11 ); ( Token.Pass(14, 18, [| |]), 14 ); ( Token.Newline(19, 21, [| |]), 19 ); ( Token.EOF([| |]), 22 ); ] )
541 | lex.Next()
542 | let parser = new Parser(lex)
543 | parser.ParseForStmt() |> ignore
544 | with
545 | | :? SyntaxError as ex ->
546 | Assert.Equal( Token.Pass(14, 18, [| |]), ex.Data0)
547 | Assert.Equal( "Expecting ':' in for statement!", ex.Data1)
548 | | _ ->
549 | Assert.True(false)
550 |
551 | [<Fact>]
552 | let ``else statement after for statement missing ':' UnitTest`` () =
553 | try
554 | let lex = new MockTokenizer( [ ( Token.For(0, 3, [| |]), 0 ); ( Token.Name(4, 5, "a", [| |]), 4 ); ( Token.In(6, 8, [| |]), 6 ); ( Token.Name(10, 11, "b", [| |]), 10 ); ( Token.Colon(12, 13, [| |]), 12 ); ( Token.Pass(14, 18, [| |]), 14 ); ( Token.Newline(18, 20, [| |]), 18 );
555 | ( Token.Else(21, 25, [| |]), 21 ); ( Token.Pass(26, 30, [| |]), 26 ); ( Token.Newline(31, 33, [| |]), 31 );
556 | ( Token.EOF([| |]), 34 ); ] )
557 | lex.Next()
558 | let parser = new Parser(lex)
559 | parser.ParseForStmt() |> ignore
560 | with
561 | | :? SyntaxError as ex ->
562 | Assert.Equal( Token.Pass(26, 30, [| |]), ex.Data0)
563 | Assert.Equal( "Expecting ':' in else statement!", ex.Data1)
564 | | _ ->
565 | Assert.True(false)
566 |
567 | [<Fact>]  // restored: attribute was garbled to "[]" in the dump
568 | let ``try statement missing 'try' UnitTest`` () =
569 | try
570 | let lex = new MockTokenizer( [ ( Token.Name(0, 1, "a", [| |]), 0 ); ( Token.Colon(2, 3, [| |]), 2 ); ( Token.Pass(4, 8, [| |]), 4 ); ( Token.Newline(9, 11, [| |]), 9 ); ( Token.EOF([| |]), 12 ); ] )
571 | lex.Next()
572 | let parser = new Parser(lex)
573 | parser.ParseTryStmt() |> ignore  // NOTE(review): if no exception is raised the test passes silently — consider failing here
574 | with
575 | | :? SyntaxError as ex ->
576 | Assert.Equal( Token.Name(0, 1, "a", [| |]), ex.Data0)
577 | Assert.Equal( "Expecting 'try' in try statement!", ex.Data1)
578 | | _ ->
579 | Assert.True(false)  // fix: Assert.False(false) always passed, hiding unexpected exception types
580 |
581 | [<Fact>]
582 | let ``try statement missing ':' UnitTest`` () =
583 | try
584 | let lex = new MockTokenizer( [ ( Token.Try(0, 3, [| |]), 0 ); ( Token.Pass(4, 8, [| |]), 4 ); ( Token.Newline(9, 11, [| |]), 9 ); ( Token.EOF([| |]), 12 ); ] )
585 | lex.Next()
586 | let parser = new Parser(lex)
587 | parser.ParseTryStmt() |> ignore
588 | with
589 | | :? SyntaxError as ex ->
590 | Assert.Equal( Token.Pass(4, 8, [| |]), ex.Data0)
591 | Assert.Equal( "Expecting ':' in try statement!", ex.Data1)
592 | | _ ->
593 | Assert.True(false)
594 |
595 | [<Fact>]
596 | let ``try statement missing ':' in finally UnitTest`` () =
597 | try
598 | let lex = new MockTokenizer( [ ( Token.Try(0, 4, [| |]), 0 ); ( Token.Colon(7, 8, [| |]), 7 ); ( Token.Pass(9, 13, [| |]), 9 ); ( Token.Newline(14, 16, [| |]), 14 );
599 | ( Token.Finally(17, 24, [| |]), 17 ); ( Token.Pass(25, 29, [| |]), 25 ); ( Token.Newline(30, 32, [| |]), 30 );
600 | ( Token.EOF([| |]), 33 ); ] )
601 | lex.Next()
602 | let parser = new Parser(lex)
603 | parser.ParseTryStmt() |> ignore
604 | with
605 | | :? SyntaxError as ex ->
606 | Assert.Equal( Token.Pass(25, 29, [| |]), ex.Data0)
607 | Assert.Equal( "Expecting ':' in finally statement!", ex.Data1)
608 | | _ ->
609 | Assert.True(false)
610 |
611 | [<Fact>]
612 | let ``try statement missing ':' in finally but with except item UnitTest`` () =
613 | try
614 | let lex = new MockTokenizer( [ ( Token.Try(0, 4, [| |]), 0 ); ( Token.Colon(7, 8, [| |]), 7 ); ( Token.Pass(9, 13, [| |]), 9 ); ( Token.Newline(14, 16, [| |]), 14 );
615 | ( Token.Except(17, 23, [| |]), 17 ); ( Token.Pass(26, 30, [| |]), 26 ); ( Token.Newline(31, 33, [| |]), 31 );
616 | ( Token.Finally(32, 39, [| |]), 32 ); ( Token.Colon(40, 41, [| |]), 40); ( Token.Pass(42, 46, [| |]), 42 ); ( Token.Newline(47, 49, [| |]), 47 );
617 | ( Token.EOF([| |]), 50 ); ] )
618 | lex.Next()
619 | let parser = new Parser(lex)
620 | parser.ParseTryStmt() |> ignore
621 | with
622 | | :? SyntaxError as ex ->
623 | Assert.Equal( Token.Pass(26, 30, [| |]), ex.Data0)
624 | Assert.Equal( "Illegal literal!", ex.Data1)
625 | | _ ->
626 | Assert.True(false)
627 |
628 | [<Fact>]
629 | let ``except missing ':' UnitTest`` () =
630 | try
631 | let lex = new MockTokenizer( [ ( Token.Try(0, 4, [| |]), 0 ); ( Token.Colon(7, 8, [| |]), 7 ); ( Token.Pass(9, 13, [| |]), 9 ); ( Token.Newline(14, 16, [| |]), 14 );
632 | ( Token.Except(17, 23, [| |]), 17 ); ( Token.Colon(24, 25, [| |]), 24 ); ( Token.Pass(26, 30, [| |]), 26 ); ( Token.Newline(31, 33, [| |]), 31 );
633 | ( Token.EOF([| |]), 50 ); ] )
634 | lex.Next()
635 | let parser = new Parser(lex)
636 | parser.ParseTryStmt() |> ignore
637 | with
638 | | :? SyntaxError as ex ->
639 | Assert.Equal( Token.Pass(26, 30, [| |]), ex.Data0)
640 | Assert.Equal( "", ex.Data1)
641 | | _ ->
642 | Assert.True(false)
643 |
644 | [<Fact>]
645 | let ``except missing ':' but with test argument UnitTest`` () =
646 | try
647 | let lex = new MockTokenizer( [ ( Token.Try(0, 4, [| |]), 0 ); ( Token.Colon(7, 8, [| |]), 7 ); ( Token.Pass(9, 13, [| |]), 9 ); ( Token.Newline(14, 16, [| |]), 14 );
648 | ( Token.Except(17, 23, [| |]), 17 ); ( Token.Name(24, 25, "c", [| |]), 24 ); ( Token.Pass(26, 30, [| |]), 26 ); ( Token.Newline(31, 33, [| |]), 31 );
649 | ( Token.EOF([| |]), 50 ); ] )
650 | lex.Next()
651 | let parser = new Parser(lex)
652 | parser.ParseTryStmt() |> ignore
653 | with
654 | | :? SyntaxError as ex ->
655 | Assert.Equal( Token.Pass(26, 30, [| |]), ex.Data0)
656 | Assert.Equal( "Expecting ':' in except statement!", ex.Data1)
657 | | _ ->
658 | Assert.True(false)
659 |
660 | [<Fact>]
661 | let ``except missing ':' but with test argument and 'as' without expr UnitTest`` () =
662 | try
663 | let lex = new MockTokenizer( [ ( Token.Try(0, 4, [| |]), 0 ); ( Token.Colon(7, 8, [| |]), 7 ); ( Token.Pass(9, 13, [| |]), 9 ); ( Token.Newline(14, 16, [| |]), 14 );
664 | ( Token.Except(17, 23, [| |]), 17 ); ( Token.Name(24, 25, "c", [| |]), 24 ); ( Token.As(26, 27, [| |]), 26 ); ( Token.Colon(27, 28, [| |]), 27 ); ( Token.Pass(30, 34, [| |]), 30 ); ( Token.Newline(35, 37, [| |]), 35 );
665 | ( Token.EOF([| |]), 38 ); ] )
666 | lex.Next()
667 | let parser = new Parser(lex)
668 | parser.ParseTryStmt() |> ignore
669 | with
670 | | :? SyntaxError as ex ->
671 | Assert.Equal( Token.Colon(27, 28, [| |]), ex.Data0)
672 | Assert.Equal( "Missing name literal afer 'as' in except statement!", ex.Data1)
673 | | _ ->
674 | Assert.True(false)
675 |
676 | [<Fact>]
677 | let ``else missing ':' in try / execept / else statement UnitTest`` () =
678 | try
679 | let lex = new MockTokenizer( [ ( Token.Try(0, 4, [| |]), 0 ); ( Token.Colon(7, 8, [| |]), 7 ); ( Token.Pass(9, 13, [| |]), 9 ); ( Token.Newline(14, 16, [| |]), 14 );
680 | ( Token.Except(17, 23, [| |]), 17 ); ( Token.Name(24, 25, "c", [| |]), 24 ); ( Token.As(26, 27, [| |]), 26 ); ( Token.Name(28, 29, "b", [| |]), 28 ); ( Token.Colon(30, 31, [| |]), 30 ); ( Token.Pass(32, 36, [| |]), 32 ); ( Token.Newline(37, 39, [| |]), 37 );
681 | ( Token.Else(38, 42, [| |]), 38 ); ( Token.Pass(43, 47, [| |]), 43 ); ( Token.Newline(48, 50, [| |]), 48);
682 | ( Token.EOF([| |]), 51 ); ] )
683 | lex.Next()
684 | let parser = new Parser(lex)
685 | parser.ParseTryStmt() |> ignore
686 | with
687 | | :? SyntaxError as ex ->
688 | Assert.Equal( Token.Pass(43, 47, [| |]), ex.Data0)
689 | Assert.Equal( "Expecting ':' in else statement!", ex.Data1)
690 | | _ ->
691 | Assert.True(false)
692 |
693 | [<Fact>]
694 | let ``with statement missing 'with' UnitTest`` () =
695 | try
696 | let lex = new MockTokenizer( [ ( Token.Name(0, 1, "a", [| |]), 0 ); ( Token.Colon(2, 3, [| |]), 2 ); ( Token.Pass(4, 8, [| |]), 4 ); ( Token.Newline(9, 11, [| |]), 9 ); ( Token.EOF([| |]), 12 ); ] )
697 | lex.Next()
698 | let parser = new Parser(lex)
699 | parser.ParseWithStmt() |> ignore
700 | with
701 | | :? SyntaxError as ex ->
702 | Assert.Equal( Token.Name(0, 1, "a", [| |]), ex.Data0)
703 | Assert.Equal( "Expecting 'with' in with statement!", ex.Data1)
704 | | _ ->
705 | Assert.True(false)
706 |
707 | [<Fact>]
708 | let ``with statement missing ':' UnitTest`` () =
709 | try
710 | let lex = new MockTokenizer( [ ( Token.With(0, 4, [| |]), 0 ); ( Token.Name(5, 6, "a", [| |]), 5 ); ( Token.Pass(9, 13, [| |]), 9 ); ( Token.Newline(14, 16, [| |]), 14 ); ( Token.EOF([| |]), 17 ); ] )
711 | lex.Next()
712 | let parser = new Parser(lex)
713 | parser.ParseWithStmt() |> ignore
714 | with
715 | | :? SyntaxError as ex ->
716 | Assert.Equal( Token.Pass(9, 13, [| |]), ex.Data0)
717 | Assert.Equal( "Expecting ':' in with statement!", ex.Data1)
718 | | _ ->
719 | Assert.True(false)
720 |
721 | [<Fact>]
722 | let ``with statement with with_item with 'as' missing next argument UnitTest`` () =
723 | try
724 | let lex = new MockTokenizer( [ ( Token.With(0, 4, [| |]), 0 ); ( Token.Name(5, 6, "a", [| |]), 5 ); ( Token.As(7, 9, [| |]), 7); ( Token.Colon(10, 11, [| |]), 10 ); ( Token.Pass(12, 16, [| |]), 12 ); ( Token.Newline(17, 19, [| |]), 17 ); ( Token.EOF([| |]), 20 ); ] )
725 | lex.Next()
726 | let parser = new Parser(lex)
727 | parser.ParseWithStmt() |> ignore
728 | with
729 | | :? SyntaxError as ex ->
730 | Assert.Equal( Token.Colon(10, 11, [| |]), ex.Data0)
731 | Assert.Equal( "Illegal literal!", ex.Data1)
732 | | _ ->
733 | Assert.True(false)
734 |
735 | [<Fact>]  // restored: attribute was garbled to "[]" in the dump
736 | let ``Suite missing indent token UnitTest`` () =
737 | try
738 | let lex = new MockTokenizer( [ ( Token.Newline(0, 2, [| |]), 0 ); ( Token.Pass(3, 7, [| |]), 3 ); ( Token.Newline(8, 10, [| |]), 8 ); ( Token.EOF([| |]), 11 ); ] )
739 | lex.Next()
740 | let parser = new Parser(lex)
741 | parser.ParseSuite() |> ignore  // NOTE(review): if no exception is raised the test passes silently — consider failing here
742 | with
743 | | :? SyntaxError as ex ->
744 | Assert.Equal( Token.Pass(3, 7, [| |]), ex.Data0)
745 | Assert.Equal( "Expecting indentation in statement block!", ex.Data1)
746 | | _ ->
747 | Assert.True(false)  // fix: Assert.False(false) always passed, hiding unexpected exception types
748 |
749 | [<Fact>]
750 | let ``Suite missing dedent token UnitTest`` () =
751 | try
752 | let lex = new MockTokenizer( [ ( Token.Newline(0, 2, [| |]), 0 ); ( Token.Indent([| |]), 3 ); ( Token.Pass(3, 7, [| |]), 3 ); ( Token.Newline(8, 10, [| |]), 8 ); ( Token.EOF([| |]), 11 ); ] )
753 | lex.Next()
754 | let parser = new Parser(lex)
755 | parser.ParseSuite() |> ignore
756 | with
757 | | :? SyntaxError as ex ->
758 | Assert.Equal( Token.EOF([| |]), ex.Data0)
759 | Assert.Equal( "Expecting dedentation in statement block!", ex.Data1)
760 | | _ ->
761 | Assert.True(false)
762 |
763 | [<Fact>]
764 | let ``Simple statement missing trailing newline UnitTest`` () =
765 | try
766 | let lex = new MockTokenizer( [ ( Token.Pass(0, 4, [| |]), 0 ); ( Token.EOF([| |]), 5 ); ] )
767 | lex.Next()
768 | let parser = new Parser(lex)
769 | parser.ParseSuite() |> ignore
770 | with
771 | | :? SyntaxError as ex ->
772 | Assert.Equal( Token.EOF([| |]), ex.Data0)
773 | Assert.Equal( "Expecting newline after statements!", ex.Data1)
774 | | _ ->
775 | Assert.True(false)
776 |
777 | [<Fact>]
778 | let ``Simple statement missing ';' between statements before newline UnitTest`` () =
779 | try
780 | let lex = new MockTokenizer( [ ( Token.Pass(0, 4, [| |]), 0 ); ( Token.Pass(5, 9, [| |]), 5 ); ( Token.Newline(10, 12, [| |]), 10 ); ( Token.EOF([| |]), 13 ); ] )
781 | lex.Next()
782 | let parser = new Parser(lex)
783 | parser.ParseSuite() |> ignore
784 | with
785 | | :? SyntaxError as ex ->
786 | Assert.Equal( Token.Pass(5, 9, [| |]), ex.Data0)
787 | Assert.Equal( "Expecting newline after statements!", ex.Data1)
788 | | _ ->
789 | Assert.True(false)
790 |
791 | [<Fact>]
792 | let ``Asign after typecomment error UnitTest`` () =
793 | try
794 | let lex = new MockTokenizer( [ ( Token.Name(0, 1, "a", [| |]), 0 ); ( Token.Assign(2, 3, [| |]), 2 ); ( Token.Name(4, 5, "b", [| |]), 4 ); ( Token.TypeComment(6, 16, "#type: int"), 6 ); ( Token.Assign(17, 18, [| |]), 17 ); ( Token.Name(19, 20, "c", [| |]), 19 ); ( Token.Newline(21, 23, [| |]), 21 ); ( Token.EOF([| |]), 24 ); ] )
795 | lex.Next()
796 | let parser = new Parser(lex)
797 | parser.ParseStmt() |> ignore
798 | with
799 | | :? SyntaxError as ex ->
800 | Assert.Equal( Token.Assign(17, 18, [| |]), ex.Data0)
801 | Assert.Equal( "Type comment only after last '=' expression.", ex.Data1)
802 | | _ ->
803 | Assert.True(false)
804 |
805 | [<Fact>]
806 | let ``del statement missing 'del' UnitTest`` () =
807 | try
808 | let lex = new MockTokenizer( [ ( Token.Pass(0, 4, [| |]), 0 ); ( Token.EOF([| |]), 5 ); ] )
809 | lex.Next()
810 | let parser = new Parser(lex)
811 | parser.ParseDelStmt() |> ignore
812 | with
813 | | :? SyntaxError as ex ->
814 | Assert.Equal( Token.Pass(0, 4, [| |]), ex.Data0)
815 | Assert.Equal( "Expecting 'del' in del statement!", ex.Data1)
816 | | _ ->
817 | Assert.True(false)
818 |
819 | [<Fact>]
820 | let ``pass statement missing 'pas' UnitTest`` () =
821 | try
822 | let lex = new MockTokenizer( [ ( Token.Del(0, 3, [| |]), 0 ); ( Token.EOF([| |]), 4 ); ] )
823 | lex.Next()
824 | let parser = new Parser(lex)
825 | parser.ParsePassStmt() |> ignore
826 | with
827 | | :? SyntaxError as ex ->
828 | Assert.Equal( Token.Del(0, 3, [| |]), ex.Data0)
829 | Assert.Equal( "Expecting 'pass' in pass statement!", ex.Data1)
830 | | _ ->
831 | Assert.True(false)
832 |
833 | [<Fact>]
834 | let ``illegal flow statement UnitTest`` () =
835 | try
836 | let lex = new MockTokenizer( [ ( Token.Pass(0, 4, [| |]), 0 ); ( Token.EOF([| |]), 5 ); ] )
837 | lex.Next()
838 | let parser = new Parser(lex)
839 | parser.ParseFlowStmt() |> ignore
840 | with
841 | | :? SyntaxError as ex ->
842 | Assert.Equal( Token.Pass(0, 4, [| |]), ex.Data0)
843 | Assert.Equal( "Illegal flow statement!", ex.Data1)
844 | | _ ->
845 | Assert.True(false)
846 |
847 | [<Fact>]
848 | let ``return statement outside of func def statement UnitTest`` () =
849 | try
850 | let lex = new MockTokenizer( [ ( Token.Return(0, 6, [| |]), 0 ); ( Token.EOF([| |]), 7 ); ] )
851 | lex.Next()
852 | let parser = new Parser(lex)
853 | parser.ParseFlowStmt() |> ignore
854 | with
855 | | :? SyntaxError as ex ->
856 | Assert.Equal( Token.Return(0, 6, [| |]), ex.Data0)
857 | Assert.Equal( "Found 'return' outside of function!", ex.Data1)
858 | | _ ->
859 | Assert.True(false)
860 |
861 | [<Fact>]
862 | let ``break statement outside of flow statement UnitTest`` () =
863 | try
864 | let lex = new MockTokenizer( [ ( Token.Break(0, 5, [| |]), 0 ); ( Token.EOF([| |]), 6 ); ] )
865 | lex.Next()
866 | let parser = new Parser(lex)
867 | parser.ParseFlowStmt() |> ignore
868 | with
869 | | :? SyntaxError as ex ->
870 | Assert.Equal( Token.Break(0, 5, [| |]), ex.Data0)
871 | Assert.Equal( "Found 'break' outside of loop statement!", ex.Data1)
872 | | _ ->
873 | Assert.True(false)
874 |
875 | [<Fact>]
876 | let ``continue statement outside of flow statement UnitTest`` () =
877 | try
878 | let lex = new MockTokenizer( [ ( Token.Continue(0, 8, [| |]), 0 ); ( Token.EOF([| |]), 9 ); ] )
879 | lex.Next()
880 | let parser = new Parser(lex)
881 | parser.ParseFlowStmt() |> ignore
882 | with
883 | | :? SyntaxError as ex ->
884 | Assert.Equal( Token.Continue(0, 8, [| |]), ex.Data0)
885 | Assert.Equal( "Found 'continue' outside of loop statement!", ex.Data1)
886 | | _ ->
887 | Assert.True(false)
888 |
889 | [<Fact>]
890 | let ``raise statement outside of flow statement UnitTest`` () =
891 | try
892 | let lex = new MockTokenizer( [ ( Token.Raise(0, 5, [| |]), 0 ); ( Token.EOF([| |]), 6 ); ] )
893 | lex.Next()
894 | let parser = new Parser(lex)
895 | parser.ParseFlowStmt() |> ignore
896 | with
897 | | :? SyntaxError as ex ->
898 | Assert.Equal( Token.Raise(0, 5, [| |]), ex.Data0)
899 | Assert.Equal( "Found 'raise' outside of loop statement!", ex.Data1)
900 | | _ ->
901 | Assert.True(false)
902 |
903 | [<Fact>]
904 | let ``yield statement outside of flow statement UnitTest`` () =
905 | try
906 | let lex = new MockTokenizer( [ ( Token.Yield(0, 5, [| |]), 0 ); ( Token.EOF([| |]), 6 ); ] )
907 | lex.Next()
908 | let parser = new Parser(lex)
909 | parser.ParseFlowStmt() |> ignore
910 | with
911 | | :? SyntaxError as ex ->
912 | Assert.Equal( Token.Yield(0, 5, [| |]), ex.Data0)
913 | Assert.Equal( "Found 'yield' outside of loop statement!", ex.Data1)
914 | | _ ->
915 | Assert.True(false)
916 |
917 | [<Fact>]
918 | let ``Handle no name left side of ':=' operator UnitTest`` () =
919 | try
920 | let lex = new MockTokenizer( [ ( Token.Number(0, 1, "1", [| |]), 0 ); ( Token.ColonAssign(2, 4, [| |]), 1 ); ( Token.Number(5, 6, "2", [| |]), 5 ); ( Token.EOF([| |]), 7 ); ] )
921 | lex.Next()
922 | let parser = new Parser(lex)
923 | parser.ParseNamedExpr() |> ignore
924 | with
925 | | :? SyntaxError as ex ->
926 | Assert.Equal( Token.ColonAssign(2, 4, [| |]), ex.Data0)
927 | Assert.Equal( "Must be NAME literal on left side of ':=' operator!", ex.Data1)
928 | | _ ->
929 | Assert.True(false)
930 |
--------------------------------------------------------------------------------
/TestPythonCoreFramework/TestPythonCoreTokenizer.fs:
--------------------------------------------------------------------------------
1 |
2 | namespace PythonCoreFramework.UnitTests
3 |
4 | open Xunit
5 | open PythonCoreFramework
6 |
7 | module TestPythonCoreTokenizer =
8 |
9 | [<Fact>]  // restored: attribute was garbled to "[]" in the dump; code otherwise unchanged
10 | let ``Reserved keyword 'False'`` () =
11 | let lex = new Tokenizer()
12 | Assert.True(
13 | match lex.IsReservedKeywordOrLiteralName("False") with
14 | | Option.Some(x) ->
15 | match x(0, 0, [||]) with
16 | | Token.False( _ , _ , [| |]) ->
17 | true
18 | | _ ->
19 | false
20 | | Option.None ->
21 | false
22 | )
23 |
24 | [<Fact>]
25 | let ``Reserved keyword 'None'`` () =
26 | let lex = new Tokenizer()
27 | Assert.True(
28 | match lex.IsReservedKeywordOrLiteralName("None") with
29 | | Option.Some(x) ->
30 | match x(0, 0, [||]) with
31 | | Token.None( _ , _ , [| |]) ->
32 | true
33 | | _ ->
34 | false
35 | | Option.None ->
36 | false
37 | )
38 |
39 | [<Fact>]
40 | let ``Reserved keyword 'True'`` () =
41 | let lex = new Tokenizer()
42 | Assert.True(
43 | match lex.IsReservedKeywordOrLiteralName("True") with
44 | | Option.Some(x) ->
45 | match x(0, 0, [||]) with
46 | | Token.True( _ , _ , [| |]) ->
47 | true
48 | | _ ->
49 | false
50 | | Option.None ->
51 | false
52 | )
53 |
54 | [<Fact>]
55 | let ``Reserved keyword 'and'`` () =
56 | let lex = new Tokenizer()
57 | Assert.True(
58 | match lex.IsReservedKeywordOrLiteralName("and") with
59 | | Option.Some(x) ->
60 | match x(0, 0, [||]) with
61 | | Token.And( _ , _ , [| |]) ->
62 | true
63 | | _ ->
64 | false
65 | | Option.None ->
66 | false
67 | )
68 |
69 | [<Fact>]
70 | let ``Reserved keyword 'as'`` () =
71 | let lex = new Tokenizer()
72 | Assert.True(
73 | match lex.IsReservedKeywordOrLiteralName("as") with
74 | | Option.Some(x) ->
75 | match x(0, 0, [||]) with
76 | | Token.As( _ , _ , [| |]) ->
77 | true
78 | | _ ->
79 | false
80 | | Option.None ->
81 | false
82 | )
83 |
84 | [<Fact>]
85 | let ``Reserved keyword 'assert'`` () =
86 | let lex = new Tokenizer()
87 | Assert.True(
88 | match lex.IsReservedKeywordOrLiteralName("assert") with
89 | | Option.Some(x) ->
90 | match x(0, 0, [||]) with
91 | | Token.Assert( _ , _ , [| |]) ->
92 | true
93 | | _ ->
94 | false
95 | | Option.None ->
96 | false
97 | )
98 |
99 | [<Fact>]
100 | let ``Reserved keyword 'async'`` () =
101 | let lex = new Tokenizer()
102 | Assert.True(
103 | match lex.IsReservedKeywordOrLiteralName("async") with
104 | | Option.Some(x) ->
105 | match x(0, 0, [||]) with
106 | | Token.Async( _ , _ , [| |]) ->
107 | true
108 | | _ ->
109 | false
110 | | Option.None ->
111 | false
112 | )
113 |
114 | [<Fact>]
115 | let ``Reserved keyword 'await'`` () =
116 | let lex = new Tokenizer()
117 | Assert.True(
118 | match lex.IsReservedKeywordOrLiteralName("await") with
119 | | Option.Some(x) ->
120 | match x(0, 0, [||]) with
121 | | Token.Await( _ , _ , [| |]) ->
122 | true
123 | | _ ->
124 | false
125 | | Option.None ->
126 | false
127 | )
128 |
129 | [<Fact>]
130 | let ``Reserved keyword 'break'`` () =
131 | let lex = new Tokenizer()
132 | Assert.True(
133 | match lex.IsReservedKeywordOrLiteralName("break") with
134 | | Option.Some(x) ->
135 | match x(0, 0, [||]) with
136 | | Token.Break( _ , _ , [| |]) ->
137 | true
138 | | _ ->
139 | false
140 | | Option.None ->
141 | false
142 | )
143 |
144 | [<Fact>]
145 | let ``Reserved keyword 'class'`` () =
146 | let lex = new Tokenizer()
147 | Assert.True(
148 | match lex.IsReservedKeywordOrLiteralName("class") with
149 | | Option.Some(x) ->
150 | match x(0, 0, [||]) with
151 | | Token.Class( _ , _ , [| |]) ->
152 | true
153 | | _ ->
154 | false
155 | | Option.None ->
156 | false
157 | )
158 |
159 | [<Fact>]
160 | let ``Reserved keyword 'continue'`` () =
161 | let lex = new Tokenizer()
162 | Assert.True(
163 | match lex.IsReservedKeywordOrLiteralName("continue") with
164 | | Option.Some(x) ->
165 | match x(0, 0, [||]) with
166 | | Token.Continue( _ , _ , [| |]) ->
167 | true
168 | | _ ->
169 | false
170 | | Option.None ->
171 | false
172 | )
173 |
174 | [<Fact>]
175 | let ``Reserved keyword 'def'`` () =
176 | let lex = new Tokenizer()
177 | Assert.True(
178 | match lex.IsReservedKeywordOrLiteralName("def") with
179 | | Option.Some(x) ->
180 | match x(0, 0, [||]) with
181 | | Token.Def( _ , _ , [| |]) ->
182 | true
183 | | _ ->
184 | false
185 | | Option.None ->
186 | false
187 | )
188 |
189 | [<Fact>]
190 | let ``Reserved keyword 'del'`` () =
191 | let lex = new Tokenizer()
192 | Assert.True(
193 | match lex.IsReservedKeywordOrLiteralName("del") with
194 | | Option.Some(x) ->
195 | match x(0, 0, [||]) with
196 | | Token.Del( _ , _ , [| |]) ->
197 | true
198 | | _ ->
199 | false
200 | | Option.None ->
201 | false
202 | )
203 |
204 | [<Fact>]
205 | let ``Reserved keyword 'elif'`` () =
206 | let lex = new Tokenizer()
207 | Assert.True(
208 | match lex.IsReservedKeywordOrLiteralName("elif") with
209 | | Option.Some(x) ->
210 | match x(0, 0, [||]) with
211 | | Token.Elif( _ , _ , [| |]) ->
212 | true
213 | | _ ->
214 | false
215 | | Option.None ->
216 | false
217 | )
218 |
219 | [<Fact>]
220 | let ``Reserved keyword 'else'`` () =
221 | let lex = new Tokenizer()
222 | Assert.True(
223 | match lex.IsReservedKeywordOrLiteralName("else") with
224 | | Option.Some(x) ->
225 | match x(0, 0, [||]) with
226 | | Token.Else( _ , _ , [| |]) ->
227 | true
228 | | _ ->
229 | false
230 | | Option.None ->
231 | false
232 | )
233 |
234 | [<Fact>]
235 | let ``Reserved keyword 'except'`` () =
236 | let lex = new Tokenizer()
237 | Assert.True(
238 | match lex.IsReservedKeywordOrLiteralName("except") with
239 | | Option.Some(x) ->
240 | match x(0, 0, [||]) with
241 | | Token.Except( _ , _ , [| |]) ->
242 | true
243 | | _ ->
244 | false
245 | | Option.None ->
246 | false
247 | )
248 |
249 | []
250 | let ``Reserved keyword 'finally'`` () =
251 | let lex = new Tokenizer()
252 | Assert.True(
253 | match lex.IsReservedKeywordOrLiteralName("finally") with
254 | | Option.Some(x) ->
255 | match x(0, 0, [||]) with
256 | | Token.Finally( _ , _ , [| |]) ->
257 | true
258 | | _ ->
259 | false
260 | | Option.None ->
261 | false
262 | )
263 |
264 | []
265 | let ``Reserved keyword 'for'`` () =
266 | let lex = new Tokenizer()
267 | Assert.True(
268 | match lex.IsReservedKeywordOrLiteralName("for") with
269 | | Option.Some(x) ->
270 | match x(0, 0, [||]) with
271 | | Token.For( _ , _ , [| |]) ->
272 | true
273 | | _ ->
274 | false
275 | | Option.None ->
276 | false
277 | )
278 |
279 | []
280 | let ``Reserved keyword 'from'`` () =
281 | let lex = new Tokenizer()
282 | Assert.True(
283 | match lex.IsReservedKeywordOrLiteralName("from") with
284 | | Option.Some(x) ->
285 | match x(0, 0, [||]) with
286 | | Token.From( _ , _ , [| |]) ->
287 | true
288 | | _ ->
289 | false
290 | | Option.None ->
291 | false
292 | )
293 |
294 | []
295 | let ``Reserved keyword 'global'`` () =
296 | let lex = new Tokenizer()
297 | Assert.True(
298 | match lex.IsReservedKeywordOrLiteralName("global") with
299 | | Option.Some(x) ->
300 | match x(0, 0, [||]) with
301 | | Token.Global( _ , _ , [| |]) ->
302 | true
303 | | _ ->
304 | false
305 | | Option.None ->
306 | false
307 | )
308 |
309 | []
310 | let ``Reserved keyword 'if'`` () =
311 | let lex = new Tokenizer()
312 | Assert.True(
313 | match lex.IsReservedKeywordOrLiteralName("if") with
314 | | Option.Some(x) ->
315 | match x(0, 0, [||]) with
316 | | Token.If( _ , _ , [| |]) ->
317 | true
318 | | _ ->
319 | false
320 | | Option.None ->
321 | false
322 | )
323 |
324 | []
325 | let ``Reserved keyword 'import'`` () =
326 | let lex = new Tokenizer()
327 | Assert.True(
328 | match lex.IsReservedKeywordOrLiteralName("import") with
329 | | Option.Some(x) ->
330 | match x(0, 0, [||]) with
331 | | Token.Import( _ , _ , [| |]) ->
332 | true
333 | | _ ->
334 | false
335 | | Option.None ->
336 | false
337 | )
338 |
339 | []
340 | let ``Reserved keyword 'in'`` () =
341 | let lex = new Tokenizer()
342 | Assert.True(
343 | match lex.IsReservedKeywordOrLiteralName("in") with
344 | | Option.Some(x) ->
345 | match x(0, 0, [||]) with
346 | | Token.In( _ , _ , [| |]) ->
347 | true
348 | | _ ->
349 | false
350 | | Option.None ->
351 | false
352 | )
353 |
354 | []
355 | let ``Reserved keyword 'is'`` () =
356 | let lex = new Tokenizer()
357 | Assert.True(
358 | match lex.IsReservedKeywordOrLiteralName("is") with
359 | | Option.Some(x) ->
360 | match x(0, 0, [||]) with
361 | | Token.Is( _ , _ , [| |]) ->
362 | true
363 | | _ ->
364 | false
365 | | Option.None ->
366 | false
367 | )
368 |
369 | []
370 | let ``Reserved keyword 'lambda'`` () =
371 | let lex = new Tokenizer()
372 | Assert.True(
373 | match lex.IsReservedKeywordOrLiteralName("lambda") with
374 | | Option.Some(x) ->
375 | match x(0, 0, [||]) with
376 | | Token.Lambda( _ , _ , [| |]) ->
377 | true
378 | | _ ->
379 | false
380 | | Option.None ->
381 | false
382 | )
383 |
384 | []
385 | let ``Reserved keyword 'nonlocal'`` () =
386 | let lex = new Tokenizer()
387 | Assert.True(
388 | match lex.IsReservedKeywordOrLiteralName("nonlocal") with
389 | | Option.Some(x) ->
390 | match x(0, 0, [||]) with
391 | | Token.Nonlocal( _ , _ , [| |]) ->
392 | true
393 | | _ ->
394 | false
395 | | Option.None ->
396 | false
397 | )
398 |
399 | []
400 | let ``Reserved keyword 'not'`` () =
401 | let lex = new Tokenizer()
402 | Assert.True(
403 | match lex.IsReservedKeywordOrLiteralName("not") with
404 | | Option.Some(x) ->
405 | match x(0, 0, [||]) with
406 | | Token.Not( _ , _ , [| |]) ->
407 | true
408 | | _ ->
409 | false
410 | | Option.None ->
411 | false
412 | )
413 |
414 | [<Fact>]
415 | let ``Reserved keyword 'or'`` () =
416 | let lex = new Tokenizer()
417 | Assert.True(
418 | match lex.IsReservedKeywordOrLiteralName("or") with
419 | | Option.Some(x) ->
420 | match x(0, 0, [||]) with
421 | | Token.Or( _ , _ , [| |]) ->
422 | true
423 | | _ ->
424 | false
425 | | Option.None ->
426 | false
427 | )
428 |
429 | [<Fact>]
430 | let ``Reserved keyword 'pass'`` () =
431 | let lex = new Tokenizer()
432 | Assert.True(
433 | match lex.IsReservedKeywordOrLiteralName("pass") with
434 | | Option.Some(x) ->
435 | match x(0, 0, [||]) with
436 | | Token.Pass( _ , _ , [| |]) ->
437 | true
438 | | _ ->
439 | false
440 | | Option.None ->
441 | false
442 | )
443 |
444 | [<Fact>]
445 | let ``Reserved keyword 'raise'`` () =
446 | let lex = new Tokenizer()
447 | Assert.True(
448 | match lex.IsReservedKeywordOrLiteralName("raise") with
449 | | Option.Some(x) ->
450 | match x(0, 0, [||]) with
451 | | Token.Raise( _ , _ , [| |]) ->
452 | true
453 | | _ ->
454 | false
455 | | Option.None ->
456 | false
457 | )
458 |
459 | [<Fact>]
460 | let ``Reserved keyword 'return'`` () =
461 | let lex = new Tokenizer()
462 | Assert.True(
463 | match lex.IsReservedKeywordOrLiteralName("return") with
464 | | Option.Some(x) ->
465 | match x(0, 0, [||]) with
466 | | Token.Return( _ , _ , [| |]) ->
467 | true
468 | | _ ->
469 | false
470 | | Option.None ->
471 | false
472 | )
473 |
474 | [<Fact>]
475 | let ``Reserved keyword 'try'`` () =
476 | let lex = new Tokenizer()
477 | Assert.True(
478 | match lex.IsReservedKeywordOrLiteralName("try") with
479 | | Option.Some(x) ->
480 | match x(0, 0, [||]) with
481 | | Token.Try( _ , _ , [| |]) ->
482 | true
483 | | _ ->
484 | false
485 | | Option.None ->
486 | false
487 | )
488 |
489 | [<Fact>]
490 | let ``Reserved keyword 'while'`` () =
491 | let lex = new Tokenizer()
492 | Assert.True(
493 | match lex.IsReservedKeywordOrLiteralName("while") with
494 | | Option.Some(x) ->
495 | match x(0, 0, [||]) with
496 | | Token.While( _ , _ , [| |]) ->
497 | true
498 | | _ ->
499 | false
500 | | Option.None ->
501 | false
502 | )
503 |
504 | [<Fact>]
505 | let ``Reserved keyword 'with'`` () =
506 | let lex = new Tokenizer()
507 | Assert.True(
508 | match lex.IsReservedKeywordOrLiteralName("with") with
509 | | Option.Some(x) ->
510 | match x(0, 0, [||]) with
511 | | Token.With( _ , _ , [| |]) ->
512 | true
513 | | _ ->
514 | false
515 | | Option.None ->
516 | false
517 | )
518 |
519 | [<Fact>]
520 | let ``Reserved keyword 'yield'`` () =
521 | let lex = new Tokenizer()
522 | Assert.True(
523 | match lex.IsReservedKeywordOrLiteralName("yield") with
524 | | Option.Some(x) ->
525 | match x(0, 0, [||]) with
526 | | Token.Yield( _ , _ , [| |]) ->
527 | true
528 | | _ ->
529 | false
530 | | Option.None ->
531 | false
532 | )
533 |
534 | [<Fact>]
535 | let ``Operator or delimiter '+'`` () =
536 | let lex = new Tokenizer()
537 | Assert.True(
538 | match lex.IsOperatorOrDelimiter('+', ' ', ' ') with
539 | | Option.Some(x) ->
540 | match x(0, 0, [||]) with
541 | | Token.Plus( _ , _ , [| |]) ->
542 | true
543 | | _ ->
544 | false
545 | | Option.None ->
546 | false
547 | )
548 |
549 | [<Fact>]
550 | let ``Operator or delimiter '-'`` () =
551 | let lex = new Tokenizer()
552 | Assert.True(
553 | match lex.IsOperatorOrDelimiter('-', ' ', ' ') with
554 | | Option.Some(x) ->
555 | match x(0, 0, [||]) with
556 | | Token.Minus( _ , _ , [| |]) ->
557 | true
558 | | _ ->
559 | false
560 | | Option.None ->
561 | false
562 | )
563 |
564 | [<Fact>]
565 | let ``Operator or delimiter '*'`` () =
566 | let lex = new Tokenizer()
567 | Assert.True(
568 | match lex.IsOperatorOrDelimiter('*', ' ', ' ') with
569 | | Option.Some(x) ->
570 | match x(0, 0, [||]) with
571 | | Token.Mul( _ , _ , [| |]) ->
572 | true
573 | | _ ->
574 | false
575 | | Option.None ->
576 | false
577 | )
578 |
579 | [<Fact>]
580 | let ``Operator or delimiter '**'`` () =
581 | let lex = new Tokenizer()
582 | Assert.True(
583 | match lex.IsOperatorOrDelimiter('*', '*', ' ') with
584 | | Option.Some(x) ->
585 | match x(0, 0, [||]) with
586 | | Token.Power( _ , _ , [| |]) ->
587 | true
588 | | _ ->
589 | false
590 | | Option.None ->
591 | false
592 | )
593 |
594 | [<Fact>]
595 | let ``Operator or delimiter '/'`` () =
596 | let lex = new Tokenizer()
597 | Assert.True(
598 | match lex.IsOperatorOrDelimiter('/', ' ', ' ') with
599 | | Option.Some(x) ->
600 | match x(0, 0, [||]) with
601 | | Token.Div( _ , _ , [| |]) ->
602 | true
603 | | _ ->
604 | false
605 | | Option.None ->
606 | false
607 | )
608 |
609 | [<Fact>]
610 | let ``Operator or delimiter '//'`` () =
611 | let lex = new Tokenizer()
612 | Assert.True(
613 | match lex.IsOperatorOrDelimiter('/', '/', ' ') with
614 | | Option.Some(x) ->
615 | match x(0, 0, [||]) with
616 | | Token.FloorDiv( _ , _ , [| |]) ->
617 | true
618 | | _ ->
619 | false
620 | | Option.None ->
621 | false
622 | )
623 |
624 | [<Fact>]
625 | let ``Operator or delimiter '%'`` () =
626 | let lex = new Tokenizer()
627 | Assert.True(
628 | match lex.IsOperatorOrDelimiter('%', ' ', ' ') with
629 | | Option.Some(x) ->
630 | match x(0, 0, [||]) with
631 | | Token.Modulo( _ , _ , [| |]) ->
632 | true
633 | | _ ->
634 | false
635 | | Option.None ->
636 | false
637 | )
638 |
639 | [<Fact>]
640 | let ``Operator or delimiter Matrice`` () =
641 | let lex = new Tokenizer()
642 | Assert.True(
643 | match lex.IsOperatorOrDelimiter('@', ' ', ' ') with
644 | | Option.Some(x) ->
645 | match x(0, 0, [||]) with
646 | | Token.Matrice( _ , _ , [| |]) ->
647 | true
648 | | _ ->
649 | false
650 | | Option.None ->
651 | false
652 | )
653 |
654 | [<Fact>]
655 | let ``Operator or delimiter '<<'`` () =
656 | let lex = new Tokenizer()
657 | Assert.True(
658 | match lex.IsOperatorOrDelimiter('<', '<', ' ') with
659 | | Option.Some(x) ->
660 | match x(0, 0, [||]) with
661 | | Token.ShiftLeft( _ , _ , [| |]) ->
662 | true
663 | | _ ->
664 | false
665 | | Option.None ->
666 | false
667 | )
668 |
669 | [<Fact>]
670 | let ``Operator or delimiter '>>'`` () =
671 | let lex = new Tokenizer()
672 | Assert.True(
673 | match lex.IsOperatorOrDelimiter('>', '>', ' ') with
674 | | Option.Some(x) ->
675 | match x(0, 0, [||]) with
676 | | Token.ShiftRight( _ , _ , [| |]) ->
677 | true
678 | | _ ->
679 | false
680 | | Option.None ->
681 | false
682 | )
683 |
684 | [<Fact>]
685 | let ``Operator or delimiter '&'`` () =
686 | let lex = new Tokenizer()
687 | Assert.True(
688 | match lex.IsOperatorOrDelimiter('&', ' ', ' ') with
689 | | Option.Some(x) ->
690 | match x(0, 0, [||]) with
691 | | Token.BitAnd( _ , _ , [| |]) ->
692 | true
693 | | _ ->
694 | false
695 | | Option.None ->
696 | false
697 | )
698 |
699 | [<Fact>]
700 | let ``Operator or delimiter '|'`` () =
701 | let lex = new Tokenizer()
702 | Assert.True(
703 | match lex.IsOperatorOrDelimiter('|', ' ', ' ') with
704 | | Option.Some(x) ->
705 | match x(0, 0, [||]) with
706 | | Token.BitOr( _ , _ , [| |]) ->
707 | true
708 | | _ ->
709 | false
710 | | Option.None ->
711 | false
712 | )
713 |
714 | [<Fact>]
715 | let ``Operator or delimiter '^'`` () =
716 | let lex = new Tokenizer()
717 | Assert.True(
718 | match lex.IsOperatorOrDelimiter('^', ' ', ' ') with
719 | | Option.Some(x) ->
720 | match x(0, 0, [||]) with
721 | | Token.BitXor( _ , _ , [| |]) ->
722 | true
723 | | _ ->
724 | false
725 | | Option.None ->
726 | false
727 | )
728 |
729 | [<Fact>]
730 | let ``Operator or delimiter '~'`` () =
731 | let lex = new Tokenizer()
732 | Assert.True(
733 | match lex.IsOperatorOrDelimiter('~', ' ', ' ') with
734 | | Option.Some(x) ->
735 | match x(0, 0, [||]) with
736 | | Token.BitInvert( _ , _ , [| |]) ->
737 | true
738 | | _ ->
739 | false
740 | | Option.None ->
741 | false
742 | )
743 |
744 | [<Fact>]
745 | let ``Operator or delimiter '<'`` () =
746 | let lex = new Tokenizer()
747 | Assert.True(
748 | match lex.IsOperatorOrDelimiter('<', ' ', ' ') with
749 | | Option.Some(x) ->
750 | match x(0, 0, [||]) with
751 | | Token.Less( _ , _ , [| |]) ->
752 | true
753 | | _ ->
754 | false
755 | | Option.None ->
756 | false
757 | )
758 |
759 | [<Fact>]
760 | let ``Operator or delimiter '>'`` () =
761 | let lex = new Tokenizer()
762 | Assert.True(
763 | match lex.IsOperatorOrDelimiter('>', ' ', ' ') with
764 | | Option.Some(x) ->
765 | match x(0, 0, [||]) with
766 | | Token.Greater( _ , _ , [| |]) ->
767 | true
768 | | _ ->
769 | false
770 | | Option.None ->
771 | false
772 | )
773 |
774 | [<Fact>]
775 | let ``Operator or delimiter '<='`` () =
776 | let lex = new Tokenizer()
777 | Assert.True(
778 | match lex.IsOperatorOrDelimiter('<', '=', ' ') with
779 | | Option.Some(x) ->
780 | match x(0, 0, [||]) with
781 | | Token.LessEqual( _ , _ , [| |]) ->
782 | true
783 | | _ ->
784 | false
785 | | Option.None ->
786 | false
787 | )
788 |
789 | [<Fact>]
790 | let ``Operator or delimiter '>='`` () =
791 | let lex = new Tokenizer()
792 | Assert.True(
793 | match lex.IsOperatorOrDelimiter('>', '=', ' ') with
794 | | Option.Some(x) ->
795 | match x(0, 0, [||]) with
796 | | Token.GreaterEqual( _ , _ , [| |]) ->
797 | true
798 | | _ ->
799 | false
800 | | Option.None ->
801 | false
802 | )
803 |
804 | [<Fact>]
805 | let ``Operator or delimiter '=='`` () =
806 | let lex = new Tokenizer()
807 | Assert.True(
808 | match lex.IsOperatorOrDelimiter('=', '=', ' ') with
809 | | Option.Some(x) ->
810 | match x(0, 0, [||]) with
811 | | Token.Equal( _ , _ , [| |]) ->
812 | true
813 | | _ ->
814 | false
815 | | Option.None ->
816 | false
817 | )
818 |
819 | [<Fact>]
820 | let ``Operator or delimiter '<>'`` () =
821 | let lex = new Tokenizer()
822 | Assert.True(
823 | match lex.IsOperatorOrDelimiter('<', '>', ' ') with
824 | | Option.Some(x) ->
825 | match x(0, 0, [||]) with
826 | | Token.NotEqual( _ , _ , [| |]) ->
827 | true
828 | | _ ->
829 | false
830 | | Option.None ->
831 | false
832 | )
833 |
834 | [<Fact>]
835 | let ``Operator or delimiter '!='`` () =
836 | let lex = new Tokenizer()
837 | Assert.True(
838 | match lex.IsOperatorOrDelimiter('!', '=', ' ') with
839 | | Option.Some(x) ->
840 | match x(0, 0, [||]) with
841 | | Token.NotEqual( _ , _ , [| |]) ->
842 | true
843 | | _ ->
844 | false
845 | | Option.None ->
846 | false
847 | )
848 |
849 | [<Fact>]
850 | let ``Operator or delimiter '('`` () =
851 | let lex = new Tokenizer()
852 | Assert.True(
853 | match lex.IsOperatorOrDelimiter('(', ' ', ' ') with
854 | | Option.Some(x) ->
855 | match x(0, 0, [||]) with
856 | | Token.LeftParen( _ , _ , [| |]) ->
857 | true
858 | | _ ->
859 | false
860 | | Option.None ->
861 | false
862 | )
863 |
864 | [<Fact>]
865 | let ``Operator or delimiter ')'`` () =
866 | let lex = new Tokenizer()
867 | Assert.True(
868 | match lex.IsOperatorOrDelimiter(')', ' ', ' ') with
869 | | Option.Some(x) ->
870 | match x(0, 0, [||]) with
871 | | Token.RightParen( _ , _ , [| |]) ->
872 | true
873 | | _ ->
874 | false
875 | | Option.None ->
876 | false
877 | )
878 |
879 | [<Fact>]
880 | let ``Operator or delimiter '['`` () =
881 | let lex = new Tokenizer()
882 | Assert.True(
883 | match lex.IsOperatorOrDelimiter('[', ' ', ' ') with
884 | | Option.Some(x) ->
885 | match x(0, 0, [||]) with
886 | | Token.LeftBracket( _ , _ , [| |]) ->
887 | true
888 | | _ ->
889 | false
890 | | Option.None ->
891 | false
892 | )
893 |
894 | [<Fact>]
895 | let ``Operator or delimiter ']'`` () =
896 | let lex = new Tokenizer()
897 | Assert.True(
898 | match lex.IsOperatorOrDelimiter(']', ' ', ' ') with
899 | | Option.Some(x) ->
900 | match x(0, 0, [||]) with
901 | | Token.RightBracket( _ , _ , [| |]) ->
902 | true
903 | | _ ->
904 | false
905 | | Option.None ->
906 | false
907 | )
908 |
909 | [<Fact>]
910 | let ``Operator or delimiter '{'`` () =
911 | let lex = new Tokenizer()
912 | Assert.True(
913 | match lex.IsOperatorOrDelimiter('{', ' ', ' ') with
914 | | Option.Some(x) ->
915 | match x(0, 0, [||]) with
916 | | Token.LeftCurly( _ , _ , [| |]) ->
917 | true
918 | | _ ->
919 | false
920 | | Option.None ->
921 | false
922 | )
923 |
924 | [<Fact>]
925 | let ``Operator or delimiter '}'`` () =
926 | let lex = new Tokenizer()
927 | Assert.True(
928 | match lex.IsOperatorOrDelimiter('}', ' ', ' ') with
929 | | Option.Some(x) ->
930 | match x(0, 0, [||]) with
931 | | Token.RightCurly( _ , _ , [| |]) ->
932 | true
933 | | _ ->
934 | false
935 | | Option.None ->
936 | false
937 | )
938 |
939 | [<Fact>]
940 | let ``Operator or delimiter ','`` () =
941 | let lex = new Tokenizer()
942 | Assert.True(
943 | match lex.IsOperatorOrDelimiter(',', ' ', ' ') with
944 | | Option.Some(x) ->
945 | match x(0, 0, [||]) with
946 | | Token.Comma( _ , _ , [| |]) ->
947 | true
948 | | _ ->
949 | false
950 | | Option.None ->
951 | false
952 | )
953 |
954 | [<Fact>]
955 | let ``Operator or delimiter ':'`` () =
956 | let lex = new Tokenizer()
957 | Assert.True(
958 | match lex.IsOperatorOrDelimiter(':', ' ', ' ') with
959 | | Option.Some(x) ->
960 | match x(0, 0, [||]) with
961 | | Token.Colon( _ , _ , [| |]) ->
962 | true
963 | | _ ->
964 | false
965 | | Option.None ->
966 | false
967 | )
968 |
969 | [<Fact>]
970 | let ``Operator or delimiter ':='`` () =
971 | let lex = new Tokenizer()
972 | Assert.True(
973 | match lex.IsOperatorOrDelimiter(':', '=', ' ') with
974 | | Option.Some(x) ->
975 | match x(0, 0, [||]) with
976 | | Token.ColonAssign( _ , _ , [| |]) ->
977 | true
978 | | _ ->
979 | false
980 | | Option.None ->
981 | false
982 | )
983 |
984 | [<Fact>]
985 | let ``Operator or delimiter dot`` () =
986 | let lex = new Tokenizer()
987 | Assert.True(
988 | match lex.IsOperatorOrDelimiter('.', ' ', ' ') with
989 | | Option.Some(x) ->
990 | match x(0, 0, [||]) with
991 | | Token.Dot( _ , _ , [| |]) ->
992 | true
993 | | _ ->
994 | false
995 | | Option.None ->
996 | false
997 | )
998 |
999 | [<Fact>]
1000 | let ``Operator or delimiter '...'`` () =
1001 | let lex = new Tokenizer()
1002 | Assert.True(
1003 | match lex.IsOperatorOrDelimiter('.', '.', '.') with
1004 | | Option.Some(x) ->
1005 | match x(0, 0, [||]) with
1006 | | Token.Elipsis( _ , _ , [| |]) ->
1007 | true
1008 | | _ ->
1009 | false
1010 | | Option.None ->
1011 | false
1012 | )
1013 |
1014 | [<Fact>]
1015 | let ``Operator or delimiter ';'`` () =
1016 | let lex = new Tokenizer()
1017 | Assert.True(
1018 | match lex.IsOperatorOrDelimiter(';', ' ', ' ') with
1019 | | Option.Some(x) ->
1020 | match x(0, 0, [||]) with
1021 | | Token.SemiColon( _ , _ , [| |]) ->
1022 | true
1023 | | _ ->
1024 | false
1025 | | Option.None ->
1026 | false
1027 | )
1028 |
1029 | [<Fact>]
1030 | let ``Operator or delimiter '='`` () =
1031 | let lex = new Tokenizer()
1032 | Assert.True(
1033 | match lex.IsOperatorOrDelimiter('=', ' ', ' ') with
1034 | | Option.Some(x) ->
1035 | match x(0, 0, [||]) with
1036 | | Token.Assign( _ , _ , [| |]) ->
1037 | true
1038 | | _ ->
1039 | false
1040 | | Option.None ->
1041 | false
1042 | )
1043 |
1044 | [<Fact>]
1045 | let ``Operator or delimiter '->'`` () =
1046 | let lex = new Tokenizer()
1047 | Assert.True(
1048 | match lex.IsOperatorOrDelimiter('-', '>', ' ') with
1049 | | Option.Some(x) ->
1050 | match x(0, 0, [||]) with
1051 | | Token.Ptr( _ , _ , [| |]) ->
1052 | true
1053 | | _ ->
1054 | false
1055 | | Option.None ->
1056 | false
1057 | )
1058 |
1059 | [<Fact>]
1060 | let ``Operator or delimiter '+='`` () =
1061 | let lex = new Tokenizer()
1062 | Assert.True(
1063 | match lex.IsOperatorOrDelimiter('+', '=', ' ') with
1064 | | Option.Some(x) ->
1065 | match x(0, 0, [||]) with
1066 | | Token.PlusAssign( _ , _ , [| |]) ->
1067 | true
1068 | | _ ->
1069 | false
1070 | | Option.None ->
1071 | false
1072 | )
1073 |
1074 | [<Fact>]
1075 | let ``Operator or delimiter '-='`` () =
1076 | let lex = new Tokenizer()
1077 | Assert.True(
1078 | match lex.IsOperatorOrDelimiter('-', '=', ' ') with
1079 | | Option.Some(x) ->
1080 | match x(0, 0, [||]) with
1081 | | Token.MinusAssign( _ , _ , [| |]) ->
1082 | true
1083 | | _ ->
1084 | false
1085 | | Option.None ->
1086 | false
1087 | )
1088 |
1089 | [<Fact>]
1090 | let ``Operator or delimiter '*='`` () =
1091 | let lex = new Tokenizer()
1092 | Assert.True(
1093 | match lex.IsOperatorOrDelimiter('*', '=', ' ') with
1094 | | Option.Some(x) ->
1095 | match x(0, 0, [||]) with
1096 | | Token.MulAssign( _ , _ , [| |]) ->
1097 | true
1098 | | _ ->
1099 | false
1100 | | Option.None ->
1101 | false
1102 | )
1103 |
1104 | [<Fact>]
1105 | let ``Operator or delimiter '/='`` () =
1106 | let lex = new Tokenizer()
1107 | Assert.True(
1108 | match lex.IsOperatorOrDelimiter('/', '=', ' ') with
1109 | | Option.Some(x) ->
1110 | match x(0, 0, [||]) with
1111 | | Token.DivAssign( _ , _ , [| |]) ->
1112 | true
1113 | | _ ->
1114 | false
1115 | | Option.None ->
1116 | false
1117 | )
1118 |
1119 | [<Fact>]
1120 | let ``Operator or delimiter '//='`` () =
1121 | let lex = new Tokenizer()
1122 | Assert.True(
1123 | match lex.IsOperatorOrDelimiter('/', '/', '=') with
1124 | | Option.Some(x) ->
1125 | match x(0, 0, [||]) with
1126 | | Token.FloorDivAssign( _ , _ , [| |]) ->
1127 | true
1128 | | _ ->
1129 | false
1130 | | Option.None ->
1131 | false
1132 | )
1133 |
1134 | [<Fact>]
1135 | let ``Operator or delimiter '%='`` () =
1136 | let lex = new Tokenizer()
1137 | Assert.True(
1138 | match lex.IsOperatorOrDelimiter('%', '=', ' ') with
1139 | | Option.Some(x) ->
1140 | match x(0, 0, [||]) with
1141 | | Token.ModuloAssign( _ , _ , [| |]) ->
1142 | true
1143 | | _ ->
1144 | false
1145 | | Option.None ->
1146 | false
1147 | )
1148 |
1149 | [<Fact>]
1150 | let ``Operator or delimiter Matrice Assign`` () =
1151 | let lex = new Tokenizer()
1152 | Assert.True(
1153 | match lex.IsOperatorOrDelimiter('@', '=', ' ') with
1154 | | Option.Some(x) ->
1155 | match x(0, 0, [||]) with
1156 | | Token.MatriceAssign( _ , _ , [| |]) ->
1157 | true
1158 | | _ ->
1159 | false
1160 | | Option.None ->
1161 | false
1162 | )
1163 |
1164 | [<Fact>]
1165 | let ``Operator or delimiter '&='`` () =
1166 | let lex = new Tokenizer()
1167 | Assert.True(
1168 | match lex.IsOperatorOrDelimiter('&', '=', ' ') with
1169 | | Option.Some(x) ->
1170 | match x(0, 0, [||]) with
1171 | | Token.BitAndAssign( _ , _ , [| |]) ->
1172 | true
1173 | | _ ->
1174 | false
1175 | | Option.None ->
1176 | false
1177 | )
1178 |
1179 | [<Fact>]
1180 | let ``Operator or delimiter '|='`` () =
1181 | let lex = new Tokenizer()
1182 | Assert.True(
1183 | match lex.IsOperatorOrDelimiter('|', '=', ' ') with
1184 | | Option.Some(x) ->
1185 | match x(0, 0, [||]) with
1186 | | Token.BitOrAssign( _ , _ , [| |]) ->
1187 | true
1188 | | _ ->
1189 | false
1190 | | Option.None ->
1191 | false
1192 | )
1193 |
1194 | [<Fact>]
1195 | let ``Operator or delimiter '^='`` () =
1196 | let lex = new Tokenizer()
1197 | Assert.True(
1198 | match lex.IsOperatorOrDelimiter('^', '=', ' ') with
1199 | | Option.Some(x) ->
1200 | match x(0, 0, [||]) with
1201 | | Token.BitXorAssign( _ , _ , [| |]) ->
1202 | true
1203 | | _ ->
1204 | false
1205 | | Option.None ->
1206 | false
1207 | )
1208 |
1209 | [<Fact>]
1210 | let ``Operator or delimiter '<<='`` () =
1211 | let lex = new Tokenizer()
1212 | Assert.True(
1213 | match lex.IsOperatorOrDelimiter('<', '<', '=') with
1214 | | Option.Some(x) ->
1215 | match x(0, 0, [||]) with
1216 | | Token.ShiftLeftAssign( _ , _ , [| |]) ->
1217 | true
1218 | | _ ->
1219 | false
1220 | | Option.None ->
1221 | false
1222 | )
1223 |
1224 | [<Fact>]
1225 | let ``Operator or delimiter '>>='`` () =
1226 | let lex = new Tokenizer()
1227 | Assert.True(
1228 | match lex.IsOperatorOrDelimiter('>', '>', '=') with
1229 | | Option.Some(x) ->
1230 | match x(0, 0, [||]) with
1231 | | Token.ShiftRightAssign( _ , _ , [| |]) ->
1232 | true
1233 | | _ ->
1234 | false
1235 | | Option.None ->
1236 | false
1237 | )
1238 |
1239 | [<Fact>]
1240 | let ``Operator or delimiter '**='`` () =
1241 | let lex = new Tokenizer()
1242 | Assert.True(
1243 | match lex.IsOperatorOrDelimiter('*', '*', '=') with
1244 | | Option.Some(x) ->
1245 | match x(0, 0, [||]) with
1246 | | Token.PowerAssign( _ , _ , [| |]) ->
1247 | true
1248 | | _ ->
1249 | false
1250 | | Option.None ->
1251 | false
1252 | )
1253 |
--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-cayman
--------------------------------------------------------------------------------