├── .editorconfig ├── .github └── workflows │ └── node.js.yml ├── .gitignore ├── .vscode └── settings.json ├── CODE-OF-CONDUCT.md ├── ISSUE_TEMPLATE ├── LICENSE ├── README.md ├── grammars ├── csharp.tmLanguage └── csharp.tmLanguage.cson ├── gulpfile.mjs ├── nodemon.json ├── package-lock.json ├── package.json ├── src ├── csharp.tmLanguage.yml └── syntax.md ├── test ├── attribute.tests.ts ├── class.tests.ts ├── comment.tests.ts ├── constructor.tests.ts ├── delegate.tests.ts ├── destructor.tests.ts ├── enum.tests.ts ├── event.tests.ts ├── expressions.tests.ts ├── extern-alias.tests.ts ├── field.tests.ts ├── incomplete-code.tests.ts ├── indexer.tests.ts ├── interface.tests.ts ├── interpolated-string.tests.ts ├── label.tests.ts ├── literals.tests.ts ├── local.tests.ts ├── method.tests.ts ├── namespace.tests.ts ├── operator.tests.ts ├── patterns.tests.ts ├── preprocessor.tests.ts ├── property.tests.ts ├── record.tests.ts ├── statements.tests.ts ├── struct.tests.ts ├── top-level-statements.tests.ts ├── tuple.tests.ts ├── type-name.tests.ts ├── using-directive.tests.ts ├── utils │ └── tokenize.ts ├── verbatim-indentifier.tests.ts └── xml-doc-comment.tests.ts ├── tsconfig.json └── wallaby.conf.js /.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig is awesome: http://EditorConfig.org 2 | 3 | # top-most EditorConfig file 4 | root = true 5 | 6 | [{.travis.yml},package.json] 7 | indent_style = space 8 | indent_size = 2 9 | 10 | [*.json] 11 | indent_style = space 12 | indent_size = 2 -------------------------------------------------------------------------------- /.github/workflows/node.js.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node 2 | # For more information see: 
https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions 3 | 4 | name: CI 5 | 6 | on: 7 | push: 8 | branches: [ main ] 9 | pull_request: 10 | branches: [ main ] 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ubuntu-latest 16 | 17 | steps: 18 | - uses: actions/checkout@v2 19 | - name: Use Node.js 18.x 20 | uses: actions/setup-node@v1 21 | with: 22 | node-version: '18.x' 23 | - run: npm ci # This will automatically build and run tests because the prepublish step is to run `gulp` 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | out/ 3 | .vscode/ 4 | grammars/*.json 5 | npm-debug.log* 6 | copy.sh 7 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "mochaExplorer.files": "test/**/*.ts", 3 | "mochaExplorer.require": "ts-node/register" 4 | } -------------------------------------------------------------------------------- /CODE-OF-CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | This project has adopted the code of conduct defined by the Contributor Covenant 4 | to clarify expected behavior in our community. 5 | 6 | For more information, see the [.NET Foundation Code of Conduct](https://dotnetfoundation.org/code-of-conduct). 7 | -------------------------------------------------------------------------------- /ISSUE_TEMPLATE: -------------------------------------------------------------------------------- 1 | ## Details 2 | 3 | What editor are you seeing the problem in? (e.g. Atom, Visual Studio Code, etc.) 4 | 5 | What version of the editor are you using? 6 | 7 | What color theme are you using? 
8 | 9 | ## Repro 10 | 11 | Please provide a code example and (optionally) a screenshot demonstrating the problem. -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 .NET Foundation 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Development 2 | 3 | To **build and test** install Node.js do the following: 4 | 5 | * Run `npm install` to install any dependencies. 6 | * Run `npm run compile` to build and run tests. 7 | 8 | Output grammars are output in the `grammars\` directory. 
9 |
10 | On Windows you may see a node-gyp error - [follow the instructions here to resolve it](https://github.com/nodejs/node-gyp/blob/master/README.md).
11 |
12 | ## Supported outputs
13 |
14 | * `grammars\csharp.cson` - for Atom
15 | * `grammars\csharp.tmLanguage` - TextMate grammar (XML plist)
16 |
17 |
18 | ## Releasing
19 |
20 | Tags on this repo get automatically published as a GitHub release and an NPM package through Travis CI.
21 |
--------------------------------------------------------------------------------
/gulpfile.mjs:
--------------------------------------------------------------------------------
1 | import { task, src, dest, series, parallel } from "gulp";
2 | import mocha from "gulp-mocha";
3 | import json2cson from "gulp-json2cson";
4 | import yaml from "gulp-yaml";
5 | import gulpTypescript from 'gulp-typescript';
6 | const { createProject } = gulpTypescript;
7 | import { load } from "js-yaml";
8 | import plist from 'plist';
9 | const { build } = plist;
10 | import { readFileSync, existsSync, mkdirSync, writeFileSync } from "fs";
11 | import { join } from "path";
12 | import { exec } from "child_process";
13 |
14 | const inputGrammar = "src/csharp.tmLanguage.yml";
15 | const grammarsDirectory = "grammars/";
16 | const jsOut = "out/";
17 |
18 |
19 | function handleError(err) {
20 | console.log(err.toString());
21 | process.exit(-1);
22 | }
23 |
24 | task('buildTmLanguage', done => {
25 | const text = readFileSync(inputGrammar);
26 | const jsonData = load(text);
27 | const plistData = build(jsonData);
28 |
29 | if (!existsSync(grammarsDirectory)) {
30 | mkdirSync(grammarsDirectory);
31 | }
32 |
33 | writeFileSync(join(grammarsDirectory, 'csharp.tmLanguage'), plistData);
34 |
35 | done();
36 | });
37 |
38 | task('buildVSCode', done => {
39 | const text = readFileSync(inputGrammar);
40 | const jsonData = load(text);
41 |
42 | if (!existsSync(grammarsDirectory)) {
43 | mkdirSync(grammarsDirectory);
44 | }
45 |
46 | // These fields aren't used.
47 | jsonData.uuid = undefined; 48 | jsonData.fileTypes = undefined; 49 | 50 | // Get the SHA of the last commit. 51 | exec("git rev-parse HEAD", (err, stdout, stderr) => { 52 | if (err) { 53 | handleErr(err); 54 | } 55 | 56 | const commitSha = stdout.trim(); 57 | 58 | // Add the additional properties used in the VSCode repo. 59 | const enhancedJson = { 60 | "information_for_contributors": [ 61 | "This file has been converted from https://github.com/dotnet/csharp-tmLanguage/blob/main/grammars/csharp.tmLanguage", 62 | "If you want to provide a fix or improvement, please create a pull request against the original repository.", 63 | "Once accepted there, we are happy to receive an update request." 64 | ], 65 | "version": `https://github.com/dotnet/csharp-tmLanguage/commit/${commitSha}`, 66 | ...jsonData 67 | } 68 | 69 | writeFileSync(join(grammarsDirectory, 'csharp.tmLanguage.json'), JSON.stringify(enhancedJson, null, '\t')); 70 | 71 | done(); 72 | }); 73 | }); 74 | 75 | task('buildAtom', () => { 76 | return src(inputGrammar) 77 | .pipe(yaml()) 78 | .pipe(json2cson()) 79 | .pipe(dest(grammarsDirectory)) 80 | .on("error", handleError); 81 | }); 82 | 83 | task('compile', () => { 84 | const tsProject = createProject("./tsconfig.json"); 85 | return tsProject.src() 86 | .pipe(tsProject()) 87 | .pipe(dest(jsOut)); 88 | }); 89 | 90 | task('test', series('compile', done => { 91 | const result = src(jsOut + "test/**/*.tests.js") 92 | .pipe(mocha()) 93 | .on("error", handleError); 94 | 95 | done(); 96 | 97 | return result; 98 | })); 99 | 100 | task('default', 101 | series( 102 | parallel('buildAtom', 'buildVSCode', 'buildTmLanguage'), 103 | 'test')); 104 | -------------------------------------------------------------------------------- /nodemon.json: -------------------------------------------------------------------------------- 1 | { 2 | "verbose": false, 3 | "watch": [ 4 | "src/*.yml" 5 | ], 6 | "ext": "yml" 7 | } 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "csharp-tmlanguage", 3 | "version": "0.1.0", 4 | "description": "Textmate grammar for C# with outputs for VSCode, Atom and TextMate.", 5 | "displayName": "csharp-tmLanguage", 6 | "license": "MIT", 7 | "icon": "images/csharpIcon.png", 8 | "bugs": { 9 | "url": "https://github.com/dotnet/csharp-tmLanguage/issues" 10 | }, 11 | "contributors": [ 12 | { 13 | "name": "Dustin Campbell" 14 | }, 15 | { 16 | "name": "Ivan Zlatev" 17 | }, 18 | { 19 | "name": "Damien Guard" 20 | } 21 | ], 22 | "scripts": { 23 | "compile": "gulp", 24 | "test": "gulp", 25 | "prepublish": "gulp", 26 | "watch": "nodemon --exec 'npx gulp buildTmLanguage'" 27 | }, 28 | "devDependencies": { 29 | "@types/chai": "4.2.19", 30 | "@types/gulp": "4.0.8", 31 | "@types/mocha": "8.2.2", 32 | "chai": "4.3.4", 33 | "gulp": "5.0.0", 34 | "gulp-json2cson": "2.0.0", 35 | "gulp-mocha": "10.0.1", 36 | "gulp-typescript": "6.0.0-alpha.1", 37 | "gulp-yaml": "2.0.4", 38 | "js-yaml": "4.1.0", 39 | "mocha": "10.4.0", 40 | "nodemon": "2.0.20", 41 | "plist": "3.0.5", 42 | "ts-node": "10.0.0", 43 | "typescript": "4.3.5", 44 | "vscode-oniguruma": "1.5.1", 45 | "vscode-textmate": "5.4.0" 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/syntax.md: -------------------------------------------------------------------------------- 1 | ## Important regular expressions: 2 | 3 | #### Identifier 4 | 5 | * Expression: `[_[:alpha:]][_[:alnum:]]*` 6 | * Matches: `_`, `Ident42` 7 | 8 | #### Type name 9 | 10 | ``` 11 | (? 12 | (?: 13 | (?:ref\s+)? # only in certain place with ref local/return 14 | (?: 15 | (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification 16 | (? # identifier + type arguments (if any) 17 | \g\s* 18 | (?\s*<(?:[^<>]|\g)+>\s*)? 
19 | ) 20 | (?:\s*\.\s*\g)* | # Are there any more names being dotted into? 21 | (?\s*\((?:[^\(\)]|\g)+\)) 22 | ) 23 | (?:\s*\*\s*)* # pointer suffix? 24 | (?:\s*\?\s*)? # nullable suffix? 25 | (?:\s* # array suffix? 26 | \[ 27 | (?:\s*,\s*)* # commata for multi-dimensional arrays 28 | \] 29 | \s* 30 | (?:\?)? # arrays can be nullable reference types, they need a nullable suffix 31 | \s* 32 | )* 33 | ) 34 | ) 35 | ``` -------------------------------------------------------------------------------- /test/attribute.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Token } from './utils/tokenize'; 8 | 9 | describe("Attributes", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Attributes", () => { 13 | it("global attribute", async () => { 14 | 15 | const input = `[Foo]`; 16 | const tokens = await tokenize(input); 17 | 18 | tokens.should.deep.equal([ 19 | Token.Punctuation.OpenBracket, 20 | Token.Type("Foo"), 21 | Token.Punctuation.CloseBracket]); 22 | }); 23 | 24 | it("global attribute with specifier", async () => { 25 | 26 | const input = `[assembly: Foo]`; 27 | const tokens = await tokenize(input); 28 | 29 | tokens.should.deep.equal([ 30 | Token.Punctuation.OpenBracket, 31 | Token.Keyword.AttributeSpecifier("assembly"), 32 | Token.Punctuation.Colon, 33 | Token.Type("Foo"), 34 | Token.Punctuation.CloseBracket]); 35 | }); 36 | 37 | it("Two global attributes in same section with specifier", async () => { 38 | 39 | const input = `[module: Foo, Bar]`; 40 | const tokens = await tokenize(input); 41 | 42 
| tokens.should.deep.equal([ 43 | Token.Punctuation.OpenBracket, 44 | Token.Keyword.AttributeSpecifier("module"), 45 | Token.Punctuation.Colon, 46 | Token.Type("Foo"), 47 | Token.Punctuation.Comma, 48 | Token.Type("Bar"), 49 | Token.Punctuation.CloseBracket]); 50 | }); 51 | 52 | it("Two global attributes in same section with specifier and empty argument lists", async () => { 53 | 54 | const input = `[module: Foo(), Bar()]`; 55 | const tokens = await tokenize(input); 56 | 57 | tokens.should.deep.equal([ 58 | Token.Punctuation.OpenBracket, 59 | Token.Keyword.AttributeSpecifier("module"), 60 | Token.Punctuation.Colon, 61 | Token.Type("Foo"), 62 | Token.Punctuation.OpenParen, 63 | Token.Punctuation.CloseParen, 64 | Token.Punctuation.Comma, 65 | Token.Type("Bar"), 66 | Token.Punctuation.OpenParen, 67 | Token.Punctuation.CloseParen, 68 | Token.Punctuation.CloseBracket]); 69 | }); 70 | 71 | it("Global attribute with one argument", async () => { 72 | 73 | const input = `[Foo(true)]`; 74 | const tokens = await tokenize(input); 75 | 76 | tokens.should.deep.equal([ 77 | Token.Punctuation.OpenBracket, 78 | Token.Type("Foo"), 79 | Token.Punctuation.OpenParen, 80 | Token.Literal.Boolean.True, 81 | Token.Punctuation.CloseParen, 82 | Token.Punctuation.CloseBracket]); 83 | }); 84 | 85 | it("Global attribute with two arguments", async () => { 86 | 87 | const input = `[Foo(true, 42)]`; 88 | const tokens = await tokenize(input); 89 | 90 | tokens.should.deep.equal([ 91 | Token.Punctuation.OpenBracket, 92 | Token.Type("Foo"), 93 | Token.Punctuation.OpenParen, 94 | Token.Literal.Boolean.True, 95 | Token.Punctuation.Comma, 96 | Token.Literal.Numeric.Decimal("42"), 97 | Token.Punctuation.CloseParen, 98 | Token.Punctuation.CloseBracket]); 99 | }); 100 | 101 | it("Global attribute with three arguments", async () => { 102 | 103 | const input = `[Foo(true, 42, "text")]`; 104 | const tokens = await tokenize(input); 105 | 106 | tokens.should.deep.equal([ 107 | Token.Punctuation.OpenBracket, 108 
| Token.Type("Foo"), 109 | Token.Punctuation.OpenParen, 110 | Token.Literal.Boolean.True, 111 | Token.Punctuation.Comma, 112 | Token.Literal.Numeric.Decimal("42"), 113 | Token.Punctuation.Comma, 114 | Token.Punctuation.String.Begin, 115 | Token.Literal.String("text"), 116 | Token.Punctuation.String.End, 117 | Token.Punctuation.CloseParen, 118 | Token.Punctuation.CloseBracket]); 119 | }); 120 | 121 | it("Global attribute with named argument", async () => { 122 | 123 | const input = `[Foo(Bar = 42)]`; 124 | const tokens = await tokenize(input); 125 | 126 | tokens.should.deep.equal([ 127 | Token.Punctuation.OpenBracket, 128 | Token.Type("Foo"), 129 | Token.Punctuation.OpenParen, 130 | Token.Identifier.PropertyName("Bar"), 131 | Token.Operator.Assignment, 132 | Token.Literal.Numeric.Decimal("42"), 133 | Token.Punctuation.CloseParen, 134 | Token.Punctuation.CloseBracket]); 135 | }); 136 | 137 | it("Global attribute with one positional argument and one named argument", async () => { 138 | 139 | const input = `[Foo(true, Bar = 42)]`; 140 | const tokens = await tokenize(input); 141 | 142 | tokens.should.deep.equal([ 143 | Token.Punctuation.OpenBracket, 144 | Token.Type("Foo"), 145 | Token.Punctuation.OpenParen, 146 | Token.Literal.Boolean.True, 147 | Token.Punctuation.Comma, 148 | Token.Identifier.PropertyName("Bar"), 149 | Token.Operator.Assignment, 150 | Token.Literal.Numeric.Decimal("42"), 151 | Token.Punctuation.CloseParen, 152 | Token.Punctuation.CloseBracket]); 153 | }); 154 | 155 | it("Global attribute with specifier, one positional argument, and two named arguments", async () => { 156 | 157 | const input = `[module: Foo(true, Bar = 42, Baz = "hello")]`; 158 | const tokens = await tokenize(input); 159 | 160 | tokens.should.deep.equal([ 161 | Token.Punctuation.OpenBracket, 162 | Token.Keyword.AttributeSpecifier("module"), 163 | Token.Punctuation.Colon, 164 | Token.Type("Foo"), 165 | Token.Punctuation.OpenParen, 166 | Token.Literal.Boolean.True, 167 | 
Token.Punctuation.Comma,
168 | Token.Identifier.PropertyName("Bar"),
169 | Token.Operator.Assignment,
170 | Token.Literal.Numeric.Decimal("42"),
171 | Token.Punctuation.Comma,
172 | Token.Identifier.PropertyName("Baz"),
173 | Token.Operator.Assignment,
174 | Token.Punctuation.String.Begin,
175 | Token.Literal.String("hello"),
176 | Token.Punctuation.String.End,
177 | Token.Punctuation.CloseParen,
178 | Token.Punctuation.CloseBracket]);
179 | });
180 |
181 | it("Generic attributes should be highlighted single type parameter", async () => {
182 |
183 | const input = `[Foo<T1>]`; // restored `<T1>` — the expected tokens below require TypeParameter Begin/T1/End
184 | const tokens = await tokenize(input);
185 |
186 | tokens.should.deep.equal([
187 | Token.Punctuation.OpenBracket,
188 | Token.Type("Foo"),
189 | Token.Punctuation.TypeParameter.Begin,
190 | Token.Type("T1"),
191 | Token.Punctuation.TypeParameter.End,
192 | Token.Punctuation.CloseBracket]);
193 | });
194 |
195 | it("Generic attributes should be highlighted multiple type parameters", async () => {
196 |
197 | const input = `[Foo<T1, T2>]`; // restored `<T1, T2>` to match the expected tokens below
198 | const tokens = await tokenize(input);
199 |
200 | tokens.should.deep.equal([
201 | Token.Punctuation.OpenBracket,
202 | Token.Type("Foo"),
203 | Token.Punctuation.TypeParameter.Begin,
204 | Token.Type("T1"),
205 | Token.Punctuation.Comma,
206 | Token.Type("T2"),
207 | Token.Punctuation.TypeParameter.End,
208 | Token.Punctuation.CloseBracket]);
209 | });
210 |
211 | it("Generic attributes should be highlighted multiple type parameters with regular arguments", async () => {
212 |
213 | const input = `[Foo<T1, T2>(true)]`; // restored `<T1, T2>` to match the expected tokens below
214 | const tokens = await tokenize(input);
215 |
216 | tokens.should.deep.equal([
217 | Token.Punctuation.OpenBracket,
218 | Token.Type("Foo"),
219 | Token.Punctuation.TypeParameter.Begin,
220 | Token.Type("T1"),
221 | Token.Punctuation.Comma,
222 | Token.Type("T2"),
223 | Token.Punctuation.TypeParameter.End,
224 | Token.Punctuation.OpenParen,
225 | Token.Literal.Boolean.True,
226 | Token.Punctuation.CloseParen,
227 |
Token.Punctuation.CloseBracket]); 228 | }); 229 | 230 | it("Generic attributes should be highlighted empty", async () => { 231 | 232 | const input = `[Foo<>]`; 233 | const tokens = await tokenize(input); 234 | 235 | tokens.should.deep.equal([ 236 | Token.Punctuation.OpenBracket, 237 | Token.Type("Foo"), 238 | Token.Punctuation.TypeParameter.Begin, 239 | Token.Punctuation.TypeParameter.End, 240 | Token.Punctuation.CloseBracket]); 241 | }); 242 | 243 | it("Generic attributes should be highlighted empty with comma", async () => { 244 | 245 | const input = `[Foo<,>]`; 246 | const tokens = await tokenize(input); 247 | 248 | tokens.should.deep.equal([ 249 | Token.Punctuation.OpenBracket, 250 | Token.Type("Foo"), 251 | Token.Punctuation.TypeParameter.Begin, 252 | Token.Punctuation.Comma, 253 | Token.Punctuation.TypeParameter.End, 254 | Token.Punctuation.CloseBracket]); 255 | }); 256 | }); 257 | }); 258 | -------------------------------------------------------------------------------- /test/constructor.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Input, Token } from './utils/tokenize'; 8 | 9 | describe("Constructors", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Constructors", () => { 13 | it("instance constructor with no parameters", async () => { 14 | const input = Input.InClass(`TestClass() { }`); 15 | const tokens = await tokenize(input); 16 | 17 | tokens.should.deep.equal([ 18 | Token.Identifier.MethodName("TestClass"), 19 | Token.Punctuation.OpenParen, 20 | Token.Punctuation.CloseParen, 21 | Token.Punctuation.OpenBrace, 22 | Token.Punctuation.CloseBrace]); 23 | }); 24 | 25 | it("public instance constructor with no parameters", async () => { 26 | const input = Input.InClass(`public TestClass() { }`); 27 | const tokens = await tokenize(input); 28 | 29 | tokens.should.deep.equal([ 30 | Token.Keyword.Modifier.Public, 31 | Token.Identifier.MethodName("TestClass"), 32 | Token.Punctuation.OpenParen, 33 | Token.Punctuation.CloseParen, 34 | Token.Punctuation.OpenBrace, 35 | Token.Punctuation.CloseBrace]); 36 | }); 37 | 38 | it("public instance constructor with one parameter", async () => { 39 | const input = Input.InClass(`public TestClass(int x) { }`); 40 | const tokens = await tokenize(input); 41 | 42 | tokens.should.deep.equal([ 43 | Token.Keyword.Modifier.Public, 44 | Token.Identifier.MethodName("TestClass"), 45 | Token.Punctuation.OpenParen, 46 | Token.PrimitiveType.Int, 47 | Token.Identifier.ParameterName("x"), 48 | Token.Punctuation.CloseParen, 49 | Token.Punctuation.OpenBrace, 50 | Token.Punctuation.CloseBrace]); 51 | }); 52 | 53 | it("public instance constructor with one ref parameter", async () => { 54 | const input = Input.InClass(`public TestClass(ref int x) { }`); 55 | const tokens = await tokenize(input); 56 | 57 | tokens.should.deep.equal([ 58 | Token.Keyword.Modifier.Public, 59 | 
Token.Identifier.MethodName("TestClass"), 60 | Token.Punctuation.OpenParen, 61 | Token.Keyword.Modifier.Ref, 62 | Token.PrimitiveType.Int, 63 | Token.Identifier.ParameterName("x"), 64 | Token.Punctuation.CloseParen, 65 | Token.Punctuation.OpenBrace, 66 | Token.Punctuation.CloseBrace]); 67 | }); 68 | 69 | it("instance constructor with two parameters", async () => { 70 | const input = Input.InClass(` 71 | TestClass(int x, int y) 72 | { 73 | }`); 74 | const tokens = await tokenize(input); 75 | 76 | tokens.should.deep.equal([ 77 | Token.Identifier.MethodName("TestClass"), 78 | Token.Punctuation.OpenParen, 79 | Token.PrimitiveType.Int, 80 | Token.Identifier.ParameterName("x"), 81 | Token.Punctuation.Comma, 82 | Token.PrimitiveType.Int, 83 | Token.Identifier.ParameterName("y"), 84 | Token.Punctuation.CloseParen, 85 | Token.Punctuation.OpenBrace, 86 | Token.Punctuation.CloseBrace]); 87 | }); 88 | 89 | it("instance constructor with expression body", async () => { 90 | const input = Input.InClass(`TestClass(int x, int y) => Foo();`); 91 | const tokens = await tokenize(input); 92 | 93 | tokens.should.deep.equal([ 94 | Token.Identifier.MethodName("TestClass"), 95 | Token.Punctuation.OpenParen, 96 | Token.PrimitiveType.Int, 97 | Token.Identifier.ParameterName("x"), 98 | Token.Punctuation.Comma, 99 | Token.PrimitiveType.Int, 100 | Token.Identifier.ParameterName("y"), 101 | Token.Punctuation.CloseParen, 102 | Token.Operator.Arrow, 103 | Token.Identifier.MethodName("Foo"), 104 | Token.Punctuation.OpenParen, 105 | Token.Punctuation.CloseParen, 106 | Token.Punctuation.Semicolon]); 107 | }); 108 | 109 | it("static constructor no parameters", async () => { 110 | const input = Input.InClass(`TestClass() { }`); 111 | const tokens = await tokenize(input); 112 | 113 | tokens.should.deep.equal([ 114 | Token.Identifier.MethodName("TestClass"), 115 | Token.Punctuation.OpenParen, 116 | Token.Punctuation.CloseParen, 117 | Token.Punctuation.OpenBrace, 118 | Token.Punctuation.CloseBrace]); 119 
| }); 120 | 121 | it("instance constructor with 'this' initializer", async () => { 122 | const input = Input.InClass(`TestClass() : this(42) { }`); 123 | const tokens = await tokenize(input); 124 | 125 | tokens.should.deep.equal([ 126 | Token.Identifier.MethodName("TestClass"), 127 | Token.Punctuation.OpenParen, 128 | Token.Punctuation.CloseParen, 129 | Token.Punctuation.Colon, 130 | Token.Variable.This, 131 | Token.Punctuation.OpenParen, 132 | Token.Literal.Numeric.Decimal("42"), 133 | Token.Punctuation.CloseParen, 134 | Token.Punctuation.OpenBrace, 135 | Token.Punctuation.CloseBrace]); 136 | }); 137 | 138 | it("public instance constructor with 'this' initializer", async () => { 139 | const input = Input.InClass(`public TestClass() : this(42) { }`); 140 | const tokens = await tokenize(input); 141 | 142 | tokens.should.deep.equal([ 143 | Token.Keyword.Modifier.Public, 144 | Token.Identifier.MethodName("TestClass"), 145 | Token.Punctuation.OpenParen, 146 | Token.Punctuation.CloseParen, 147 | Token.Punctuation.Colon, 148 | Token.Variable.This, 149 | Token.Punctuation.OpenParen, 150 | Token.Literal.Numeric.Decimal("42"), 151 | Token.Punctuation.CloseParen, 152 | Token.Punctuation.OpenBrace, 153 | Token.Punctuation.CloseBrace]); 154 | }); 155 | 156 | it("instance constructor with 'this' initializer with ref parameter", async () => { 157 | const input = Input.InClass(`TestClass(int x) : this(ref x) { }`); 158 | const tokens = await tokenize(input); 159 | 160 | tokens.should.deep.equal([ 161 | Token.Identifier.MethodName("TestClass"), 162 | Token.Punctuation.OpenParen, 163 | Token.PrimitiveType.Int, 164 | Token.Identifier.ParameterName("x"), 165 | Token.Punctuation.CloseParen, 166 | Token.Punctuation.Colon, 167 | Token.Variable.This, 168 | Token.Punctuation.OpenParen, 169 | Token.Keyword.Modifier.Ref, 170 | Token.Variable.ReadWrite("x"), 171 | Token.Punctuation.CloseParen, 172 | Token.Punctuation.OpenBrace, 173 | Token.Punctuation.CloseBrace]); 174 | }); 175 | 176 | 
it("instance constructor with 'this' initializer with named parameter", async () => { 177 | const input = Input.InClass(`TestClass(int x) : this(y: x) { }`); 178 | const tokens = await tokenize(input); 179 | 180 | tokens.should.deep.equal([ 181 | Token.Identifier.MethodName("TestClass"), 182 | Token.Punctuation.OpenParen, 183 | Token.PrimitiveType.Int, 184 | Token.Identifier.ParameterName("x"), 185 | Token.Punctuation.CloseParen, 186 | Token.Punctuation.Colon, 187 | Token.Variable.This, 188 | Token.Punctuation.OpenParen, 189 | Token.Identifier.ParameterName("y"), 190 | Token.Punctuation.Colon, 191 | Token.Variable.ReadWrite("x"), 192 | Token.Punctuation.CloseParen, 193 | Token.Punctuation.OpenBrace, 194 | Token.Punctuation.CloseBrace]); 195 | }); 196 | 197 | it("instance constructor with 'base' initializer", async () => { 198 | const input = Input.InClass(`TestClass() : base(42) { }`); 199 | const tokens = await tokenize(input); 200 | 201 | tokens.should.deep.equal([ 202 | Token.Identifier.MethodName("TestClass"), 203 | Token.Punctuation.OpenParen, 204 | Token.Punctuation.CloseParen, 205 | Token.Punctuation.Colon, 206 | Token.Variable.Base, 207 | Token.Punctuation.OpenParen, 208 | Token.Literal.Numeric.Decimal("42"), 209 | Token.Punctuation.CloseParen, 210 | Token.Punctuation.OpenBrace, 211 | Token.Punctuation.CloseBrace]); 212 | }); 213 | 214 | it("instance constructor with 'base' initializer on separate line", async () => { 215 | const input = Input.InClass(` 216 | TestClass() : 217 | base(42) 218 | { 219 | }`); 220 | const tokens = await tokenize(input); 221 | 222 | tokens.should.deep.equal([ 223 | Token.Identifier.MethodName("TestClass"), 224 | Token.Punctuation.OpenParen, 225 | Token.Punctuation.CloseParen, 226 | Token.Punctuation.Colon, 227 | Token.Variable.Base, 228 | Token.Punctuation.OpenParen, 229 | Token.Literal.Numeric.Decimal("42"), 230 | Token.Punctuation.CloseParen, 231 | Token.Punctuation.OpenBrace, 232 | Token.Punctuation.CloseBrace]); 233 | }); 
234 | 235 | it("Open multiline comment in front of parameter highlights properly (issue omnisharp-vscode#861)", async () => { 236 | const input = Input.InClass(` 237 | internal WaitHandle(Task self, TT.Task /*task) 238 | { 239 | this.task = task; 240 | this.selff = self; 241 | } 242 | `); 243 | const tokens = await tokenize(input); 244 | 245 | tokens.should.deep.equal([ 246 | Token.Keyword.Modifier.Internal, 247 | Token.Identifier.MethodName("WaitHandle"), 248 | Token.Punctuation.OpenParen, 249 | Token.Type("Task"), 250 | Token.Identifier.ParameterName("self"), 251 | Token.Punctuation.Comma, 252 | Token.Comment.MultiLine.Start, 253 | Token.Comment.MultiLine.Text("task)"), 254 | Token.Comment.MultiLine.Text("{"), 255 | Token.Comment.MultiLine.Text(" this.task = task;"), 256 | Token.Comment.MultiLine.Text(" this.selff = self;"), 257 | Token.Comment.MultiLine.Text("}"), 258 | Token.Comment.MultiLine.Text("") 259 | ]); 260 | }); 261 | 262 | it("Highlight properly within base constructor initializer (issue omnisharp-vscode#782)", async () => { 263 | const input = ` 264 | public class A 265 | { 266 | public A() : base( 267 | 1, 268 | "abc" 269 | new B(), 270 | new B()) { 271 | var a = 1; 272 | var b = "abc"; 273 | var c = new B(); 274 | var c = new B(); 275 | } 276 | } 277 | `; 278 | const tokens = await tokenize(input); 279 | 280 | tokens.should.deep.equal([ 281 | Token.Keyword.Modifier.Public, 282 | Token.Keyword.Definition.Class, 283 | Token.Identifier.ClassName("A"), 284 | Token.Punctuation.OpenBrace, 285 | Token.Keyword.Modifier.Public, 286 | Token.Identifier.MethodName("A"), 287 | Token.Punctuation.OpenParen, 288 | Token.Punctuation.CloseParen, 289 | Token.Punctuation.Colon, 290 | Token.Variable.Base, 291 | Token.Punctuation.OpenParen, 292 | Token.Literal.Numeric.Decimal("1"), 293 | Token.Punctuation.Comma, 294 | Token.Punctuation.String.Begin, 295 | Token.Literal.String("abc"), 296 | Token.Punctuation.String.End, 297 | Token.Operator.Expression.New, 298 | 
Token.Type("B"), 299 | Token.Punctuation.TypeParameter.Begin, 300 | Token.PrimitiveType.Char, 301 | Token.Punctuation.TypeParameter.End, 302 | Token.Punctuation.OpenParen, 303 | Token.Punctuation.CloseParen, 304 | Token.Punctuation.Comma, 305 | Token.Operator.Expression.New, 306 | Token.Type("B"), 307 | Token.Punctuation.TypeParameter.Begin, 308 | Token.PrimitiveType.String, 309 | Token.Punctuation.TypeParameter.End, 310 | Token.Punctuation.OpenParen, 311 | Token.Punctuation.CloseParen, 312 | Token.Punctuation.CloseParen, 313 | Token.Punctuation.OpenBrace, 314 | Token.Keyword.Definition.Var, 315 | Token.Identifier.LocalName("a"), 316 | Token.Operator.Assignment, 317 | Token.Literal.Numeric.Decimal("1"), 318 | Token.Punctuation.Semicolon, 319 | Token.Keyword.Definition.Var, 320 | Token.Identifier.LocalName("b"), 321 | Token.Operator.Assignment, 322 | Token.Punctuation.String.Begin, 323 | Token.Literal.String("abc"), 324 | Token.Punctuation.String.End, 325 | Token.Punctuation.Semicolon, 326 | Token.Keyword.Definition.Var, 327 | Token.Identifier.LocalName("c"), 328 | Token.Operator.Assignment, 329 | Token.Operator.Expression.New, 330 | Token.Type("B"), 331 | Token.Punctuation.TypeParameter.Begin, 332 | Token.PrimitiveType.Char, 333 | Token.Punctuation.TypeParameter.End, 334 | Token.Punctuation.OpenParen, 335 | Token.Punctuation.CloseParen, 336 | Token.Punctuation.Semicolon, 337 | Token.Keyword.Definition.Var, 338 | Token.Identifier.LocalName("c"), 339 | Token.Operator.Assignment, 340 | Token.Operator.Expression.New, 341 | Token.Type("B"), 342 | Token.Punctuation.TypeParameter.Begin, 343 | Token.PrimitiveType.String, 344 | Token.Punctuation.TypeParameter.End, 345 | Token.Punctuation.OpenParen, 346 | Token.Punctuation.CloseParen, 347 | Token.Punctuation.Semicolon, 348 | Token.Punctuation.CloseBrace, 349 | Token.Punctuation.CloseBrace 350 | ]); 351 | }); 352 | 353 | it("closing parenthesis of parameter list on next line", async () => { 354 | const input = Input.InClass(` 
355 | public C( 356 | string s 357 | ) 358 | { 359 | }`); 360 | const tokens = await tokenize(input); 361 | 362 | tokens.should.deep.equal([ 363 | Token.Keyword.Modifier.Public, 364 | Token.Identifier.MethodName("C"), 365 | Token.Punctuation.OpenParen, 366 | 367 | Token.PrimitiveType.String, 368 | Token.Identifier.ParameterName("s"), 369 | 370 | Token.Punctuation.CloseParen, 371 | Token.Punctuation.OpenBrace, 372 | Token.Punctuation.CloseBrace 373 | ]); 374 | }); 375 | 376 | it("closing parenthesis of parameter list on next line (issue #88)", async () => { 377 | const input = Input.InClass(` 378 | public AccountController( 379 | UserManager userManager, 380 | SignInManager signInManager, 381 | ILogger logger 382 | ) 383 | { 384 | }`); 385 | const tokens = await tokenize(input); 386 | 387 | tokens.should.deep.equal([ 388 | Token.Keyword.Modifier.Public, 389 | Token.Identifier.MethodName("AccountController"), 390 | Token.Punctuation.OpenParen, 391 | 392 | Token.Type("UserManager"), 393 | Token.Punctuation.TypeParameter.Begin, 394 | Token.Type("User"), 395 | Token.Punctuation.TypeParameter.End, 396 | Token.Identifier.ParameterName("userManager"), 397 | Token.Punctuation.Comma, 398 | 399 | Token.Type("SignInManager"), 400 | Token.Punctuation.TypeParameter.Begin, 401 | Token.Type("User"), 402 | Token.Punctuation.TypeParameter.End, 403 | Token.Identifier.ParameterName("signInManager"), 404 | Token.Punctuation.Comma, 405 | 406 | Token.Type("ILogger"), 407 | Token.Punctuation.TypeParameter.Begin, 408 | Token.Type("AccountController"), 409 | Token.Punctuation.TypeParameter.End, 410 | Token.Identifier.ParameterName("logger"), 411 | 412 | Token.Punctuation.CloseParen, 413 | Token.Punctuation.OpenBrace, 414 | Token.Punctuation.CloseBrace 415 | ]); 416 | }); 417 | }); 418 | }); -------------------------------------------------------------------------------- /test/delegate.tests.ts: -------------------------------------------------------------------------------- 1 | 
/*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Token } from './utils/tokenize'; 8 | 9 | describe("Delegates", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Delegates", () => { 13 | it("void delegate with no parameters", async () => { 14 | 15 | const input = `delegate void D();`; 16 | const tokens = await tokenize(input); 17 | 18 | tokens.should.deep.equal([ 19 | Token.Keyword.Definition.Delegate, 20 | Token.PrimitiveType.Void, 21 | Token.Identifier.DelegateName("D"), 22 | Token.Punctuation.OpenParen, 23 | Token.Punctuation.CloseParen, 24 | Token.Punctuation.Semicolon]); 25 | }); 26 | 27 | it("generic delegate with variance", async () => { 28 | 29 | const input = `delegate TResult D(T arg1);`; 30 | const tokens = await tokenize(input); 31 | 32 | tokens.should.deep.equal([ 33 | Token.Keyword.Definition.Delegate, 34 | Token.Type("TResult"), 35 | Token.Identifier.DelegateName("D"), 36 | Token.Punctuation.TypeParameter.Begin, 37 | Token.Keyword.Modifier.In, 38 | Token.Identifier.TypeParameterName("T"), 39 | Token.Punctuation.Comma, 40 | Token.Keyword.Modifier.Out, 41 | Token.Identifier.TypeParameterName("TResult"), 42 | Token.Punctuation.TypeParameter.End, 43 | Token.Punctuation.OpenParen, 44 | Token.Type("T"), 45 | Token.Identifier.ParameterName("arg1"), 46 | Token.Punctuation.CloseParen, 47 | Token.Punctuation.Semicolon]); 48 | }); 49 | 50 | it("generic delegate with constraints", async () => { 51 | 52 | const input = ` 53 | delegate void D() 54 | where T1 : T2; 55 | `; 56 | 57 | const tokens = await tokenize(input); 58 | 59 | tokens.should.deep.equal([ 60 | 
Token.Keyword.Definition.Delegate, 61 | Token.PrimitiveType.Void, 62 | Token.Identifier.DelegateName("D"), 63 | Token.Punctuation.TypeParameter.Begin, 64 | Token.Identifier.TypeParameterName("T1"), 65 | Token.Punctuation.Comma, 66 | Token.Identifier.TypeParameterName("T2"), 67 | Token.Punctuation.TypeParameter.End, 68 | Token.Punctuation.OpenParen, 69 | Token.Punctuation.CloseParen, 70 | Token.Keyword.Modifier.Where, 71 | Token.Identifier.TypeParameterName("T1"), 72 | Token.Punctuation.Colon, 73 | Token.Type("T2"), 74 | Token.Punctuation.Semicolon]); 75 | }); 76 | 77 | it("generic delegate with attributes on type parameters", async () => { 78 | 79 | const input = `delegate void D<[Foo] T1, [Bar] T2>();`; 80 | const tokens = await tokenize(input); 81 | 82 | tokens.should.deep.equal([ 83 | Token.Keyword.Definition.Delegate, 84 | Token.PrimitiveType.Void, 85 | Token.Identifier.DelegateName("D"), 86 | Token.Punctuation.TypeParameter.Begin, 87 | Token.Punctuation.OpenBracket, 88 | Token.Type("Foo"), 89 | Token.Punctuation.CloseBracket, 90 | Token.Identifier.TypeParameterName("T1"), 91 | Token.Punctuation.Comma, 92 | Token.Punctuation.OpenBracket, 93 | Token.Type("Bar"), 94 | Token.Punctuation.CloseBracket, 95 | Token.Identifier.TypeParameterName("T2"), 96 | Token.Punctuation.TypeParameter.End, 97 | Token.Punctuation.OpenParen, 98 | Token.Punctuation.CloseParen, 99 | Token.Punctuation.Semicolon]); 100 | }); 101 | 102 | it("delegate with multiple parameters", async () => { 103 | 104 | const input = `delegate int D(ref string x, out int y, params object[] z);`; 105 | const tokens = await tokenize(input); 106 | 107 | tokens.should.deep.equal([ 108 | Token.Keyword.Definition.Delegate, 109 | Token.PrimitiveType.Int, 110 | Token.Identifier.DelegateName("D"), 111 | Token.Punctuation.OpenParen, 112 | Token.Keyword.Modifier.Ref, 113 | Token.PrimitiveType.String, 114 | Token.Identifier.ParameterName("x"), 115 | Token.Punctuation.Comma, 116 | Token.Keyword.Modifier.Out, 117 | 
Token.PrimitiveType.Int, 118 | Token.Identifier.ParameterName("y"), 119 | Token.Punctuation.Comma, 120 | Token.Keyword.Modifier.Params, 121 | Token.PrimitiveType.Object, 122 | Token.Punctuation.OpenBracket, 123 | Token.Punctuation.CloseBracket, 124 | Token.Identifier.ParameterName("z"), 125 | Token.Punctuation.CloseParen, 126 | Token.Punctuation.Semicolon]); 127 | }); 128 | 129 | it("ref return", async () => { 130 | const input = `delegate ref int D();`; 131 | const tokens = await tokenize(input); 132 | 133 | tokens.should.deep.equal([ 134 | Token.Keyword.Definition.Delegate, 135 | Token.Keyword.Modifier.Ref, 136 | Token.PrimitiveType.Int, 137 | Token.Identifier.DelegateName("D"), 138 | Token.Punctuation.OpenParen, 139 | Token.Punctuation.CloseParen, 140 | Token.Punctuation.Semicolon]); 141 | }); 142 | 143 | it("ref readonly return", async () => { 144 | const input = `delegate ref readonly int D();`; 145 | const tokens = await tokenize(input); 146 | 147 | tokens.should.deep.equal([ 148 | Token.Keyword.Definition.Delegate, 149 | Token.Keyword.Modifier.Ref, 150 | Token.Keyword.Modifier.ReadOnly, 151 | Token.PrimitiveType.Int, 152 | Token.Identifier.DelegateName("D"), 153 | Token.Punctuation.OpenParen, 154 | Token.Punctuation.CloseParen, 155 | Token.Punctuation.Semicolon]); 156 | }); 157 | }); 158 | }); 159 | -------------------------------------------------------------------------------- /test/destructor.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Input, Token } from './utils/tokenize'; 8 | 9 | describe("Destructor", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Destructor", () => { 13 | it("declaration", async () => { 14 | 15 | const input = Input.InClass(`~TestClass() { }`); 16 | const tokens = await tokenize(input); 17 | 18 | tokens.should.deep.equal([ 19 | Token.Punctuation.Tilde, 20 | Token.Identifier.MethodName("TestClass"), 21 | Token.Punctuation.OpenParen, 22 | Token.Punctuation.CloseParen, 23 | Token.Punctuation.OpenBrace, 24 | Token.Punctuation.CloseBrace]); 25 | }); 26 | 27 | it("with expression body", async () => { 28 | 29 | const input = Input.InClass(`~TestClass() => Foo();`); 30 | const tokens = await tokenize(input); 31 | 32 | tokens.should.deep.equal([ 33 | Token.Punctuation.Tilde, 34 | Token.Identifier.MethodName("TestClass"), 35 | Token.Punctuation.OpenParen, 36 | Token.Punctuation.CloseParen, 37 | Token.Operator.Arrow, 38 | Token.Identifier.MethodName("Foo"), 39 | Token.Punctuation.OpenParen, 40 | Token.Punctuation.CloseParen, 41 | Token.Punctuation.Semicolon]); 42 | }); 43 | }); 44 | }); -------------------------------------------------------------------------------- /test/enum.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Token } from './utils/tokenize'; 8 | 9 | describe("Enums", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Enums", () => { 13 | it("simple enum", async () => { 14 | 15 | const input = `enum E { }`; 16 | const tokens = await tokenize(input); 17 | 18 | tokens.should.deep.equal([ 19 | Token.Keyword.Definition.Enum, 20 | Token.Identifier.EnumName("E"), 21 | Token.Punctuation.OpenBrace, 22 | Token.Punctuation.CloseBrace]); 23 | }); 24 | 25 | it("enum with no body", async () => { 26 | 27 | const input = `enum E;`; 28 | const tokens = await tokenize(input); 29 | 30 | tokens.should.deep.equal([ 31 | Token.Keyword.Definition.Enum, 32 | Token.Identifier.EnumName("E"), 33 | Token.Punctuation.Semicolon]); 34 | }); 35 | 36 | it("enum with base type", async () => { 37 | 38 | const input = `enum E : byte { }`; 39 | const tokens = await tokenize(input); 40 | 41 | tokens.should.deep.equal([ 42 | Token.Keyword.Definition.Enum, 43 | Token.Identifier.EnumName("E"), 44 | Token.Punctuation.Colon, 45 | Token.PrimitiveType.Byte, 46 | Token.Punctuation.OpenBrace, 47 | Token.Punctuation.CloseBrace]); 48 | }); 49 | 50 | it("enum with single member", async () => { 51 | 52 | const input = `enum E { M1 }`; 53 | const tokens = await tokenize(input); 54 | 55 | tokens.should.deep.equal([ 56 | Token.Keyword.Definition.Enum, 57 | Token.Identifier.EnumName("E"), 58 | Token.Punctuation.OpenBrace, 59 | Token.Identifier.EnumMemberName("M1"), 60 | Token.Punctuation.CloseBrace]); 61 | }); 62 | 63 | it("enum with multiple members", async () => { 64 | 65 | const input = `enum Color { Red, Green, Blue }`; 66 | const tokens = await tokenize(input); 67 | 68 | tokens.should.deep.equal([ 69 | Token.Keyword.Definition.Enum, 70 | Token.Identifier.EnumName("Color"), 71 | Token.Punctuation.OpenBrace, 72 | 
Token.Identifier.EnumMemberName("Red"), 73 | Token.Punctuation.Comma, 74 | Token.Identifier.EnumMemberName("Green"), 75 | Token.Punctuation.Comma, 76 | Token.Identifier.EnumMemberName("Blue"), 77 | Token.Punctuation.CloseBrace]); 78 | }); 79 | 80 | it("enum with initialized member", async () => { 81 | 82 | const input = ` 83 | enum E 84 | { 85 | Value1 = 1, 86 | Value2, 87 | Value3 88 | } 89 | `; 90 | 91 | const tokens = await tokenize(input); 92 | 93 | tokens.should.deep.equal([ 94 | Token.Keyword.Definition.Enum, 95 | Token.Identifier.EnumName("E"), 96 | Token.Punctuation.OpenBrace, 97 | Token.Identifier.EnumMemberName("Value1"), 98 | Token.Operator.Assignment, 99 | Token.Literal.Numeric.Decimal("1"), 100 | Token.Punctuation.Comma, 101 | Token.Identifier.EnumMemberName("Value2"), 102 | Token.Punctuation.Comma, 103 | Token.Identifier.EnumMemberName("Value3"), 104 | Token.Punctuation.CloseBrace]); 105 | }); 106 | 107 | it("enum members are highligted properly (issue omnisharp-vscode#1108)", async () => { 108 | 109 | const input = ` 110 | public enum TestEnum 111 | { 112 | enum1, 113 | enum2, 114 | enum3, 115 | enum4 116 | } 117 | 118 | public class TestClass 119 | { 120 | 121 | } 122 | 123 | public enum TestEnum2 124 | { 125 | enum1 = 10, 126 | enum2 = 15, 127 | } 128 | 129 | public class TestClass2 130 | { 131 | 132 | } 133 | `; 134 | 135 | const tokens = await tokenize(input); 136 | 137 | tokens.should.deep.equal([ 138 | Token.Keyword.Modifier.Public, 139 | Token.Keyword.Definition.Enum, 140 | Token.Identifier.EnumName("TestEnum"), 141 | Token.Punctuation.OpenBrace, 142 | Token.Identifier.EnumMemberName("enum1"), 143 | Token.Punctuation.Comma, 144 | Token.Identifier.EnumMemberName("enum2"), 145 | Token.Punctuation.Comma, 146 | Token.Identifier.EnumMemberName("enum3"), 147 | Token.Punctuation.Comma, 148 | Token.Identifier.EnumMemberName("enum4"), 149 | Token.Punctuation.CloseBrace, 150 | 151 | Token.Keyword.Modifier.Public, 152 | Token.Keyword.Definition.Class, 
153 | Token.Identifier.ClassName("TestClass"), 154 | Token.Punctuation.OpenBrace, 155 | Token.Punctuation.CloseBrace, 156 | 157 | Token.Keyword.Modifier.Public, 158 | Token.Keyword.Definition.Enum, 159 | Token.Identifier.EnumName("TestEnum2"), 160 | Token.Punctuation.OpenBrace, 161 | Token.Identifier.EnumMemberName("enum1"), 162 | Token.Operator.Assignment, 163 | Token.Literal.Numeric.Decimal("10"), 164 | Token.Punctuation.Comma, 165 | Token.Identifier.EnumMemberName("enum2"), 166 | Token.Operator.Assignment, 167 | Token.Literal.Numeric.Decimal("15"), 168 | Token.Punctuation.Comma, 169 | Token.Punctuation.CloseBrace, 170 | 171 | Token.Keyword.Modifier.Public, 172 | Token.Keyword.Definition.Class, 173 | Token.Identifier.ClassName("TestClass2"), 174 | Token.Punctuation.OpenBrace, 175 | Token.Punctuation.CloseBrace 176 | ]); 177 | }); 178 | }); 179 | }); -------------------------------------------------------------------------------- /test/event.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Input, Token, Scope } from './utils/tokenize'; 8 | 9 | describe("Events", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Events", () => { 13 | it("declaration", async () => { 14 | const input = Input.InClass(`public event Type Event;`); 15 | const tokens = await tokenize(input, "meta.accessor."); 16 | 17 | tokens.should.deep.equal([ 18 | Token.Keyword.Modifier.Public, 19 | Token.Keyword.Definition.Event, 20 | Token.Type("Type"), 21 | Token.Identifier.EventName("Event"), 22 | Token.Punctuation.Semicolon]); 23 | }); 24 | 25 | it("declaration with multiple modifiers", async () => { 26 | const input = Input.InClass(`protected internal event Type Event;`); 27 | const tokens = await tokenize(input, "meta.accessor."); 28 | 29 | tokens.should.deep.equal([ 30 | Token.Keyword.Modifier.Protected, 31 | Token.Keyword.Modifier.Internal, 32 | Token.Keyword.Definition.Event, 33 | Token.Type("Type"), 34 | Token.Identifier.EventName("Event"), 35 | Token.Punctuation.Semicolon]); 36 | }); 37 | 38 | it("declaration with multiple declarators", async () => { 39 | const input = Input.InClass(`public event Type Event1, Event2;`); 40 | const tokens = await tokenize(input, "meta.accessor."); 41 | 42 | tokens.should.deep.equal([ 43 | Token.Keyword.Modifier.Public, 44 | Token.Keyword.Definition.Event, 45 | Token.Type("Type"), 46 | Token.Identifier.EventName("Event1"), 47 | Token.Punctuation.Comma, 48 | Token.Identifier.EventName("Event2"), 49 | Token.Punctuation.Semicolon]); 50 | }); 51 | 52 | it("generic", async () => { 53 | const input = Input.InClass(`public event EventHandler, Dictionary> Event;`); 54 | const tokens = await tokenize(input, "meta.accessor."); 55 | 56 | tokens.should.deep.equal([ 57 | Token.Keyword.Modifier.Public, 58 | Token.Keyword.Definition.Event, 59 | Token.Type("EventHandler"), 60 | 
Token.Punctuation.TypeParameter.Begin, 61 | Token.Type("List"), 62 | Token.Punctuation.TypeParameter.Begin, 63 | Token.Type("T"), 64 | Token.Punctuation.TypeParameter.End, 65 | Token.Punctuation.Comma, 66 | Token.Type("Dictionary"), 67 | Token.Punctuation.TypeParameter.Begin, 68 | Token.Type("T"), 69 | Token.Punctuation.Comma, 70 | Token.Type("D"), 71 | Token.Punctuation.TypeParameter.End, 72 | Token.Punctuation.TypeParameter.End, 73 | Token.Identifier.EventName("Event"), 74 | Token.Punctuation.Semicolon]); 75 | }); 76 | 77 | it("declaration with accessors", async () => { 78 | const input = Input.InClass(` 79 | public event Type Event 80 | { 81 | add { } 82 | remove { } 83 | }`); 84 | 85 | const tokens = await tokenize(input, "meta.accessor."); 86 | 87 | tokens.should.deep.equal([ 88 | Token.Keyword.Modifier.Public, 89 | Token.Keyword.Definition.Event, 90 | Token.Type("Type"), 91 | Token.Identifier.EventName("Event"), 92 | Token.Punctuation.OpenBrace, 93 | Token.Keyword.Definition.Add, 94 | Token.Punctuation.OpenBrace, 95 | Token.Punctuation.CloseBrace, 96 | Token.Keyword.Definition.Remove, 97 | Token.Punctuation.OpenBrace, 98 | Token.Punctuation.CloseBrace, 99 | Token.Punctuation.CloseBrace]); 100 | }); 101 | 102 | it("explicitly-implemented interface member", async () => { 103 | const input = Input.InClass(`event EventHandler IFoo.Event { add; remove; }`); 104 | const tokens = await tokenize(input, "meta.accessor."); 105 | 106 | tokens.should.deep.equal([ 107 | Token.Keyword.Definition.Event, 108 | Token.Type("EventHandler"), 109 | Token.Type("IFoo"), 110 | Token.Punctuation.TypeParameter.Begin, 111 | Token.PrimitiveType.String, 112 | Token.Punctuation.TypeParameter.End, 113 | Token.Punctuation.Accessor, 114 | Token.Identifier.EventName("Event"), 115 | Token.Punctuation.OpenBrace, 116 | Token.Keyword.Definition.Add, 117 | Token.Punctuation.Semicolon, 118 | Token.Keyword.Definition.Remove, 119 | Token.Punctuation.Semicolon, 120 | Token.Punctuation.CloseBrace]); 
121 | }); 122 | 123 | it("declaration in interface", async () => { 124 | const input = Input.InInterface(`event EventHandler Event;`); 125 | const tokens = await tokenize(input, "meta.accessor."); 126 | 127 | tokens.should.deep.equal([ 128 | Token.Keyword.Definition.Event, 129 | Token.Type("EventHandler"), 130 | Token.Identifier.EventName("Event"), 131 | Token.Punctuation.Semicolon]); 132 | }); 133 | 134 | it("declaration in interface with properties", async () => { 135 | const input = ` 136 | interface IObj 137 | { 138 | int Prop1 139 | { 140 | get; 141 | } 142 | event EventHandler Event; 143 | int Prop2 { get; } 144 | }`; 145 | const tokens = await tokenize(input, "meta.accessor."); 146 | 147 | tokens.should.deep.equal([ 148 | Token.Keyword.Definition.Interface, 149 | Token.Identifier.InterfaceName("IObj"), 150 | Token.Punctuation.OpenBrace, 151 | Token.PrimitiveType.Int, 152 | Token.Identifier.PropertyName("Prop1"), 153 | Token.Punctuation.OpenBrace, 154 | Token.Keyword.Definition.Get, 155 | Token.Punctuation.Semicolon, 156 | Token.Punctuation.CloseBrace, 157 | Token.Keyword.Definition.Event, 158 | Token.Type("EventHandler"), 159 | Token.Identifier.EventName("Event"), 160 | Token.Punctuation.Semicolon, 161 | Token.PrimitiveType.Int, 162 | Token.Identifier.PropertyName("Prop2"), 163 | Token.Punctuation.OpenBrace, 164 | Token.Keyword.Definition.Get, 165 | Token.Punctuation.Semicolon, 166 | Token.Punctuation.CloseBrace, 167 | Token.Punctuation.CloseBrace]); 168 | }); 169 | 170 | it("declaration with attributes", async () => { 171 | const input = Input.InClass(` 172 | [event: Test] 173 | public event Action E1 174 | { 175 | [Obsolete] 176 | add { } 177 | [Obsolete] 178 | [return: Obsolete] 179 | remove { } 180 | }`); 181 | 182 | const tokens = await tokenize(input, "meta.accessor."); 183 | 184 | tokens.should.deep.equal([ 185 | Token.Punctuation.OpenBracket, 186 | Token.Keyword.AttributeSpecifier("event"), 187 | Token.Punctuation.Colon, 188 | Token.Type("Test"), 189 
| Token.Punctuation.CloseBracket, 190 | Token.Keyword.Modifier.Public, 191 | Token.Keyword.Definition.Event, 192 | Token.Type("Action"), 193 | Token.Identifier.EventName("E1"), 194 | Token.Punctuation.OpenBrace, 195 | Token.Punctuation.OpenBracket, 196 | Token.Type("Obsolete"), 197 | Token.Punctuation.CloseBracket, 198 | Token.Keyword.Definition.Add, 199 | Token.Punctuation.OpenBrace, 200 | Token.Punctuation.CloseBrace, 201 | Token.Punctuation.OpenBracket, 202 | Token.Type("Obsolete"), 203 | Token.Punctuation.CloseBracket, 204 | Token.Punctuation.OpenBracket, 205 | Token.Keyword.AttributeSpecifier("return"), 206 | Token.Punctuation.Colon, 207 | Token.Type("Obsolete"), 208 | Token.Punctuation.CloseBracket, 209 | Token.Keyword.Definition.Remove, 210 | Token.Punctuation.OpenBrace, 211 | Token.Punctuation.CloseBrace, 212 | Token.Punctuation.CloseBrace 213 | ]); 214 | }); 215 | 216 | it("Expression-bodied event accessors (issue #44)", async () => { 217 | const input = Input.InClass(` 218 | event EventHandler E 219 | { 220 | add => Add(value); 221 | remove => Remove(value); 222 | } 223 | `); 224 | const tokens = await tokenize(input, "meta.accessor."); 225 | 226 | tokens.should.deep.equal([ 227 | Token.Keyword.Definition.Event, 228 | Token.Type("EventHandler"), 229 | Token.Identifier.EventName("E"), 230 | Token.Punctuation.OpenBrace, 231 | Token.Keyword.Definition.Add, 232 | Token.Operator.Arrow, 233 | ...Scope.Accessor.Setter( 234 | Token.Identifier.MethodName("Add"), 235 | Token.Punctuation.OpenParen, 236 | Token.Variable.Value, 237 | Token.Punctuation.CloseParen, 238 | ), 239 | Token.Punctuation.Semicolon, 240 | Token.Keyword.Definition.Remove, 241 | Token.Operator.Arrow, 242 | ...Scope.Accessor.Setter( 243 | Token.Identifier.MethodName("Remove"), 244 | Token.Punctuation.OpenParen, 245 | Token.Variable.Value, 246 | Token.Punctuation.CloseParen, 247 | ), 248 | Token.Punctuation.Semicolon, 249 | Token.Punctuation.CloseBrace]); 250 | }); 251 | 252 | it("comment before 
initializer - single line (issue #264)", async () => { 253 | const input = Input.InClass(`event EventHandler Event /* comment */ { add; remove; }`); 254 | const tokens = await tokenize(input, "meta.accessor."); 255 | 256 | tokens.should.deep.equal([ 257 | Token.Keyword.Definition.Event, 258 | Token.Type("EventHandler"), 259 | Token.Identifier.EventName("Event"), 260 | Token.Comment.MultiLine.Start, 261 | Token.Comment.MultiLine.Text(" comment "), 262 | Token.Comment.MultiLine.End, 263 | Token.Punctuation.OpenBrace, 264 | Token.Keyword.Definition.Add, 265 | Token.Punctuation.Semicolon, 266 | Token.Keyword.Definition.Remove, 267 | Token.Punctuation.Semicolon, 268 | Token.Punctuation.CloseBrace, 269 | ]); 270 | }); 271 | 272 | it("comment before initializer - multiple lines (issue #264)", async () => { 273 | const input = Input.InClass(` 274 | event EventHandler Event // comment 275 | { 276 | add; 277 | remove; 278 | }`); 279 | const tokens = await tokenize(input, "meta.accessor."); 280 | 281 | tokens.should.deep.equal([ 282 | Token.Keyword.Definition.Event, 283 | Token.Type("EventHandler"), 284 | Token.Identifier.EventName("Event"), 285 | Token.Comment.SingleLine.Start, 286 | Token.Comment.SingleLine.Text(" comment"), 287 | Token.Punctuation.OpenBrace, 288 | Token.Keyword.Definition.Add, 289 | Token.Punctuation.Semicolon, 290 | Token.Keyword.Definition.Remove, 291 | Token.Punctuation.Semicolon, 292 | Token.Punctuation.CloseBrace, 293 | ]); 294 | }); 295 | 296 | it("declaration with default value (issue #118)", async () => { 297 | const input = Input.InClass(`event EventHandler Event = null;`); 298 | const tokens = await tokenize(input, "meta.accessor."); 299 | 300 | tokens.should.deep.equal([ 301 | Token.Keyword.Definition.Event, 302 | Token.Type("EventHandler"), 303 | Token.Identifier.EventName("Event"), 304 | Token.Operator.Assignment, 305 | Token.Literal.Null, 306 | Token.Punctuation.Semicolon, 307 | ]); 308 | }); 309 | 310 | it("multiple declarations with default 
value (issue #118)", async () => { 311 | const input = Input.InClass(` 312 | event EventHandler Event1 = delegate { }, 313 | Event2 = () => { } 314 | , Event3 = null;`); 315 | const tokens = await tokenize(input, "meta.accessor."); 316 | 317 | tokens.should.deep.equal([ 318 | Token.Keyword.Definition.Event, 319 | Token.Type("EventHandler"), 320 | Token.Identifier.EventName("Event1"), 321 | Token.Operator.Assignment, 322 | Token.Keyword.Definition.Delegate, 323 | Token.Punctuation.OpenBrace, 324 | Token.Punctuation.CloseBrace, 325 | Token.Punctuation.Comma, 326 | Token.Identifier.EventName("Event2"), 327 | Token.Operator.Assignment, 328 | Token.Punctuation.OpenParen, 329 | Token.Punctuation.CloseParen, 330 | Token.Operator.Arrow, 331 | Token.Punctuation.OpenBrace, 332 | Token.Punctuation.CloseBrace, 333 | Token.Punctuation.Comma, 334 | Token.Identifier.EventName("Event3"), 335 | Token.Operator.Assignment, 336 | Token.Literal.Null, 337 | Token.Punctuation.Semicolon, 338 | ]); 339 | }); 340 | }); 341 | }); 342 | -------------------------------------------------------------------------------- /test/extern-alias.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Token } from './utils/tokenize'; 8 | 9 | describe("Extern aliases", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Extern aliases", () => { 13 | it("declaration", async () => { 14 | 15 | const input = ` 16 | extern alias X; 17 | extern alias Y;`; 18 | 19 | const tokens = await tokenize(input); 20 | 21 | tokens.should.deep.equal([ 22 | Token.Keyword.Directive.Extern, 23 | Token.Keyword.Directive.Alias, 24 | Token.Variable.Alias("X"), 25 | Token.Punctuation.Semicolon, 26 | Token.Keyword.Directive.Extern, 27 | Token.Keyword.Directive.Alias, 28 | Token.Variable.Alias("Y"), 29 | Token.Punctuation.Semicolon]); 30 | }); 31 | }); 32 | }); -------------------------------------------------------------------------------- /test/field.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Input, Token } from './utils/tokenize'; 8 | 9 | describe("Field", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Field", () => { 13 | it("declaration", async () => { 14 | const input = Input.InClass(` 15 | int file; 16 | private List _field; 17 | private List field; 18 | private List field123;`); 19 | 20 | const tokens = await tokenize(input); 21 | 22 | tokens.should.deep.equal([ 23 | Token.PrimitiveType.Int, 24 | Token.Identifier.FieldName("file"), 25 | Token.Punctuation.Semicolon, 26 | 27 | Token.Keyword.Modifier.Private, 28 | Token.Type("List"), 29 | Token.Identifier.FieldName("_field"), 30 | Token.Punctuation.Semicolon, 31 | 32 | Token.Keyword.Modifier.Private, 33 | Token.Type("List"), 34 | Token.Identifier.FieldName("field"), 35 | Token.Punctuation.Semicolon, 36 | 37 | Token.Keyword.Modifier.Private, 38 | Token.Type("List"), 39 | Token.Identifier.FieldName("field123"), 40 | Token.Punctuation.Semicolon]); 41 | }); 42 | 43 | it("generic", async () => { 44 | const input = Input.InClass(`private Dictionary< List, Dictionary> _field;`); 45 | const tokens = await tokenize(input); 46 | 47 | tokens.should.deep.equal([ 48 | Token.Keyword.Modifier.Private, 49 | Token.Type("Dictionary"), 50 | Token.Punctuation.TypeParameter.Begin, 51 | Token.Type("List"), 52 | Token.Punctuation.TypeParameter.Begin, 53 | Token.Type("T"), 54 | Token.Punctuation.TypeParameter.End, 55 | Token.Punctuation.Comma, 56 | Token.Type("Dictionary"), 57 | Token.Punctuation.TypeParameter.Begin, 58 | Token.Type("T"), 59 | Token.Punctuation.Comma, 60 | Token.Type("D"), 61 | Token.Punctuation.TypeParameter.End, 62 | Token.Punctuation.TypeParameter.End, 63 | Token.Identifier.FieldName("_field"), 64 | Token.Punctuation.Semicolon]); 65 | }); 66 | 67 | 68 | it("modifiers", async () => { 69 | const input = Input.InClass(` 70 | private 
static readonly List _field; 71 | readonly string _field2; 72 | string _field3; 73 | required int _field4;`); 74 | 75 | const tokens = await tokenize(input); 76 | 77 | tokens.should.deep.equal([ 78 | Token.Keyword.Modifier.Private, 79 | Token.Keyword.Modifier.Static, 80 | Token.Keyword.Modifier.ReadOnly, 81 | Token.Type("List"), 82 | Token.Identifier.FieldName("_field"), 83 | Token.Punctuation.Semicolon, 84 | 85 | Token.Keyword.Modifier.ReadOnly, 86 | Token.PrimitiveType.String, 87 | Token.Identifier.FieldName("_field2"), 88 | Token.Punctuation.Semicolon, 89 | 90 | Token.PrimitiveType.String, 91 | Token.Identifier.FieldName("_field3"), 92 | Token.Punctuation.Semicolon, 93 | 94 | Token.Keyword.Modifier.Required, 95 | Token.PrimitiveType.Int, 96 | Token.Identifier.FieldName("_field4"), 97 | Token.Punctuation.Semicolon]); 98 | }); 99 | 100 | it("types", async () => { 101 | const input = Input.InClass(` 102 | string field123; 103 | string[] field123;`); 104 | 105 | const tokens = await tokenize(input); 106 | 107 | tokens.should.deep.equal([ 108 | Token.PrimitiveType.String, 109 | Token.Identifier.FieldName("field123"), 110 | Token.Punctuation.Semicolon, 111 | 112 | Token.PrimitiveType.String, 113 | Token.Punctuation.OpenBracket, 114 | Token.Punctuation.CloseBracket, 115 | Token.Identifier.FieldName("field123"), 116 | Token.Punctuation.Semicolon]); 117 | }); 118 | 119 | it("assignment", async () => { 120 | const input = Input.InClass(` 121 | private string field = "hello"; 122 | const bool field = true;`); 123 | 124 | let tokens = await tokenize(input); 125 | 126 | tokens.should.deep.equal([ 127 | Token.Keyword.Modifier.Private, 128 | Token.PrimitiveType.String, 129 | Token.Identifier.FieldName("field"), 130 | Token.Operator.Assignment, 131 | Token.Punctuation.String.Begin, 132 | Token.Literal.String("hello"), 133 | Token.Punctuation.String.End, 134 | Token.Punctuation.Semicolon, 135 | 136 | Token.Keyword.Modifier.Const, 137 | Token.PrimitiveType.Bool, 138 | 
Token.Identifier.FieldName("field"), 139 | Token.Operator.Assignment, 140 | Token.Literal.Boolean.True, 141 | Token.Punctuation.Semicolon]); 142 | }); 143 | 144 | it("declaration with multiple declarators", async () => { 145 | const input = Input.InClass(`int x = 19, y = 23, z = 42;`); 146 | const tokens = await tokenize(input); 147 | 148 | tokens.should.deep.equal([ 149 | Token.PrimitiveType.Int, 150 | Token.Identifier.FieldName("x"), 151 | Token.Operator.Assignment, 152 | Token.Literal.Numeric.Decimal("19"), 153 | Token.Punctuation.Comma, 154 | Token.Identifier.FieldName("y"), 155 | Token.Operator.Assignment, 156 | Token.Literal.Numeric.Decimal("23"), 157 | Token.Punctuation.Comma, 158 | Token.Identifier.FieldName("z"), 159 | Token.Operator.Assignment, 160 | Token.Literal.Numeric.Decimal("42"), 161 | Token.Punctuation.Semicolon]); 162 | }); 163 | 164 | it("tuple type with no names and no modifiers", async () => { 165 | const input = Input.InClass(`(int, int) x;`); 166 | const tokens = await tokenize(input); 167 | 168 | tokens.should.deep.equal([ 169 | Token.Punctuation.OpenParen, 170 | Token.PrimitiveType.Int, 171 | Token.Punctuation.Comma, 172 | Token.PrimitiveType.Int, 173 | Token.Punctuation.CloseParen, 174 | Token.Identifier.FieldName("x"), 175 | Token.Punctuation.Semicolon]); 176 | }); 177 | 178 | it("tuple type with no names and private modifier", async () => { 179 | const input = Input.InClass(`private (int, int) x;`); 180 | const tokens = await tokenize(input); 181 | 182 | tokens.should.deep.equal([ 183 | Token.Keyword.Modifier.Private, 184 | Token.Punctuation.OpenParen, 185 | Token.PrimitiveType.Int, 186 | Token.Punctuation.Comma, 187 | Token.PrimitiveType.Int, 188 | Token.Punctuation.CloseParen, 189 | Token.Identifier.FieldName("x"), 190 | Token.Punctuation.Semicolon]); 191 | }); 192 | 193 | it("tuple type with names and no modifiers", async () => { 194 | const input = Input.InClass(`(int x, int y) z;`); 195 | const tokens = await tokenize(input); 196 | 
197 | tokens.should.deep.equal([ 198 | Token.Punctuation.OpenParen, 199 | Token.PrimitiveType.Int, 200 | Token.Identifier.TupleElementName("x"), 201 | Token.Punctuation.Comma, 202 | Token.PrimitiveType.Int, 203 | Token.Identifier.TupleElementName("y"), 204 | Token.Punctuation.CloseParen, 205 | Token.Identifier.FieldName("z"), 206 | Token.Punctuation.Semicolon]); 207 | }); 208 | 209 | it("tuple type with names and private modifier", async () => { 210 | const input = Input.InClass(`private (int x, int y) z;`); 211 | const tokens = await tokenize(input); 212 | 213 | tokens.should.deep.equal([ 214 | Token.Keyword.Modifier.Private, 215 | Token.Punctuation.OpenParen, 216 | Token.PrimitiveType.Int, 217 | Token.Identifier.TupleElementName("x"), 218 | Token.Punctuation.Comma, 219 | Token.PrimitiveType.Int, 220 | Token.Identifier.TupleElementName("y"), 221 | Token.Punctuation.CloseParen, 222 | Token.Identifier.FieldName("z"), 223 | Token.Punctuation.Semicolon]); 224 | }); 225 | 226 | it("Fields with fully-qualified names are highlighted properly (issue omnisharp-vscode#1097)", async () => { 227 | const input = Input.InClass(` 228 | private CanvasGroup[] groups; 229 | private UnityEngine.UI.Image[] selectedImages; 230 | `); 231 | const tokens = await tokenize(input); 232 | 233 | tokens.should.deep.equal([ 234 | Token.Keyword.Modifier.Private, 235 | Token.Type("CanvasGroup"), 236 | Token.Punctuation.OpenBracket, 237 | Token.Punctuation.CloseBracket, 238 | Token.Identifier.FieldName("groups"), 239 | Token.Punctuation.Semicolon, 240 | Token.Keyword.Modifier.Private, 241 | Token.Type("UnityEngine"), 242 | Token.Punctuation.Accessor, 243 | Token.Type("UI"), 244 | Token.Punctuation.Accessor, 245 | Token.Type("Image"), 246 | Token.Punctuation.OpenBracket, 247 | Token.Punctuation.CloseBracket, 248 | Token.Identifier.FieldName("selectedImages"), 249 | Token.Punctuation.Semicolon 250 | ]); 251 | }); 252 | 253 | it("Fields with dictionary initializer highlights properly (issue 
omnisharp-vscode#1096)", async () => { 254 | const input = Input.InClass(` 255 | private readonly Dictionary languageToIndex = new Dictionary() 256 | { 257 | {"Simplified Chinese", 0}, 258 | {"English", 1}, 259 | {"Japanese", 2}, 260 | {"Korean", 3} 261 | }; 262 | `); 263 | const tokens = await tokenize(input); 264 | 265 | tokens.should.deep.equal([ 266 | Token.Keyword.Modifier.Private, 267 | Token.Keyword.Modifier.ReadOnly, 268 | Token.Type("Dictionary"), 269 | Token.Punctuation.TypeParameter.Begin, 270 | Token.PrimitiveType.String, 271 | Token.Punctuation.Comma, 272 | Token.PrimitiveType.Int, 273 | Token.Punctuation.TypeParameter.End, 274 | Token.Identifier.FieldName("languageToIndex"), 275 | Token.Operator.Assignment, 276 | Token.Operator.Expression.New, 277 | Token.Type("Dictionary"), 278 | Token.Punctuation.TypeParameter.Begin, 279 | Token.PrimitiveType.String, 280 | Token.Punctuation.Comma, 281 | Token.PrimitiveType.Int, 282 | Token.Punctuation.TypeParameter.End, 283 | Token.Punctuation.OpenParen, 284 | Token.Punctuation.CloseParen, 285 | Token.Punctuation.OpenBrace, 286 | Token.Punctuation.OpenBrace, 287 | Token.Punctuation.String.Begin, 288 | Token.Literal.String("Simplified Chinese"), 289 | Token.Punctuation.String.End, 290 | Token.Punctuation.Comma, 291 | Token.Literal.Numeric.Decimal("0"), 292 | Token.Punctuation.CloseBrace, 293 | Token.Punctuation.Comma, 294 | Token.Punctuation.OpenBrace, 295 | Token.Punctuation.String.Begin, 296 | Token.Literal.String("English"), 297 | Token.Punctuation.String.End, 298 | Token.Punctuation.Comma, 299 | Token.Literal.Numeric.Decimal("1"), 300 | Token.Punctuation.CloseBrace, 301 | Token.Punctuation.Comma, 302 | Token.Punctuation.OpenBrace, 303 | Token.Punctuation.String.Begin, 304 | Token.Literal.String("Japanese"), 305 | Token.Punctuation.String.End, 306 | Token.Punctuation.Comma, 307 | Token.Literal.Numeric.Decimal("2"), 308 | Token.Punctuation.CloseBrace, 309 | Token.Punctuation.Comma, 310 | 
Token.Punctuation.OpenBrace, 311 | Token.Punctuation.String.Begin, 312 | Token.Literal.String("Korean"), 313 | Token.Punctuation.String.End, 314 | Token.Punctuation.Comma, 315 | Token.Literal.Numeric.Decimal("3"), 316 | Token.Punctuation.CloseBrace, 317 | Token.Punctuation.CloseBrace, 318 | Token.Punctuation.Semicolon 319 | ]); 320 | }); 321 | 322 | it("initializer on multiple lines (issue omnisharp-vscode#316)", async () => { 323 | const input = Input.InClass(` 324 | private readonly string initSportMessageFormatString = "line1" 325 | + "line2";`); 326 | 327 | let tokens = await tokenize(input); 328 | 329 | tokens.should.deep.equal([ 330 | Token.Keyword.Modifier.Private, 331 | Token.Keyword.Modifier.ReadOnly, 332 | Token.PrimitiveType.String, 333 | Token.Identifier.FieldName("initSportMessageFormatString"), 334 | Token.Operator.Assignment, 335 | Token.Punctuation.String.Begin, 336 | Token.Literal.String("line1"), 337 | Token.Punctuation.String.End, 338 | Token.Operator.Arithmetic.Addition, 339 | Token.Punctuation.String.Begin, 340 | Token.Literal.String("line2"), 341 | Token.Punctuation.String.End, 342 | Token.Punctuation.Semicolon 343 | ]); 344 | }); 345 | 346 | it("initializer containing lambda (issue #31)", async () => { 347 | const input = ` 348 | class C 349 | { 350 | List f = new List 351 | { 352 | () => DoStuff() 353 | }; 354 | 355 | public C(int x, int y) { } 356 | }`; 357 | 358 | let tokens = await tokenize(input); 359 | 360 | tokens.should.deep.equal([ 361 | Token.Keyword.Definition.Class, 362 | Token.Identifier.ClassName("C"), 363 | Token.Punctuation.OpenBrace, 364 | Token.Type("List"), 365 | Token.Punctuation.TypeParameter.Begin, 366 | Token.Type("Action"), 367 | Token.Punctuation.TypeParameter.End, 368 | Token.Identifier.FieldName("f"), 369 | Token.Operator.Assignment, 370 | Token.Operator.Expression.New, 371 | Token.Type("List"), 372 | Token.Punctuation.TypeParameter.Begin, 373 | Token.Type("Action"), 374 | Token.Punctuation.TypeParameter.End, 375 | 
Token.Punctuation.OpenBrace, 376 | Token.Punctuation.OpenParen, 377 | Token.Punctuation.CloseParen, 378 | Token.Operator.Arrow, 379 | Token.Identifier.MethodName("DoStuff"), 380 | Token.Punctuation.OpenParen, 381 | Token.Punctuation.CloseParen, 382 | Token.Punctuation.CloseBrace, 383 | Token.Punctuation.Semicolon, 384 | Token.Keyword.Modifier.Public, 385 | Token.Identifier.MethodName("C"), 386 | Token.Punctuation.OpenParen, 387 | Token.PrimitiveType.Int, 388 | Token.Identifier.ParameterName("x"), 389 | Token.Punctuation.Comma, 390 | Token.PrimitiveType.Int, 391 | Token.Identifier.ParameterName("y"), 392 | Token.Punctuation.CloseParen, 393 | Token.Punctuation.OpenBrace, 394 | Token.Punctuation.CloseBrace, 395 | Token.Punctuation.CloseBrace 396 | ]); 397 | }); 398 | }); 399 | }); 400 | -------------------------------------------------------------------------------- /test/incomplete-code.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Input, Token } from './utils/tokenize'; 8 | 9 | describe("Incomplete code", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Incomplete code", () => { 13 | it("Don't eat the next lines if there isn't a semicolon (issue #15)", async () => { 14 | const input = Input.InClass(` 15 | private readonly string _color 16 | public ColorTest(string white) 17 | { 18 | _color = white; 19 | } 20 | `); 21 | 22 | let tokens = await tokenize(input); 23 | 24 | tokens.should.deep.equal([ 25 | Token.Keyword.Modifier.Private, 26 | Token.Keyword.Modifier.ReadOnly, 27 | Token.PrimitiveType.String, 28 | Token.Identifier.PropertyName("_color"), 29 | Token.Keyword.Modifier.Public, 30 | Token.Identifier.MethodName("ColorTest"), 31 | Token.Punctuation.OpenParen, 32 | Token.PrimitiveType.String, 33 | Token.Identifier.ParameterName("white"), 34 | Token.Punctuation.CloseParen, 35 | Token.Punctuation.OpenBrace, 36 | Token.Variable.ReadWrite("_color"), 37 | Token.Operator.Assignment, 38 | Token.Variable.ReadWrite("white"), 39 | Token.Punctuation.Semicolon, 40 | Token.Punctuation.CloseBrace 41 | ]); 42 | }); 43 | }); 44 | }); -------------------------------------------------------------------------------- /test/indexer.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Input, Token } from './utils/tokenize'; 8 | 9 | describe("Indexers", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Indexers", () => { 13 | it("declaration", async () => { 14 | 15 | const input = Input.InClass(` 16 | public string this[int index] 17 | { 18 | get { return index.ToString(); } 19 | }`); 20 | 21 | const tokens = await tokenize(input); 22 | 23 | tokens.should.deep.equal([ 24 | Token.Keyword.Modifier.Public, 25 | Token.PrimitiveType.String, 26 | Token.Variable.This, 27 | Token.Punctuation.OpenBracket, 28 | Token.PrimitiveType.Int, 29 | Token.Identifier.ParameterName("index"), 30 | Token.Punctuation.CloseBracket, 31 | Token.Punctuation.OpenBrace, 32 | Token.Keyword.Definition.Get, 33 | Token.Punctuation.OpenBrace, 34 | Token.Keyword.Flow.Return, 35 | Token.Variable.Object("index"), 36 | Token.Punctuation.Accessor, 37 | Token.Identifier.MethodName("ToString"), 38 | Token.Punctuation.OpenParen, 39 | Token.Punctuation.CloseParen, 40 | Token.Punctuation.Semicolon, 41 | Token.Punctuation.CloseBrace, 42 | Token.Punctuation.CloseBrace]); 43 | }); 44 | 45 | it("explicitly-implemented interface member", async () => { 46 | 47 | const input = Input.InClass(`string IFoo.this[int index];`); 48 | const tokens = await tokenize(input); 49 | 50 | tokens.should.deep.equal([ 51 | Token.PrimitiveType.String, 52 | Token.Type("IFoo"), 53 | Token.Punctuation.TypeParameter.Begin, 54 | Token.PrimitiveType.String, 55 | Token.Punctuation.TypeParameter.End, 56 | Token.Punctuation.Accessor, 57 | Token.Variable.This, 58 | Token.Punctuation.OpenBracket, 59 | Token.PrimitiveType.Int, 60 | Token.Identifier.ParameterName("index"), 61 | Token.Punctuation.CloseBracket, 62 | Token.Punctuation.Semicolon]); 63 | }); 64 | 65 | it("declaration in interface", async () => { 66 | 67 | const input = Input.InInterface(`string 
this[int index] { get; set; }`); 68 | const tokens = await tokenize(input); 69 | 70 | tokens.should.deep.equal([ 71 | Token.PrimitiveType.String, 72 | Token.Variable.This, 73 | Token.Punctuation.OpenBracket, 74 | Token.PrimitiveType.Int, 75 | Token.Identifier.ParameterName("index"), 76 | Token.Punctuation.CloseBracket, 77 | Token.Punctuation.OpenBrace, 78 | Token.Keyword.Definition.Get, 79 | Token.Punctuation.Semicolon, 80 | Token.Keyword.Definition.Set, 81 | Token.Punctuation.Semicolon, 82 | Token.Punctuation.CloseBrace]); 83 | }); 84 | 85 | it("declaration in interface (read-only)", async () => { 86 | 87 | const input = Input.InInterface(`string this[int index] { get; }`); 88 | const tokens = await tokenize(input); 89 | 90 | tokens.should.deep.equal([ 91 | Token.PrimitiveType.String, 92 | Token.Variable.This, 93 | Token.Punctuation.OpenBracket, 94 | Token.PrimitiveType.Int, 95 | Token.Identifier.ParameterName("index"), 96 | Token.Punctuation.CloseBracket, 97 | Token.Punctuation.OpenBrace, 98 | Token.Keyword.Definition.Get, 99 | Token.Punctuation.Semicolon, 100 | Token.Punctuation.CloseBrace]); 101 | }); 102 | 103 | it("declaration in interface (write-only)", async () => { 104 | 105 | const input = Input.InInterface(`string this[int index] { set; }`); 106 | const tokens = await tokenize(input); 107 | 108 | tokens.should.deep.equal([ 109 | Token.PrimitiveType.String, 110 | Token.Variable.This, 111 | Token.Punctuation.OpenBracket, 112 | Token.PrimitiveType.Int, 113 | Token.Identifier.ParameterName("index"), 114 | Token.Punctuation.CloseBracket, 115 | Token.Punctuation.OpenBrace, 116 | Token.Keyword.Definition.Set, 117 | Token.Punctuation.Semicolon, 118 | Token.Punctuation.CloseBrace]); 119 | }); 120 | 121 | it("parameters with default values (issue #30)", async () => { 122 | const input = Input.InClass(` 123 | int this[string p = null] { } 124 | `); 125 | const tokens = await tokenize(input); 126 | 127 | tokens.should.deep.equal([ 128 | Token.PrimitiveType.Int, 129 
| Token.Variable.This, 130 | Token.Punctuation.OpenBracket, 131 | Token.PrimitiveType.String, 132 | Token.Identifier.ParameterName("p"), 133 | Token.Operator.Assignment, 134 | Token.Literal.Null, 135 | Token.Punctuation.CloseBracket, 136 | Token.Punctuation.OpenBrace, 137 | Token.Punctuation.CloseBrace 138 | ]); 139 | }); 140 | 141 | it("ref return", async () => { 142 | const input = Input.InInterface(`ref int this[int index] { get; }`); 143 | const tokens = await tokenize(input); 144 | 145 | tokens.should.deep.equal([ 146 | Token.Keyword.Modifier.Ref, 147 | Token.PrimitiveType.Int, 148 | Token.Variable.This, 149 | Token.Punctuation.OpenBracket, 150 | Token.PrimitiveType.Int, 151 | Token.Identifier.ParameterName("index"), 152 | Token.Punctuation.CloseBracket, 153 | Token.Punctuation.OpenBrace, 154 | Token.Keyword.Definition.Get, 155 | Token.Punctuation.Semicolon, 156 | Token.Punctuation.CloseBrace]); 157 | }); 158 | 159 | it("ref readonly return", async () => { 160 | const input = Input.InInterface(`ref readonly int this[int index] { get; }`); 161 | const tokens = await tokenize(input); 162 | 163 | tokens.should.deep.equal([ 164 | Token.Keyword.Modifier.Ref, 165 | Token.Keyword.Modifier.ReadOnly, 166 | Token.PrimitiveType.Int, 167 | Token.Variable.This, 168 | Token.Punctuation.OpenBracket, 169 | Token.PrimitiveType.Int, 170 | Token.Identifier.ParameterName("index"), 171 | Token.Punctuation.CloseBracket, 172 | Token.Punctuation.OpenBrace, 173 | Token.Keyword.Definition.Get, 174 | Token.Punctuation.Semicolon, 175 | Token.Punctuation.CloseBrace]); 176 | }); 177 | 178 | it("closing bracket of parameter list on next line", async () => { 179 | const input = Input.InClass(` 180 | string this[ 181 | int index 182 | ] 183 | { 184 | }`); 185 | const tokens = await tokenize(input); 186 | 187 | tokens.should.deep.equal([ 188 | Token.PrimitiveType.String, 189 | Token.Variable.This, 190 | Token.Punctuation.OpenBracket, 191 | 192 | Token.PrimitiveType.Int, 193 | 
Token.Identifier.ParameterName("index"), 194 | 195 | Token.Punctuation.CloseBracket, 196 | Token.Punctuation.OpenBrace, 197 | Token.Punctuation.CloseBrace 198 | ]); 199 | }); 200 | 201 | it("closing bracket of parameter list on next line with attribute", async () => { 202 | const input = Input.InClass(` 203 | string this[ 204 | [In] int index 205 | ] 206 | { 207 | }`); 208 | const tokens = await tokenize(input); 209 | 210 | tokens.should.deep.equal([ 211 | Token.PrimitiveType.String, 212 | Token.Variable.This, 213 | Token.Punctuation.OpenBracket, 214 | 215 | Token.Punctuation.OpenBracket, 216 | Token.Type("In"), 217 | Token.Punctuation.CloseBracket, 218 | 219 | Token.PrimitiveType.Int, 220 | Token.Identifier.ParameterName("index"), 221 | 222 | Token.Punctuation.CloseBracket, 223 | Token.Punctuation.OpenBrace, 224 | Token.Punctuation.CloseBrace 225 | ]); 226 | }); 227 | }); 228 | }); -------------------------------------------------------------------------------- /test/interface.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Token } from './utils/tokenize'; 8 | 9 | describe("Interfaces", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Interfaces", () => { 13 | it("simple interface", async () => { 14 | 15 | const input = `interface IFoo { }`; 16 | const tokens = await tokenize(input); 17 | 18 | tokens.should.deep.equal([ 19 | Token.Keyword.Definition.Interface, 20 | Token.Identifier.InterfaceName("IFoo"), 21 | Token.Punctuation.OpenBrace, 22 | Token.Punctuation.CloseBrace]); 23 | }); 24 | 25 | it("interface with no body", async () => { 26 | 27 | const input = `interface IFoo;`; 28 | const tokens = await tokenize(input); 29 | 30 | tokens.should.deep.equal([ 31 | Token.Keyword.Definition.Interface, 32 | Token.Identifier.InterfaceName("IFoo"), 33 | Token.Punctuation.Semicolon]); 34 | }); 35 | 36 | it("interface inheritance", async () => { 37 | 38 | const input = ` 39 | interface IFoo { } 40 | interface IBar : IFoo { } 41 | `; 42 | 43 | const tokens = await tokenize(input); 44 | 45 | tokens.should.deep.equal([ 46 | Token.Keyword.Definition.Interface, 47 | Token.Identifier.InterfaceName("IFoo"), 48 | Token.Punctuation.OpenBrace, 49 | Token.Punctuation.CloseBrace, 50 | Token.Keyword.Definition.Interface, 51 | Token.Identifier.InterfaceName("IBar"), 52 | Token.Punctuation.Colon, 53 | Token.Type("IFoo"), 54 | Token.Punctuation.OpenBrace, 55 | Token.Punctuation.CloseBrace]); 56 | }); 57 | 58 | it("generic interface", async () => { 59 | 60 | const input = `interface IFoo { }`; 61 | const tokens = await tokenize(input); 62 | 63 | tokens.should.deep.equal([ 64 | Token.Keyword.Definition.Interface, 65 | Token.Identifier.InterfaceName("IFoo"), 66 | Token.Punctuation.TypeParameter.Begin, 67 | Token.Identifier.TypeParameterName("T1"), 68 | Token.Punctuation.Comma, 69 | Token.Identifier.TypeParameterName("T2"), 70 | 
Token.Punctuation.TypeParameter.End, 71 | Token.Punctuation.OpenBrace, 72 | Token.Punctuation.CloseBrace]); 73 | }); 74 | 75 | it("generic interface with variance", async () => { 76 | 77 | const input = `interface IFoo { }`; 78 | const tokens = await tokenize(input); 79 | 80 | tokens.should.deep.equal([ 81 | Token.Keyword.Definition.Interface, 82 | Token.Identifier.InterfaceName("IFoo"), 83 | Token.Punctuation.TypeParameter.Begin, 84 | Token.Keyword.Modifier.In, 85 | Token.Identifier.TypeParameterName("T1"), 86 | Token.Punctuation.Comma, 87 | Token.Keyword.Modifier.Out, 88 | Token.Identifier.TypeParameterName("T2"), 89 | Token.Punctuation.TypeParameter.End, 90 | Token.Punctuation.OpenBrace, 91 | Token.Punctuation.CloseBrace]); 92 | }); 93 | 94 | it("generic interface with constraints", async () => { 95 | 96 | const input = `interface IFoo where T1 : T2 { }`; 97 | const tokens = await tokenize(input); 98 | 99 | tokens.should.deep.equal([ 100 | Token.Keyword.Definition.Interface, 101 | Token.Identifier.InterfaceName("IFoo"), 102 | Token.Punctuation.TypeParameter.Begin, 103 | Token.Identifier.TypeParameterName("T1"), 104 | Token.Punctuation.Comma, 105 | Token.Identifier.TypeParameterName("T2"), 106 | Token.Punctuation.TypeParameter.End, 107 | Token.Keyword.Modifier.Where, 108 | Token.Identifier.TypeParameterName("T1"), 109 | Token.Punctuation.Colon, 110 | Token.Type("T2"), 111 | Token.Punctuation.OpenBrace, 112 | Token.Punctuation.CloseBrace]); 113 | }); 114 | 115 | it("generic interface with abstract methods (issue #307)", async () => { 116 | 117 | const input = ` 118 | public interface IAdditionSubtraction where T : IAdditionSubtraction 119 | { 120 | public abstract static T operator -(T left, T right); 121 | abstract static T operator +(T left, T right); 122 | public abstract void M(); 123 | void N(); 124 | }`; 125 | const tokens = await tokenize(input); 126 | 127 | tokens.should.deep.equal([ 128 | Token.Keyword.Modifier.Public, 129 | 
Token.Keyword.Definition.Interface, 130 | Token.Identifier.InterfaceName("IAdditionSubtraction"), 131 | Token.Punctuation.TypeParameter.Begin, 132 | Token.Identifier.TypeParameterName("T"), 133 | Token.Punctuation.TypeParameter.End, 134 | Token.Keyword.Modifier.Where, 135 | Token.Identifier.TypeParameterName("T"), 136 | Token.Punctuation.Colon, 137 | Token.Type("IAdditionSubtraction"), 138 | Token.Punctuation.TypeParameter.Begin, 139 | Token.Type("T"), 140 | Token.Punctuation.TypeParameter.End, 141 | Token.Punctuation.OpenBrace, 142 | 143 | Token.Keyword.Modifier.Public, 144 | Token.Keyword.Modifier.Abstract, 145 | Token.Keyword.Modifier.Static, 146 | Token.Type("T"), 147 | Token.Keyword.Definition.Operator, 148 | Token.Identifier.MethodName("-"), 149 | Token.Punctuation.OpenParen, 150 | Token.Type("T"), 151 | Token.Identifier.ParameterName("left"), 152 | Token.Punctuation.Comma, 153 | Token.Type("T"), 154 | Token.Identifier.ParameterName("right"), 155 | Token.Punctuation.CloseParen, 156 | Token.Punctuation.Semicolon, 157 | 158 | Token.Keyword.Modifier.Abstract, 159 | Token.Keyword.Modifier.Static, 160 | Token.Type("T"), 161 | Token.Keyword.Definition.Operator, 162 | Token.Identifier.MethodName("+"), 163 | Token.Punctuation.OpenParen, 164 | Token.Type("T"), 165 | Token.Identifier.ParameterName("left"), 166 | Token.Punctuation.Comma, 167 | Token.Type("T"), 168 | Token.Identifier.ParameterName("right"), 169 | Token.Punctuation.CloseParen, 170 | Token.Punctuation.Semicolon, 171 | 172 | Token.Keyword.Modifier.Public, 173 | Token.Keyword.Modifier.Abstract, 174 | Token.PrimitiveType.Void, 175 | Token.Identifier.MethodName("M"), 176 | Token.Punctuation.OpenParen, 177 | Token.Punctuation.CloseParen, 178 | Token.Punctuation.Semicolon, 179 | 180 | Token.PrimitiveType.Void, 181 | Token.Identifier.MethodName("N"), 182 | Token.Punctuation.OpenParen, 183 | Token.Punctuation.CloseParen, 184 | Token.Punctuation.Semicolon, 185 | 186 | Token.Punctuation.CloseBrace]); 187 | }); 188 | 
}); 189 | }); 190 | -------------------------------------------------------------------------------- /test/interpolated-string.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Input, Token } from './utils/tokenize'; 8 | 9 | describe("Interpolated strings", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Interpolated strings", () => { 13 | it("two interpolations", async () => { 14 | 15 | const input = Input.InClass(`string test = $"hello {one} world {two}!";`); 16 | const tokens = await tokenize(input); 17 | 18 | tokens.should.deep.equal([ 19 | Token.PrimitiveType.String, 20 | Token.Identifier.FieldName("test"), 21 | Token.Operator.Assignment, 22 | Token.Punctuation.InterpolatedString.Begin, 23 | Token.Literal.String("hello "), 24 | Token.Punctuation.Interpolation.Begin, 25 | Token.Variable.ReadWrite("one"), 26 | Token.Punctuation.Interpolation.End, 27 | Token.Literal.String(" world "), 28 | Token.Punctuation.Interpolation.Begin, 29 | Token.Variable.ReadWrite("two"), 30 | Token.Punctuation.Interpolation.End, 31 | Token.Literal.String("!"), 32 | Token.Punctuation.InterpolatedString.End, 33 | Token.Punctuation.Semicolon]); 34 | }); 35 | 36 | it("no interpolations", async () => { 37 | 38 | const input = Input.InClass(`string test = $"hello world!";`); 39 | const tokens = await tokenize(input); 40 | 41 | tokens.should.deep.equal([ 42 | Token.PrimitiveType.String, 43 | Token.Identifier.FieldName("test"), 44 | Token.Operator.Assignment, 45 | Token.Punctuation.InterpolatedString.Begin, 46 | 
Token.Literal.String("hello world!"), 47 | Token.Punctuation.InterpolatedString.End, 48 | Token.Punctuation.Semicolon]); 49 | }); 50 | 51 | it("no interpolations due to escaped braces", async () => { 52 | 53 | const input = Input.InClass(`string test = $"hello {{one}} world {{two}}!";`); 54 | const tokens = await tokenize(input); 55 | 56 | tokens.should.deep.equal([ 57 | Token.PrimitiveType.String, 58 | Token.Identifier.FieldName("test"), 59 | Token.Operator.Assignment, 60 | Token.Punctuation.InterpolatedString.Begin, 61 | Token.Literal.String("hello {{one}} world {{two}}!"), 62 | Token.Punctuation.InterpolatedString.End, 63 | Token.Punctuation.Semicolon]); 64 | }); 65 | 66 | it("two interpolations with escaped braces", async () => { 67 | 68 | const input = Input.InClass(`string test = $"hello {{{one}}} world {{{two}}}!";`); 69 | const tokens = await tokenize(input); 70 | 71 | tokens.should.deep.equal([ 72 | Token.PrimitiveType.String, 73 | Token.Identifier.FieldName("test"), 74 | Token.Operator.Assignment, 75 | Token.Punctuation.InterpolatedString.Begin, 76 | Token.Literal.String("hello "), 77 | Token.Literal.String("{{"), 78 | Token.Punctuation.Interpolation.Begin, 79 | Token.Variable.ReadWrite("one"), 80 | Token.Punctuation.Interpolation.End, 81 | Token.Literal.String("}} world "), 82 | Token.Literal.String("{{"), 83 | Token.Punctuation.Interpolation.Begin, 84 | Token.Variable.ReadWrite("two"), 85 | Token.Punctuation.Interpolation.End, 86 | Token.Literal.String("}}!"), 87 | Token.Punctuation.InterpolatedString.End, 88 | Token.Punctuation.Semicolon]); 89 | }); 90 | 91 | it("no interpolations due to double-escaped braces", async () => { 92 | 93 | const input = Input.InClass(`string test = $"hello {{{{one}}}} world {{{{two}}}}!";`); 94 | const tokens = await tokenize(input); 95 | 96 | tokens.should.deep.equal([ 97 | Token.PrimitiveType.String, 98 | Token.Identifier.FieldName("test"), 99 | Token.Operator.Assignment, 100 | Token.Punctuation.InterpolatedString.Begin, 
101 | Token.Literal.String("hello {{{{one}}}} world {{{{two}}}}!"), 102 | Token.Punctuation.InterpolatedString.End, 103 | Token.Punctuation.Semicolon]); 104 | }); 105 | 106 | it("break across two lines (non-verbatim)", async () => { 107 | 108 | const input = Input.InClass(` 109 | string test = $"hello 110 | world!";`); 111 | const tokens = await tokenize(input); 112 | 113 | tokens.should.deep.equal([ 114 | Token.PrimitiveType.String, 115 | Token.Identifier.FieldName("test"), 116 | Token.Operator.Assignment, 117 | Token.Punctuation.InterpolatedString.Begin, 118 | Token.Literal.String("hell"), 119 | 120 | // Note: Because the string ended prematurely, the rest of this line and the contents of the next are junk. 121 | Token.IllegalNewLine("o"), 122 | Token.Variable.ReadWrite("world"), 123 | Token.Operator.Logical.Not, 124 | Token.Punctuation.String.Begin, 125 | Token.IllegalNewLine(";")]); 126 | }); 127 | 128 | it("verbatim with two interpolations", async () => { 129 | 130 | const input = Input.InClass(`string test = $@"hello {one} world {two}!";`); 131 | const tokens = await tokenize(input); 132 | 133 | tokens.should.deep.equal([ 134 | Token.PrimitiveType.String, 135 | Token.Identifier.FieldName("test"), 136 | Token.Operator.Assignment, 137 | Token.Punctuation.InterpolatedString.VerbatimBegin, 138 | Token.Literal.String("hello "), 139 | Token.Punctuation.Interpolation.Begin, 140 | Token.Variable.ReadWrite("one"), 141 | Token.Punctuation.Interpolation.End, 142 | Token.Literal.String(" world "), 143 | Token.Punctuation.Interpolation.Begin, 144 | Token.Variable.ReadWrite("two"), 145 | Token.Punctuation.Interpolation.End, 146 | Token.Literal.String("!"), 147 | Token.Punctuation.InterpolatedString.End, 148 | Token.Punctuation.Semicolon]); 149 | }); 150 | 151 | it("verbatim with two interpolations (reverse)", async () => { 152 | 153 | const input = Input.InClass(`string test = @$"hello {one} world {two}!";`); 154 | const tokens = await tokenize(input); 155 | 156 | 
tokens.should.deep.equal([ 157 | Token.PrimitiveType.String, 158 | Token.Identifier.FieldName("test"), 159 | Token.Operator.Assignment, 160 | Token.Punctuation.InterpolatedString.VerbatimBeginReverse, 161 | Token.Literal.String("hello "), 162 | Token.Punctuation.Interpolation.Begin, 163 | Token.Variable.ReadWrite("one"), 164 | Token.Punctuation.Interpolation.End, 165 | Token.Literal.String(" world "), 166 | Token.Punctuation.Interpolation.Begin, 167 | Token.Variable.ReadWrite("two"), 168 | Token.Punctuation.Interpolation.End, 169 | Token.Literal.String("!"), 170 | Token.Punctuation.InterpolatedString.End, 171 | Token.Punctuation.Semicolon]); 172 | }); 173 | 174 | it("verbatim with two interpolations and escaped double-quotes", async () => { 175 | 176 | const input = Input.InClass(`string test = $@"hello {one} ""world"" {two}!";`); 177 | const tokens = await tokenize(input); 178 | 179 | tokens.should.deep.equal([ 180 | Token.PrimitiveType.String, 181 | Token.Identifier.FieldName("test"), 182 | Token.Operator.Assignment, 183 | Token.Punctuation.InterpolatedString.VerbatimBegin, 184 | Token.Literal.String("hello "), 185 | Token.Punctuation.Interpolation.Begin, 186 | Token.Variable.ReadWrite("one"), 187 | Token.Punctuation.Interpolation.End, 188 | Token.Literal.String(" "), 189 | Token.Literal.CharacterEscape("\"\""), 190 | Token.Literal.String("world"), 191 | Token.Literal.CharacterEscape("\"\""), 192 | Token.Literal.String(" "), 193 | Token.Punctuation.Interpolation.Begin, 194 | Token.Variable.ReadWrite("two"), 195 | Token.Punctuation.Interpolation.End, 196 | Token.Literal.String("!"), 197 | Token.Punctuation.InterpolatedString.End, 198 | Token.Punctuation.Semicolon]); 199 | }); 200 | 201 | it("verbatim with two interpolations and escaped double-quotes (reverse)", async () => { 202 | 203 | const input = Input.InClass(`string test = @$"hello {one} ""world"" {two}!";`); 204 | const tokens = await tokenize(input); 205 | 206 | tokens.should.deep.equal([ 207 | 
Token.PrimitiveType.String, 208 | Token.Identifier.FieldName("test"), 209 | Token.Operator.Assignment, 210 | Token.Punctuation.InterpolatedString.VerbatimBeginReverse, 211 | Token.Literal.String("hello "), 212 | Token.Punctuation.Interpolation.Begin, 213 | Token.Variable.ReadWrite("one"), 214 | Token.Punctuation.Interpolation.End, 215 | Token.Literal.String(" "), 216 | Token.Literal.CharacterEscape("\"\""), 217 | Token.Literal.String("world"), 218 | Token.Literal.CharacterEscape("\"\""), 219 | Token.Literal.String(" "), 220 | Token.Punctuation.Interpolation.Begin, 221 | Token.Variable.ReadWrite("two"), 222 | Token.Punctuation.Interpolation.End, 223 | Token.Literal.String("!"), 224 | Token.Punctuation.InterpolatedString.End, 225 | Token.Punctuation.Semicolon]); 226 | }); 227 | 228 | it("break across two lines with two interpolations (verbatim)", async () => { 229 | 230 | const input = Input.InClass(` 231 | string test = $@"hello {one} 232 | world {two}!";`); 233 | const tokens = await tokenize(input); 234 | 235 | tokens.should.deep.equal([ 236 | Token.PrimitiveType.String, 237 | Token.Identifier.FieldName("test"), 238 | Token.Operator.Assignment, 239 | Token.Punctuation.InterpolatedString.VerbatimBegin, 240 | Token.Literal.String("hello "), 241 | Token.Punctuation.Interpolation.Begin, 242 | Token.Variable.ReadWrite("one"), 243 | Token.Punctuation.Interpolation.End, 244 | Token.Literal.String("world "), 245 | Token.Punctuation.Interpolation.Begin, 246 | Token.Variable.ReadWrite("two"), 247 | Token.Punctuation.Interpolation.End, 248 | Token.Literal.String("!"), 249 | Token.Punctuation.InterpolatedString.End, 250 | Token.Punctuation.Semicolon]); 251 | }); 252 | 253 | it("break across two lines with two interpolations (verbatim / reverse)", async () => { 254 | 255 | const input = Input.InClass(` 256 | string test = @$"hello {one} 257 | world {two}!";`); 258 | const tokens = await tokenize(input); 259 | 260 | tokens.should.deep.equal([ 261 | Token.PrimitiveType.String, 
262 | Token.Identifier.FieldName("test"), 263 | Token.Operator.Assignment, 264 | Token.Punctuation.InterpolatedString.VerbatimBeginReverse, 265 | Token.Literal.String("hello "), 266 | Token.Punctuation.Interpolation.Begin, 267 | Token.Variable.ReadWrite("one"), 268 | Token.Punctuation.Interpolation.End, 269 | Token.Literal.String("world "), 270 | Token.Punctuation.Interpolation.Begin, 271 | Token.Variable.ReadWrite("two"), 272 | Token.Punctuation.Interpolation.End, 273 | Token.Literal.String("!"), 274 | Token.Punctuation.InterpolatedString.End, 275 | Token.Punctuation.Semicolon]); 276 | }); 277 | 278 | it("break across two lines and start with a new line with an interpolation (verbatim)", async () => { 279 | 280 | const input = Input.InClass(` 281 | string test = $@" 282 | I am a multiline string with a 283 | {parameter} that starts after a newline! 284 | ";`); 285 | const tokens = await tokenize(input); 286 | 287 | tokens.should.deep.equal([ 288 | Token.PrimitiveType.String, 289 | Token.Identifier.FieldName("test"), 290 | Token.Operator.Assignment, 291 | Token.Punctuation.InterpolatedString.VerbatimBegin, 292 | Token.Literal.String("I am a multiline string with a"), 293 | Token.Punctuation.Interpolation.Begin, 294 | Token.Variable.ReadWrite("parameter"), 295 | Token.Punctuation.Interpolation.End, 296 | Token.Literal.String(" that starts after a newline!"), 297 | Token.Punctuation.InterpolatedString.End, 298 | Token.Punctuation.Semicolon]); 299 | }); 300 | 301 | it("break across two lines with no interpolations (verbatim)", async () => { 302 | 303 | const input = Input.InClass(` 304 | string test = $@"hello 305 | world!";`); 306 | const tokens = await tokenize(input); 307 | 308 | tokens.should.deep.equal([ 309 | Token.PrimitiveType.String, 310 | Token.Identifier.FieldName("test"), 311 | Token.Operator.Assignment, 312 | Token.Punctuation.InterpolatedString.VerbatimBegin, 313 | Token.Literal.String("hello"), 314 | Token.Literal.String("world!"), 315 | 
Token.Punctuation.InterpolatedString.End, 316 | Token.Punctuation.Semicolon]); 317 | }); 318 | 319 | it("break across two lines with no interpolations (verbatim / reverse)", async () => { 320 | 321 | const input = Input.InClass(` 322 | string test = @$"hello 323 | world!";`); 324 | const tokens = await tokenize(input); 325 | 326 | tokens.should.deep.equal([ 327 | Token.PrimitiveType.String, 328 | Token.Identifier.FieldName("test"), 329 | Token.Operator.Assignment, 330 | Token.Punctuation.InterpolatedString.VerbatimBeginReverse, 331 | Token.Literal.String("hello"), 332 | Token.Literal.String("world!"), 333 | Token.Punctuation.InterpolatedString.End, 334 | Token.Punctuation.Semicolon]); 335 | }); 336 | }); 337 | }); -------------------------------------------------------------------------------- /test/label.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Input, Token } from './utils/tokenize'; 8 | 9 | describe("Labels", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Labels", () => { 13 | it("declaration", async () => { 14 | const input = Input.InMethod(`Foo:`); 15 | const tokens = await tokenize(input); 16 | 17 | tokens.should.deep.equal([ 18 | Token.Identifier.LabelName("Foo"), 19 | Token.Punctuation.Colon 20 | ]); 21 | }); 22 | }); 23 | }); -------------------------------------------------------------------------------- /test/local.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Input, Token } from './utils/tokenize'; 8 | 9 | describe("Locals", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Local variables", () => { 13 | it("declaration", async () => { 14 | const input = Input.InMethod(`int x;`); 15 | const tokens = await tokenize(input); 16 | 17 | tokens.should.deep.equal([ 18 | Token.PrimitiveType.Int, 19 | Token.Identifier.LocalName("x"), 20 | Token.Punctuation.Semicolon 21 | ]); 22 | }); 23 | 24 | it("declaration with initializer", async () => { 25 | const input = Input.InMethod(`int x = 42;`); 26 | const tokens = await tokenize(input); 27 | 28 | tokens.should.deep.equal([ 29 | Token.PrimitiveType.Int, 30 | Token.Identifier.LocalName("x"), 31 | Token.Operator.Assignment, 32 | Token.Literal.Numeric.Decimal("42"), 33 | Token.Punctuation.Semicolon 34 | ]); 35 | }); 36 | 37 | it("multiple declarators", async () => { 38 | const input = Input.InMethod(`nint x, y;`); 39 | const tokens = await tokenize(input); 40 | 41 | tokens.should.deep.equal([ 42 | Token.PrimitiveType.Nint, 43 | Token.Identifier.LocalName("x"), 44 | Token.Punctuation.Comma, 45 | Token.Identifier.LocalName("y"), 46 | Token.Punctuation.Semicolon 47 | ]); 48 | }); 49 | 50 | it("multiple declarators with initializers", async () => { 51 | const input = Input.InMethod(`int x = 19, y = 23;`); 52 | const tokens = await tokenize(input); 53 | 54 | tokens.should.deep.equal([ 55 | Token.PrimitiveType.Int, 56 | Token.Identifier.LocalName("x"), 57 | Token.Operator.Assignment, 58 | Token.Literal.Numeric.Decimal("19"), 59 | Token.Punctuation.Comma, 60 | Token.Identifier.LocalName("y"), 61 | Token.Operator.Assignment, 62 | Token.Literal.Numeric.Decimal("23"), 63 | Token.Punctuation.Semicolon 64 | ]); 65 | }); 66 | 67 | it("const declaration", async () => { 68 | const input = Input.InMethod(`const int x = 42;`); 69 | 
const tokens = await tokenize(input); 70 | 71 | tokens.should.deep.equal([ 72 | Token.Keyword.Modifier.Const, 73 | Token.PrimitiveType.Int, 74 | Token.Identifier.LocalName("x"), 75 | Token.Operator.Assignment, 76 | Token.Literal.Numeric.Decimal("42"), 77 | Token.Punctuation.Semicolon 78 | ]); 79 | }); 80 | 81 | it("const with multiple declarators", async () => { 82 | const input = Input.InMethod(`const int x = 19, y = 23;`); 83 | const tokens = await tokenize(input); 84 | 85 | tokens.should.deep.equal([ 86 | Token.Keyword.Modifier.Const, 87 | Token.PrimitiveType.Int, 88 | Token.Identifier.LocalName("x"), 89 | Token.Operator.Assignment, 90 | Token.Literal.Numeric.Decimal("19"), 91 | Token.Punctuation.Comma, 92 | Token.Identifier.LocalName("y"), 93 | Token.Operator.Assignment, 94 | Token.Literal.Numeric.Decimal("23"), 95 | Token.Punctuation.Semicolon 96 | ]); 97 | }); 98 | 99 | it("ref local", async () => { 100 | const input = Input.InMethod(`ref int x;`); 101 | const tokens = await tokenize(input); 102 | 103 | tokens.should.deep.equal([ 104 | Token.Keyword.Modifier.Ref, 105 | Token.PrimitiveType.Int, 106 | Token.Identifier.LocalName("x"), 107 | Token.Punctuation.Semicolon 108 | ]); 109 | }); 110 | 111 | it("ref readonly local", async () => { 112 | const input = Input.InMethod(`ref readonly int x;`); 113 | const tokens = await tokenize(input); 114 | 115 | tokens.should.deep.equal([ 116 | Token.Keyword.Modifier.Ref, 117 | Token.Keyword.Modifier.ReadOnly, 118 | Token.PrimitiveType.Int, 119 | Token.Identifier.LocalName("x"), 120 | Token.Punctuation.Semicolon 121 | ]); 122 | }); 123 | 124 | it("ref local with initializer", async () => { 125 | const input = Input.InMethod(`ref int x = ref y;`); 126 | const tokens = await tokenize(input); 127 | 128 | tokens.should.deep.equal([ 129 | Token.Keyword.Modifier.Ref, 130 | Token.PrimitiveType.Int, 131 | Token.Identifier.LocalName("x"), 132 | Token.Operator.Assignment, 133 | Token.Keyword.Modifier.Ref, 134 | 
Token.Variable.ReadWrite("y"), 135 | Token.Punctuation.Semicolon 136 | ]); 137 | }); 138 | 139 | it("ref readonly local with initializer", async () => { 140 | const input = Input.InMethod(`ref readonly int x = ref y;`); 141 | const tokens = await tokenize(input); 142 | 143 | tokens.should.deep.equal([ 144 | Token.Keyword.Modifier.Ref, 145 | Token.Keyword.Modifier.ReadOnly, 146 | Token.PrimitiveType.Int, 147 | Token.Identifier.LocalName("x"), 148 | Token.Operator.Assignment, 149 | Token.Keyword.Modifier.Ref, 150 | Token.Variable.ReadWrite("y"), 151 | Token.Punctuation.Semicolon 152 | ]); 153 | }); 154 | 155 | it("ref readonly local var with initializer", async () => { 156 | const input = Input.InMethod(`ref readonly var x = ref y;`); 157 | const tokens = await tokenize(input); 158 | 159 | tokens.should.deep.equal([ 160 | Token.Keyword.Modifier.Ref, 161 | Token.Keyword.Modifier.ReadOnly, 162 | Token.Keyword.Definition.Var, 163 | Token.Identifier.LocalName("x"), 164 | Token.Operator.Assignment, 165 | Token.Keyword.Modifier.Ref, 166 | Token.Variable.ReadWrite("y"), 167 | Token.Punctuation.Semicolon 168 | ]); 169 | }); 170 | }); 171 | 172 | describe("Local functions", () => { 173 | it("local function declaration with arrow body", async () => { 174 | const input = Input.InMethod(`nuint Add(nuint x, uint y) => x + y;`); 175 | const tokens = await tokenize(input); 176 | 177 | tokens.should.deep.equal([ 178 | Token.PrimitiveType.Nuint, 179 | Token.Identifier.MethodName("Add"), 180 | Token.Punctuation.OpenParen, 181 | Token.PrimitiveType.Nuint, 182 | Token.Identifier.ParameterName("x"), 183 | Token.Punctuation.Comma, 184 | Token.PrimitiveType.UInt, 185 | Token.Identifier.ParameterName("y"), 186 | Token.Punctuation.CloseParen, 187 | Token.Operator.Arrow, 188 | Token.Variable.ReadWrite("x"), 189 | Token.Operator.Arithmetic.Addition, 190 | Token.Variable.ReadWrite("y"), 191 | Token.Punctuation.Semicolon 192 | ]); 193 | }); 194 | 195 | it("local function declaration with block 
definition", async () => { 196 | const input = Input.InMethod(` 197 | int Add(int x, int y) 198 | { 199 | return x + y; 200 | }`); 201 | const tokens = await tokenize(input); 202 | 203 | tokens.should.deep.equal([ 204 | Token.PrimitiveType.Int, 205 | Token.Identifier.MethodName("Add"), 206 | Token.Punctuation.OpenParen, 207 | Token.PrimitiveType.Int, 208 | Token.Identifier.ParameterName("x"), 209 | Token.Punctuation.Comma, 210 | Token.PrimitiveType.Int, 211 | Token.Identifier.ParameterName("y"), 212 | Token.Punctuation.CloseParen, 213 | Token.Punctuation.OpenBrace, 214 | Token.Keyword.Flow.Return, 215 | Token.Variable.ReadWrite("x"), 216 | Token.Operator.Arithmetic.Addition, 217 | Token.Variable.ReadWrite("y"), 218 | Token.Punctuation.Semicolon, 219 | Token.Punctuation.CloseBrace 220 | ]); 221 | }); 222 | 223 | it("local function declaration with async modifier", async () => { 224 | const input = Input.InMethod(`async void Foo() { }`); 225 | const tokens = await tokenize(input); 226 | 227 | tokens.should.deep.equal([ 228 | Token.Keyword.Modifier.Async, 229 | Token.PrimitiveType.Void, 230 | Token.Identifier.MethodName("Foo"), 231 | Token.Punctuation.OpenParen, 232 | Token.Punctuation.CloseParen, 233 | Token.Punctuation.OpenBrace, 234 | Token.Punctuation.CloseBrace 235 | ]); 236 | }); 237 | 238 | it("local function declaration with unsafe modifier", async () => { 239 | const input = Input.InMethod(`unsafe void Foo() { }`); 240 | const tokens = await tokenize(input); 241 | 242 | tokens.should.deep.equal([ 243 | Token.Keyword.Modifier.Unsafe, 244 | Token.PrimitiveType.Void, 245 | Token.Identifier.MethodName("Foo"), 246 | Token.Punctuation.OpenParen, 247 | Token.Punctuation.CloseParen, 248 | Token.Punctuation.OpenBrace, 249 | Token.Punctuation.CloseBrace 250 | ]); 251 | }); 252 | 253 | it("local function declaration with static modifier", async () => { 254 | const input = Input.InMethod(`static void Foo() { }`); 255 | const tokens = await tokenize(input); 256 | 257 | 
tokens.should.deep.equal([ 258 | Token.Keyword.Modifier.Static, 259 | Token.PrimitiveType.Void, 260 | Token.Identifier.MethodName("Foo"), 261 | Token.Punctuation.OpenParen, 262 | Token.Punctuation.CloseParen, 263 | Token.Punctuation.OpenBrace, 264 | Token.Punctuation.CloseBrace 265 | ]); 266 | }); 267 | 268 | it("local function declaration with extern modifier", async () => { 269 | const input = Input.InMethod(`extern static void Foo() { }`); 270 | const tokens = await tokenize(input); 271 | 272 | tokens.should.deep.equal([ 273 | Token.Keyword.Modifier.Extern, 274 | Token.Keyword.Modifier.Static, 275 | Token.PrimitiveType.Void, 276 | Token.Identifier.MethodName("Foo"), 277 | Token.Punctuation.OpenParen, 278 | Token.Punctuation.CloseParen, 279 | Token.Punctuation.OpenBrace, 280 | Token.Punctuation.CloseBrace 281 | ]); 282 | }); 283 | }); 284 | }); -------------------------------------------------------------------------------- /test/namespace.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Token } from './utils/tokenize'; 8 | 9 | describe("Namespace", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Block-Scoped Namespace", () => { 13 | it("has a namespace keyword and a name", async () => { 14 | 15 | const input = ` 16 | namespace TestNamespace 17 | { 18 | }`; 19 | let tokens = await tokenize(input); 20 | 21 | tokens.should.deep.equal([ 22 | Token.Keyword.Definition.Namespace, 23 | Token.Identifier.NamespaceName("TestNamespace"), 24 | Token.Punctuation.OpenBrace, 25 | Token.Punctuation.CloseBrace]); 26 | }); 27 | 28 | it("has a namespace keyword and a dotted name", async () => { 29 | 30 | const input = ` 31 | namespace Test.Namespace 32 | { 33 | }`; 34 | let tokens = await tokenize(input); 35 | 36 | tokens.should.deep.equal([ 37 | Token.Keyword.Definition.Namespace, 38 | Token.Identifier.NamespaceName("Test"), 39 | Token.Punctuation.Accessor, 40 | Token.Identifier.NamespaceName("Namespace"), 41 | Token.Punctuation.OpenBrace, 42 | Token.Punctuation.CloseBrace]); 43 | }); 44 | 45 | it("can be nested", async () => { 46 | 47 | const input = ` 48 | namespace TestNamespace 49 | { 50 | namespace NestedNamespace { 51 | 52 | } 53 | }`; 54 | let tokens = await tokenize(input); 55 | 56 | tokens.should.deep.equal([ 57 | Token.Keyword.Definition.Namespace, 58 | Token.Identifier.NamespaceName("TestNamespace"), 59 | Token.Punctuation.OpenBrace, 60 | 61 | Token.Keyword.Definition.Namespace, 62 | Token.Identifier.NamespaceName("NestedNamespace"), 63 | Token.Punctuation.OpenBrace, 64 | 65 | Token.Punctuation.CloseBrace, 66 | Token.Punctuation.CloseBrace]); 67 | }); 68 | 69 | it("can contain using statements", async () => { 70 | 71 | const input = ` 72 | using UsingOne; 73 | using one = UsingOne.Something; 74 | 75 | namespace TestNamespace 76 | { 77 | using UsingTwo; 78 | using two = 
UsingTwo.Something; 79 | 80 | namespace NestedNamespace 81 | { 82 | using UsingThree; 83 | using three = UsingThree.Something; 84 | } 85 | }`; 86 | let tokens = await tokenize(input); 87 | 88 | tokens.should.deep.equal([ 89 | Token.Keyword.Directive.Using, 90 | Token.Identifier.NamespaceName("UsingOne"), 91 | Token.Punctuation.Semicolon, 92 | 93 | Token.Keyword.Directive.Using, 94 | Token.Identifier.AliasName("one"), 95 | Token.Operator.Assignment, 96 | Token.Type("UsingOne"), 97 | Token.Punctuation.Accessor, 98 | Token.Type("Something"), 99 | Token.Punctuation.Semicolon, 100 | 101 | Token.Keyword.Definition.Namespace, 102 | Token.Identifier.NamespaceName("TestNamespace"), 103 | Token.Punctuation.OpenBrace, 104 | 105 | Token.Keyword.Directive.Using, 106 | Token.Identifier.NamespaceName("UsingTwo"), 107 | Token.Punctuation.Semicolon, 108 | 109 | Token.Keyword.Directive.Using, 110 | Token.Identifier.AliasName("two"), 111 | Token.Operator.Assignment, 112 | Token.Type("UsingTwo"), 113 | Token.Punctuation.Accessor, 114 | Token.Type("Something"), 115 | Token.Punctuation.Semicolon, 116 | 117 | Token.Keyword.Definition.Namespace, 118 | Token.Identifier.NamespaceName("NestedNamespace"), 119 | Token.Punctuation.OpenBrace, 120 | 121 | Token.Keyword.Directive.Using, 122 | Token.Identifier.NamespaceName("UsingThree"), 123 | Token.Punctuation.Semicolon, 124 | 125 | Token.Keyword.Directive.Using, 126 | Token.Identifier.AliasName("three"), 127 | Token.Operator.Assignment, 128 | Token.Type("UsingThree"), 129 | Token.Punctuation.Accessor, 130 | Token.Type("Something"), 131 | Token.Punctuation.Semicolon, 132 | 133 | Token.Punctuation.CloseBrace, 134 | Token.Punctuation.CloseBrace]); 135 | }); 136 | }); 137 | 138 | describe("File-Scoped Namespace", () => { 139 | it("has a namespace keyword and a name", async () => { 140 | 141 | const input = ` 142 | namespace TestNamespace;`; 143 | let tokens = await tokenize(input); 144 | 145 | tokens.should.deep.equal([ 146 | 
Token.Keyword.Definition.Namespace, 147 | Token.Identifier.NamespaceName("TestNamespace"), 148 | Token.Punctuation.Semicolon]); 149 | }); 150 | 151 | it("has a namespace keyword and a dotted name", async () => { 152 | 153 | const input = ` 154 | namespace Test.Namespace;`; 155 | let tokens = await tokenize(input); 156 | 157 | tokens.should.deep.equal([ 158 | Token.Keyword.Definition.Namespace, 159 | Token.Identifier.NamespaceName("Test"), 160 | Token.Punctuation.Accessor, 161 | Token.Identifier.NamespaceName("Namespace"), 162 | Token.Punctuation.Semicolon]); 163 | }); 164 | }); 165 | }); -------------------------------------------------------------------------------- /test/record.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Input, Token, NamespaceStyle } from './utils/tokenize'; 8 | 9 | describe("Record", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Record", () => { 13 | for (const namespaceStyle of [NamespaceStyle.BlockScoped, NamespaceStyle.FileScoped]) { 14 | const styleName = namespaceStyle == NamespaceStyle.BlockScoped 15 | ? 
"Block-Scoped" 16 | : "File-Scoped"; 17 | 18 | 19 | it(`record keyword and storage modifiers (${styleName} Namespace)`, async () => { 20 | 21 | const input = Input.InNamespace(` 22 | public record PublicRecord { } 23 | 24 | record DefaultRecord { } 25 | 26 | internal record InternalRecord { } 27 | 28 | static record DefaultStaticRecord { } 29 | 30 | public static record PublicStaticRecord { } 31 | 32 | sealed record DefaultSealedRecord { } 33 | 34 | public sealed record PublicSealedRecord { } 35 | 36 | public abstract record PublicAbstractRecord { } 37 | 38 | abstract record DefaultAbstractRecord { }`, namespaceStyle); 39 | 40 | const tokens = await tokenize(input); 41 | 42 | tokens.should.deep.equal([ 43 | Token.Keyword.Modifier.Public, 44 | Token.Keyword.Definition.Record, 45 | Token.Identifier.ClassName("PublicRecord"), 46 | Token.Punctuation.OpenBrace, 47 | Token.Punctuation.CloseBrace, 48 | 49 | Token.Keyword.Definition.Record, 50 | Token.Identifier.ClassName("DefaultRecord"), 51 | Token.Punctuation.OpenBrace, 52 | Token.Punctuation.CloseBrace, 53 | 54 | Token.Keyword.Modifier.Internal, 55 | Token.Keyword.Definition.Record, 56 | Token.Identifier.ClassName("InternalRecord"), 57 | Token.Punctuation.OpenBrace, 58 | Token.Punctuation.CloseBrace, 59 | 60 | Token.Keyword.Modifier.Static, 61 | Token.Keyword.Definition.Record, 62 | Token.Identifier.ClassName("DefaultStaticRecord"), 63 | Token.Punctuation.OpenBrace, 64 | Token.Punctuation.CloseBrace, 65 | 66 | Token.Keyword.Modifier.Public, 67 | Token.Keyword.Modifier.Static, 68 | Token.Keyword.Definition.Record, 69 | Token.Identifier.ClassName("PublicStaticRecord"), 70 | Token.Punctuation.OpenBrace, 71 | Token.Punctuation.CloseBrace, 72 | 73 | Token.Keyword.Modifier.Sealed, 74 | Token.Keyword.Definition.Record, 75 | Token.Identifier.ClassName("DefaultSealedRecord"), 76 | Token.Punctuation.OpenBrace, 77 | Token.Punctuation.CloseBrace, 78 | 79 | Token.Keyword.Modifier.Public, 80 | Token.Keyword.Modifier.Sealed, 81 | 
Token.Keyword.Definition.Record, 82 | Token.Identifier.ClassName("PublicSealedRecord"), 83 | Token.Punctuation.OpenBrace, 84 | Token.Punctuation.CloseBrace, 85 | 86 | Token.Keyword.Modifier.Public, 87 | Token.Keyword.Modifier.Abstract, 88 | Token.Keyword.Definition.Record, 89 | Token.Identifier.ClassName("PublicAbstractRecord"), 90 | Token.Punctuation.OpenBrace, 91 | Token.Punctuation.CloseBrace, 92 | 93 | Token.Keyword.Modifier.Abstract, 94 | Token.Keyword.Definition.Record, 95 | Token.Identifier.ClassName("DefaultAbstractRecord"), 96 | Token.Punctuation.OpenBrace, 97 | Token.Punctuation.CloseBrace]); 98 | }); 99 | 100 | it(`generics in identifier (${styleName} Namespace)`, async () => { 101 | 102 | const input = Input.InNamespace(`record Dictionary<TKey, TValue> { }`, namespaceStyle); 103 | const tokens = await tokenize(input); 104 | 105 | tokens.should.deep.equal([ 106 | Token.Keyword.Definition.Record, 107 | Token.Identifier.ClassName("Dictionary"), 108 | Token.Punctuation.TypeParameter.Begin, 109 | Token.Identifier.TypeParameterName("TKey"), 110 | Token.Punctuation.Comma, 111 | Token.Identifier.TypeParameterName("TValue"), 112 | Token.Punctuation.TypeParameter.End, 113 | Token.Punctuation.OpenBrace, 114 | Token.Punctuation.CloseBrace]); 115 | }); 116 | 117 | it(`inheritance (${styleName} Namespace)`, async () => { 118 | 119 | const input = Input.InNamespace(` 120 | record PublicRecord : IInterface, IInterfaceTwo { } 121 | record PublicRecord<T> : Root.IInterface<Something.Nested>, Something.IInterfaceTwo { } 122 | record PublicRecord<T> : Dictionary<T, Dictionary<string, string>>, IMap<T, Dictionary<string, string>> { }`, namespaceStyle); 123 | 124 | const tokens = await tokenize(input); 125 | 126 | tokens.should.deep.equal([ 127 | Token.Keyword.Definition.Record, 128 | Token.Identifier.ClassName("PublicRecord"), 129 | Token.Punctuation.Colon, 130 | Token.Type("IInterface"), 131 | Token.Punctuation.Comma, 132 | Token.Type("IInterfaceTwo"), 133 | Token.Punctuation.OpenBrace, 134 | Token.Punctuation.CloseBrace, 135 | 136 | Token.Keyword.Definition.Record,
137 | Token.Identifier.ClassName("PublicRecord"), 138 | Token.Punctuation.TypeParameter.Begin, 139 | Token.Identifier.TypeParameterName("T"), 140 | Token.Punctuation.TypeParameter.End, 141 | Token.Punctuation.Colon, 142 | Token.Type("Root"), 143 | Token.Punctuation.Accessor, 144 | Token.Type("IInterface"), 145 | Token.Punctuation.TypeParameter.Begin, 146 | Token.Type("Something"), 147 | Token.Punctuation.Accessor, 148 | Token.Type("Nested"), 149 | Token.Punctuation.TypeParameter.End, 150 | Token.Punctuation.Comma, 151 | Token.Type("Something"), 152 | Token.Punctuation.Accessor, 153 | Token.Type("IInterfaceTwo"), 154 | Token.Punctuation.OpenBrace, 155 | Token.Punctuation.CloseBrace, 156 | 157 | Token.Keyword.Definition.Record, 158 | Token.Identifier.ClassName("PublicRecord"), 159 | Token.Punctuation.TypeParameter.Begin, 160 | Token.Identifier.TypeParameterName("T"), 161 | Token.Punctuation.TypeParameter.End, 162 | Token.Punctuation.Colon, 163 | Token.Type("Dictionary"), 164 | Token.Punctuation.TypeParameter.Begin, 165 | Token.Type("T"), 166 | Token.Punctuation.Comma, 167 | Token.Type("Dictionary"), 168 | Token.Punctuation.TypeParameter.Begin, 169 | Token.PrimitiveType.String, 170 | Token.Punctuation.Comma, 171 | Token.PrimitiveType.String, 172 | Token.Punctuation.TypeParameter.End, 173 | Token.Punctuation.TypeParameter.End, 174 | Token.Punctuation.Comma, 175 | Token.Type("IMap"), 176 | Token.Punctuation.TypeParameter.Begin, 177 | Token.Type("T"), 178 | Token.Punctuation.Comma, 179 | Token.Type("Dictionary"), 180 | Token.Punctuation.TypeParameter.Begin, 181 | Token.PrimitiveType.String, 182 | Token.Punctuation.Comma, 183 | Token.PrimitiveType.String, 184 | Token.Punctuation.TypeParameter.End, 185 | Token.Punctuation.TypeParameter.End, 186 | Token.Punctuation.OpenBrace, 187 | Token.Punctuation.CloseBrace]); 188 | }); 189 | 190 | it(`generic constraints (${styleName} Namespace)`, async () => { 191 | 192 | const input = Input.InNamespace(` 193 | record PublicRecord<T>
where T : ISomething { } 194 | record PublicRecord<T, X> : Dictionary<T, List<string>[]>, ISomething 195 | where T : ICar, new() 196 | where X : struct 197 | { 198 | }`, namespaceStyle); 199 | const tokens = await tokenize(input); 200 | 201 | tokens.should.deep.equal([ 202 | Token.Keyword.Definition.Record, 203 | Token.Identifier.ClassName("PublicRecord"), 204 | Token.Punctuation.TypeParameter.Begin, 205 | Token.Identifier.TypeParameterName("T"), 206 | Token.Punctuation.TypeParameter.End, 207 | Token.Keyword.Modifier.Where, 208 | Token.Identifier.TypeParameterName("T"), 209 | Token.Punctuation.Colon, 210 | Token.Type("ISomething"), 211 | Token.Punctuation.OpenBrace, 212 | Token.Punctuation.CloseBrace, 213 | 214 | Token.Keyword.Definition.Record, 215 | Token.Identifier.ClassName("PublicRecord"), 216 | Token.Punctuation.TypeParameter.Begin, 217 | Token.Identifier.TypeParameterName("T"), 218 | Token.Punctuation.Comma, 219 | Token.Identifier.TypeParameterName("X"), 220 | Token.Punctuation.TypeParameter.End, 221 | Token.Punctuation.Colon, 222 | Token.Type("Dictionary"), 223 | Token.Punctuation.TypeParameter.Begin, 224 | Token.Type("T"), 225 | Token.Punctuation.Comma, 226 | Token.Type("List"), 227 | Token.Punctuation.TypeParameter.Begin, 228 | Token.PrimitiveType.String, 229 | Token.Punctuation.TypeParameter.End, 230 | Token.Punctuation.OpenBracket, 231 | Token.Punctuation.CloseBracket, 232 | Token.Punctuation.TypeParameter.End, 233 | Token.Punctuation.Comma, 234 | Token.Type("ISomething"), 235 | Token.Keyword.Modifier.Where, 236 | Token.Identifier.TypeParameterName("T"), 237 | Token.Punctuation.Colon, 238 | Token.Type("ICar"), 239 | Token.Punctuation.Comma, 240 | Token.Operator.Expression.New, 241 | Token.Punctuation.OpenParen, 242 | Token.Punctuation.CloseParen, 243 | Token.Keyword.Modifier.Where, 244 | Token.Identifier.TypeParameterName("X"), 245 | Token.Punctuation.Colon, 246 | Token.Keyword.Definition.Struct, 247 | Token.Punctuation.OpenBrace, 248 | Token.Punctuation.CloseBrace]); 249 |
}); 250 | 251 | it(`nested record (${styleName} Namespace)`, async () => { 252 | 253 | const input = Input.InNamespace(` 254 | record Klass 255 | { 256 | record Nested 257 | { 258 | 259 | } 260 | }`, namespaceStyle); 261 | const tokens = await tokenize(input); 262 | 263 | tokens.should.deep.equal([ 264 | Token.Keyword.Definition.Record, 265 | Token.Identifier.ClassName("Klass"), 266 | Token.Punctuation.OpenBrace, 267 | 268 | Token.Keyword.Definition.Record, 269 | Token.Identifier.ClassName("Nested"), 270 | Token.Punctuation.OpenBrace, 271 | Token.Punctuation.CloseBrace, 272 | 273 | Token.Punctuation.CloseBrace]); 274 | }); 275 | 276 | it(`nested record with modifier (${styleName} Namespace)`, async () => { 277 | 278 | const input = Input.InNamespace(` 279 | record Klass 280 | { 281 | public record Nested 282 | { 283 | 284 | } 285 | }`, namespaceStyle); 286 | const tokens = await tokenize(input); 287 | 288 | tokens.should.deep.equal([ 289 | Token.Keyword.Definition.Record, 290 | Token.Identifier.ClassName("Klass"), 291 | Token.Punctuation.OpenBrace, 292 | 293 | Token.Keyword.Modifier.Public, 294 | Token.Keyword.Definition.Record, 295 | Token.Identifier.ClassName("Nested"), 296 | Token.Punctuation.OpenBrace, 297 | Token.Punctuation.CloseBrace, 298 | 299 | Token.Punctuation.CloseBrace]); 300 | }); 301 | 302 | it(`unsafe record (${styleName} Namespace)`, async () => { 303 | const input = Input.InNamespace(` 304 | unsafe record C 305 | { 306 | }`, namespaceStyle); 307 | const tokens = await tokenize(input); 308 | 309 | tokens.should.deep.equal([ 310 | Token.Keyword.Modifier.Unsafe, 311 | Token.Keyword.Definition.Record, 312 | Token.Identifier.ClassName("C"), 313 | Token.Punctuation.OpenBrace, 314 | Token.Punctuation.CloseBrace]); 315 | }); 316 | 317 | it(`primary constructor record (${styleName} Namespace)`, async () => { 318 | 319 | const input = Input.InNamespace(` 320 | record Person(string name, int age); 321 | record Person2(string name, int age) { }` 322 | , 
namespaceStyle); 323 | const tokens = await tokenize(input); 324 | 325 | tokens.should.deep.equal([ 326 | Token.Keyword.Definition.Record, 327 | Token.Identifier.ClassName("Person"), 328 | Token.Punctuation.OpenParen, 329 | Token.PrimitiveType.String, 330 | Token.Identifier.ParameterName("name"), 331 | Token.Punctuation.Comma, 332 | Token.PrimitiveType.Int, 333 | Token.Identifier.ParameterName("age"), 334 | Token.Punctuation.CloseParen, 335 | Token.Punctuation.Semicolon, 336 | Token.Keyword.Definition.Record, 337 | Token.Identifier.ClassName("Person2"), 338 | Token.Punctuation.OpenParen, 339 | Token.PrimitiveType.String, 340 | Token.Identifier.ParameterName("name"), 341 | Token.Punctuation.Comma, 342 | Token.PrimitiveType.Int, 343 | Token.Identifier.ParameterName("age"), 344 | Token.Punctuation.CloseParen, 345 | Token.Punctuation.OpenBrace, 346 | Token.Punctuation.CloseBrace]); 347 | }); 348 | 349 | it(`primary constructor record class (${styleName} Namespace)`, async () => { 350 | 351 | const input = Input.InNamespace(` 352 | record class Person(string name, int age); 353 | record class Person2(string name, int age) { }` 354 | , namespaceStyle); 355 | const tokens = await tokenize(input); 356 | 357 | tokens.should.deep.equal([ 358 | Token.Keyword.Definition.Record, 359 | Token.Keyword.Definition.Class, 360 | Token.Identifier.ClassName("Person"), 361 | Token.Punctuation.OpenParen, 362 | Token.PrimitiveType.String, 363 | Token.Identifier.ParameterName("name"), 364 | Token.Punctuation.Comma, 365 | Token.PrimitiveType.Int, 366 | Token.Identifier.ParameterName("age"), 367 | Token.Punctuation.CloseParen, 368 | Token.Punctuation.Semicolon, 369 | Token.Keyword.Definition.Record, 370 | Token.Keyword.Definition.Class, 371 | Token.Identifier.ClassName("Person2"), 372 | Token.Punctuation.OpenParen, 373 | Token.PrimitiveType.String, 374 | Token.Identifier.ParameterName("name"), 375 | Token.Punctuation.Comma, 376 | Token.PrimitiveType.Int, 377 | 
Token.Identifier.ParameterName("age"), 378 | Token.Punctuation.CloseParen, 379 | Token.Punctuation.OpenBrace, 380 | Token.Punctuation.CloseBrace]); 381 | }); 382 | 383 | it(`primary constructor record struct (${styleName} Namespace)`, async () => { 384 | 385 | const input = Input.InNamespace(` 386 | record struct Person(string name, int age); 387 | record struct Person2(string name, int age) { }` 388 | , namespaceStyle); 389 | const tokens = await tokenize(input); 390 | 391 | tokens.should.deep.equal([ 392 | Token.Keyword.Definition.Record, 393 | Token.Keyword.Definition.Struct, 394 | Token.Identifier.StructName("Person"), 395 | Token.Punctuation.OpenParen, 396 | Token.PrimitiveType.String, 397 | Token.Identifier.ParameterName("name"), 398 | Token.Punctuation.Comma, 399 | Token.PrimitiveType.Int, 400 | Token.Identifier.ParameterName("age"), 401 | Token.Punctuation.CloseParen, 402 | Token.Punctuation.Semicolon, 403 | Token.Keyword.Definition.Record, 404 | Token.Keyword.Definition.Struct, 405 | Token.Identifier.StructName("Person2"), 406 | Token.Punctuation.OpenParen, 407 | Token.PrimitiveType.String, 408 | Token.Identifier.ParameterName("name"), 409 | Token.Punctuation.Comma, 410 | Token.PrimitiveType.Int, 411 | Token.Identifier.ParameterName("age"), 412 | Token.Punctuation.CloseParen, 413 | Token.Punctuation.OpenBrace, 414 | Token.Punctuation.CloseBrace]); 415 | }); 416 | } 417 | }); 418 | }); 419 | -------------------------------------------------------------------------------- /test/struct.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Token, NamespaceStyle } from './utils/tokenize'; 8 | 9 | describe("Structs", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Structs", () => { 13 | for (const namespaceStyle of [NamespaceStyle.BlockScoped, NamespaceStyle.FileScoped]) { 14 | const styleName = namespaceStyle == NamespaceStyle.BlockScoped 15 | ? "Block-Scoped" 16 | : "File-Scoped"; 17 | 18 | it(`simple struct (${styleName} Namespace)`, async () => { 19 | 20 | const input = `struct S { }`; 21 | const tokens = await tokenize(input); 22 | 23 | tokens.should.deep.equal([ 24 | Token.Keyword.Definition.Struct, 25 | Token.Identifier.StructName("S"), 26 | Token.Punctuation.OpenBrace, 27 | Token.Punctuation.CloseBrace]); 28 | }); 29 | 30 | it(`struct interface implementation (${styleName} Namespace)`, async () => { 31 | 32 | const input = ` 33 | interface IFoo { } 34 | struct S : IFoo { } 35 | `; 36 | const tokens = await tokenize(input); 37 | 38 | tokens.should.deep.equal([ 39 | Token.Keyword.Definition.Interface, 40 | Token.Identifier.InterfaceName("IFoo"), 41 | Token.Punctuation.OpenBrace, 42 | Token.Punctuation.CloseBrace, 43 | Token.Keyword.Definition.Struct, 44 | Token.Identifier.StructName("S"), 45 | Token.Punctuation.Colon, 46 | Token.Type("IFoo"), 47 | Token.Punctuation.OpenBrace, 48 | Token.Punctuation.CloseBrace]); 49 | }); 50 | 51 | it(`nested struct (${styleName} Namespace)`, async () => { 52 | 53 | const input = ` 54 | class Klass 55 | { 56 | struct Nested 57 | { 58 | 59 | } 60 | }`; 61 | const tokens = await tokenize(input); 62 | 63 | tokens.should.deep.equal([ 64 | Token.Keyword.Definition.Class, 65 | Token.Identifier.ClassName("Klass"), 66 | Token.Punctuation.OpenBrace, 67 | 68 | Token.Keyword.Definition.Struct, 69 | Token.Identifier.StructName("Nested"), 70 | Token.Punctuation.OpenBrace, 71 | 
Token.Punctuation.CloseBrace, 72 | 73 | Token.Punctuation.CloseBrace]); 74 | }); 75 | 76 | it(`nested struct with modifier (${styleName} Namespace)`, async () => { 77 | 78 | const input = ` 79 | class Klass 80 | { 81 | public struct Nested 82 | { 83 | 84 | } 85 | }`; 86 | const tokens = await tokenize(input); 87 | 88 | tokens.should.deep.equal([ 89 | Token.Keyword.Definition.Class, 90 | Token.Identifier.ClassName("Klass"), 91 | Token.Punctuation.OpenBrace, 92 | 93 | Token.Keyword.Modifier.Public, 94 | Token.Keyword.Definition.Struct, 95 | Token.Identifier.StructName("Nested"), 96 | Token.Punctuation.OpenBrace, 97 | Token.Punctuation.CloseBrace, 98 | 99 | Token.Punctuation.CloseBrace]); 100 | }); 101 | 102 | it(`generic struct (${styleName} Namespace)`, async () => { 103 | 104 | const input = ` 105 | struct S { } 106 | `; 107 | const tokens = await tokenize(input); 108 | 109 | tokens.should.deep.equal([ 110 | Token.Keyword.Definition.Struct, 111 | Token.Identifier.StructName("S"), 112 | Token.Punctuation.TypeParameter.Begin, 113 | Token.Identifier.TypeParameterName("T1"), 114 | Token.Punctuation.Comma, 115 | Token.Identifier.TypeParameterName("T2"), 116 | Token.Punctuation.TypeParameter.End, 117 | Token.Punctuation.OpenBrace, 118 | Token.Punctuation.CloseBrace]); 119 | }); 120 | 121 | it(`generic struct with constraints (${styleName} Namespace)`, async () => { 122 | 123 | const input = ` 124 | struct S where T1 : T2 { } 125 | `; 126 | const tokens = await tokenize(input); 127 | 128 | tokens.should.deep.equal([ 129 | Token.Keyword.Definition.Struct, 130 | Token.Identifier.StructName("S"), 131 | Token.Punctuation.TypeParameter.Begin, 132 | Token.Identifier.TypeParameterName("T1"), 133 | Token.Punctuation.Comma, 134 | Token.Identifier.TypeParameterName("T2"), 135 | Token.Punctuation.TypeParameter.End, 136 | Token.Keyword.Modifier.Where, 137 | Token.Identifier.TypeParameterName("T1"), 138 | Token.Punctuation.Colon, 139 | Token.Type("T2"), 140 | 
Token.Punctuation.OpenBrace, 141 | Token.Punctuation.CloseBrace]); 142 | }); 143 | 144 | it(`ref struct (${styleName} Namespace)`, async () => { 145 | const input = `ref struct S {}`; 146 | const tokens = await tokenize(input); 147 | 148 | tokens.should.deep.equal([ 149 | Token.Keyword.Modifier.Ref, 150 | Token.Keyword.Definition.Struct, 151 | Token.Identifier.StructName("S"), 152 | Token.Punctuation.OpenBrace, 153 | Token.Punctuation.CloseBrace]); 154 | }); 155 | 156 | it(`readonly ref struct(${styleName} Namespace)`, async () => { 157 | const input = `readonly ref struct S {}`; 158 | const tokens = await tokenize(input); 159 | 160 | tokens.should.deep.equal([ 161 | Token.Keyword.Modifier.ReadOnly, 162 | Token.Keyword.Modifier.Ref, 163 | Token.Keyword.Definition.Struct, 164 | Token.Identifier.StructName("S"), 165 | Token.Punctuation.OpenBrace, 166 | Token.Punctuation.CloseBrace]); 167 | }); 168 | 169 | it(`primary constructor struct (${styleName} Namespace)`, async () => { 170 | 171 | const input = ` 172 | struct Person(string name, int age); 173 | struct Person2(string name, int age) { } `; 174 | const tokens = await tokenize(input); 175 | 176 | tokens.should.deep.equal([ 177 | Token.Keyword.Definition.Struct, 178 | Token.Identifier.StructName("Person"), 179 | Token.Punctuation.OpenParen, 180 | Token.PrimitiveType.String, 181 | Token.Identifier.ParameterName("name"), 182 | Token.Punctuation.Comma, 183 | Token.PrimitiveType.Int, 184 | Token.Identifier.ParameterName("age"), 185 | Token.Punctuation.CloseParen, 186 | Token.Punctuation.Semicolon, 187 | Token.Keyword.Definition.Struct, 188 | Token.Identifier.StructName("Person2"), 189 | Token.Punctuation.OpenParen, 190 | Token.PrimitiveType.String, 191 | Token.Identifier.ParameterName("name"), 192 | Token.Punctuation.Comma, 193 | Token.PrimitiveType.Int, 194 | Token.Identifier.ParameterName("age"), 195 | Token.Punctuation.CloseParen, 196 | Token.Punctuation.OpenBrace, 197 | Token.Punctuation.CloseBrace]); 198 | }); 199 
| } 200 | }); 201 | }); 202 | -------------------------------------------------------------------------------- /test/type-name.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Input, Token } from './utils/tokenize'; 8 | 9 | describe("Type names", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Type names", () => { 13 | it("built-in type - object", async () => { 14 | const input = Input.InClass(`object x;`); 15 | const tokens = await tokenize(input); 16 | 17 | tokens.should.deep.equal([ 18 | Token.PrimitiveType.Object, 19 | Token.Identifier.FieldName("x"), 20 | Token.Punctuation.Semicolon]); 21 | }); 22 | 23 | it("qualified name - System.Object", async () => { 24 | const input = Input.InClass(`System.Object x;`); 25 | const tokens = await tokenize(input); 26 | 27 | tokens.should.deep.equal([ 28 | Token.Type("System"), 29 | Token.Punctuation.Accessor, 30 | Token.Type("Object"), 31 | Token.Identifier.FieldName("x"), 32 | Token.Punctuation.Semicolon]); 33 | }); 34 | 35 | it("globally-qualified name - global::System.Object", async () => { 36 | const input = Input.InClass(`global::System.Object x;`); 37 | const tokens = await tokenize(input); 38 | 39 | tokens.should.deep.equal([ 40 | Token.Identifier.AliasName("global"), 41 | Token.Punctuation.ColonColon, 42 | Token.Type("System"), 43 | Token.Punctuation.Accessor, 44 | Token.Type("Object"), 45 | Token.Identifier.FieldName("x"), 46 | Token.Punctuation.Semicolon]); 47 | }); 48 | 49 | it("tuple type - (int, int)", async () => { 50 | const input = 
Input.InClass(`(int, int) x;`); 51 | const tokens = await tokenize(input); 52 | 53 | tokens.should.deep.equal([ 54 | Token.Punctuation.OpenParen, 55 | Token.PrimitiveType.Int, 56 | Token.Punctuation.Comma, 57 | Token.PrimitiveType.Int, 58 | Token.Punctuation.CloseParen, 59 | Token.Identifier.FieldName("x"), 60 | Token.Punctuation.Semicolon]); 61 | }); 62 | 63 | it("tuple type with element names - (int i, int j)", async () => { 64 | const input = Input.InClass(`(int i, int j) x;`); 65 | const tokens = await tokenize(input); 66 | 67 | tokens.should.deep.equal([ 68 | Token.Punctuation.OpenParen, 69 | Token.PrimitiveType.Int, 70 | Token.Identifier.TupleElementName("i"), 71 | Token.Punctuation.Comma, 72 | Token.PrimitiveType.Int, 73 | Token.Identifier.TupleElementName("j"), 74 | Token.Punctuation.CloseParen, 75 | Token.Identifier.FieldName("x"), 76 | Token.Punctuation.Semicolon]); 77 | }); 78 | 79 | it("nested tuple type - (int, (int, int))", async () => { 80 | const input = Input.InClass(`(int, (int, int)) x;`); 81 | const tokens = await tokenize(input); 82 | 83 | tokens.should.deep.equal([ 84 | Token.Punctuation.OpenParen, 85 | Token.PrimitiveType.Int, 86 | Token.Punctuation.Comma, 87 | Token.Punctuation.OpenParen, 88 | Token.PrimitiveType.Int, 89 | Token.Punctuation.Comma, 90 | Token.PrimitiveType.Int, 91 | Token.Punctuation.CloseParen, 92 | Token.Punctuation.CloseParen, 93 | Token.Identifier.FieldName("x"), 94 | Token.Punctuation.Semicolon]); 95 | }); 96 | 97 | it("nested tuple type with element names - (int i, (int j, int k))", async () => { 98 | const input = Input.InClass(`(int i, (int j, int k)) x;`); 99 | const tokens = await tokenize(input); 100 | 101 | tokens.should.deep.equal([ 102 | Token.Punctuation.OpenParen, 103 | Token.PrimitiveType.Int, 104 | Token.Identifier.TupleElementName("i"), 105 | Token.Punctuation.Comma, 106 | Token.Punctuation.OpenParen, 107 | Token.PrimitiveType.Int, 108 | Token.Identifier.TupleElementName("j"), 109 | Token.Punctuation.Comma, 
110 | Token.PrimitiveType.Int, 111 | Token.Identifier.TupleElementName("k"), 112 | Token.Punctuation.CloseParen, 113 | Token.Punctuation.CloseParen, 114 | Token.Identifier.FieldName("x"), 115 | Token.Punctuation.Semicolon]); 116 | }); 117 | 118 | it("nullable tuple type - (int, int)?", async () => { 119 | const input = Input.InClass(`(int, int)? x;`); 120 | const tokens = await tokenize(input); 121 | 122 | tokens.should.deep.equal([ 123 | Token.Punctuation.OpenParen, 124 | Token.PrimitiveType.Int, 125 | Token.Punctuation.Comma, 126 | Token.PrimitiveType.Int, 127 | Token.Punctuation.CloseParen, 128 | Token.Punctuation.QuestionMark, 129 | Token.Identifier.FieldName("x"), 130 | Token.Punctuation.Semicolon]); 131 | }); 132 | 133 | it("array tuple type - (int, int)[]", async () => { 134 | const input = Input.InClass(`(int, int)[] x;`); 135 | const tokens = await tokenize(input); 136 | 137 | tokens.should.deep.equal([ 138 | Token.Punctuation.OpenParen, 139 | Token.PrimitiveType.Int, 140 | Token.Punctuation.Comma, 141 | Token.PrimitiveType.Int, 142 | Token.Punctuation.CloseParen, 143 | Token.Punctuation.OpenBracket, 144 | Token.Punctuation.CloseBracket, 145 | Token.Identifier.FieldName("x"), 146 | Token.Punctuation.Semicolon]); 147 | }); 148 | 149 | it("generic type - List", async () => { 150 | const input = Input.InClass(`List x;`); 151 | const tokens = await tokenize(input); 152 | 153 | tokens.should.deep.equal([ 154 | Token.Type("List"), 155 | Token.Punctuation.TypeParameter.Begin, 156 | Token.PrimitiveType.Int, 157 | Token.Punctuation.TypeParameter.End, 158 | Token.Identifier.FieldName("x"), 159 | Token.Punctuation.Semicolon]); 160 | }); 161 | 162 | it("generic type with tuple - List<(int, int)>", async () => { 163 | const input = Input.InClass(`List<(int, int)> x;`); 164 | const tokens = await tokenize(input); 165 | 166 | tokens.should.deep.equal([ 167 | Token.Type("List"), 168 | Token.Punctuation.TypeParameter.Begin, 169 | Token.Punctuation.OpenParen, 170 | 
Token.PrimitiveType.Int, 171 | Token.Punctuation.Comma, 172 | Token.PrimitiveType.Int, 173 | Token.Punctuation.CloseParen, 174 | Token.Punctuation.TypeParameter.End, 175 | Token.Identifier.FieldName("x"), 176 | Token.Punctuation.Semicolon]); 177 | }); 178 | 179 | it("generic type with tuple with element names - List<(int i, int j)>", async () => { 180 | const input = Input.InClass(`List<(int i, int j)> x;`); 181 | const tokens = await tokenize(input); 182 | 183 | tokens.should.deep.equal([ 184 | Token.Type("List"), 185 | Token.Punctuation.TypeParameter.Begin, 186 | Token.Punctuation.OpenParen, 187 | Token.PrimitiveType.Int, 188 | Token.Identifier.TupleElementName("i"), 189 | Token.Punctuation.Comma, 190 | Token.PrimitiveType.Int, 191 | Token.Identifier.TupleElementName("j"), 192 | Token.Punctuation.CloseParen, 193 | Token.Punctuation.TypeParameter.End, 194 | Token.Identifier.FieldName("x"), 195 | Token.Punctuation.Semicolon]); 196 | }); 197 | 198 | it("generic type with multiple parameters - Dictionary", async () => { 199 | const input = Input.InClass(`Dictionary x;`); 200 | const tokens = await tokenize(input); 201 | 202 | tokens.should.deep.equal([ 203 | Token.Type("Dictionary"), 204 | Token.Punctuation.TypeParameter.Begin, 205 | Token.PrimitiveType.Int, 206 | Token.Punctuation.Comma, 207 | Token.PrimitiveType.Int, 208 | Token.Punctuation.TypeParameter.End, 209 | Token.Identifier.FieldName("x"), 210 | Token.Punctuation.Semicolon]); 211 | }); 212 | 213 | it("qualified generic type - System.Collections.Generic.List", async () => { 214 | const input = Input.InClass(`System.Collections.Generic.List x;`); 215 | const tokens = await tokenize(input); 216 | 217 | tokens.should.deep.equal([ 218 | Token.Type("System"), 219 | Token.Punctuation.Accessor, 220 | Token.Type("Collections"), 221 | Token.Punctuation.Accessor, 222 | Token.Type("Generic"), 223 | Token.Punctuation.Accessor, 224 | Token.Type("List"), 225 | Token.Punctuation.TypeParameter.Begin, 226 | 
Token.PrimitiveType.Int, 227 | Token.Punctuation.TypeParameter.End, 228 | Token.Identifier.FieldName("x"), 229 | Token.Punctuation.Semicolon]); 230 | }); 231 | 232 | it("generic type with nested type - List.Enumerator", async () => { 233 | const input = Input.InClass(`List.Enumerator x;`); 234 | const tokens = await tokenize(input); 235 | 236 | tokens.should.deep.equal([ 237 | Token.Type("List"), 238 | Token.Punctuation.TypeParameter.Begin, 239 | Token.PrimitiveType.Int, 240 | Token.Punctuation.TypeParameter.End, 241 | Token.Punctuation.Accessor, 242 | Token.Type("Enumerator"), 243 | Token.Identifier.FieldName("x"), 244 | Token.Punctuation.Semicolon]); 245 | }); 246 | 247 | it("nullable type - int?", async () => { 248 | const input = Input.InClass(`int? x;`); 249 | const tokens = await tokenize(input); 250 | 251 | tokens.should.deep.equal([ 252 | Token.PrimitiveType.Int, 253 | Token.Punctuation.QuestionMark, 254 | Token.Identifier.FieldName("x"), 255 | Token.Punctuation.Semicolon]); 256 | }); 257 | 258 | it("nullable array type - int[]?", async () => { 259 | const input = Input.InClass(`int[]? 
x;`); 260 | const tokens = await tokenize(input); 261 | 262 | tokens.should.deep.equal([ 263 | Token.PrimitiveType.Int, 264 | Token.Punctuation.OpenBracket, 265 | Token.Punctuation.CloseBracket, 266 | Token.Punctuation.QuestionMark, 267 | Token.Identifier.FieldName("x"), 268 | Token.Punctuation.Semicolon]); 269 | }); 270 | 271 | it("ref local type - ref int", async () => { 272 | const input = Input.InMethod(`ref int x;`); 273 | const tokens = await tokenize(input); 274 | 275 | tokens.should.deep.equal([ 276 | Token.Keyword.Modifier.Ref, 277 | Token.PrimitiveType.Int, 278 | Token.Identifier.LocalName("x"), 279 | Token.Punctuation.Semicolon]); 280 | }); 281 | 282 | it("ref implicit local type - ref int", async () => { 283 | const input = Input.InMethod(`ref var x;`); 284 | const tokens = await tokenize(input); 285 | 286 | tokens.should.deep.equal([ 287 | Token.Keyword.Modifier.Ref, 288 | Token.Keyword.Definition.Var, 289 | Token.Identifier.LocalName("x"), 290 | Token.Punctuation.Semicolon]); 291 | }); 292 | 293 | it("assignments new object creation expression", async () => { 294 | const input = Input.InMethod(`x = new List();`); 295 | const tokens = await tokenize(input); 296 | 297 | tokens.should.deep.equal([ 298 | Token.Variable.ReadWrite("x"), 299 | Token.Operator.Assignment, 300 | Token.Operator.Expression.New, 301 | Token.Type("List"), 302 | Token.Punctuation.TypeParameter.Begin, 303 | Token.PrimitiveType.Int, 304 | Token.Punctuation.TypeParameter.End, 305 | Token.Punctuation.OpenParen, 306 | Token.Punctuation.CloseParen, 307 | Token.Punctuation.Semicolon]); 308 | }); 309 | 310 | it("assignments target-typed new object creation expression", async () => { 311 | const input = Input.InMethod(`List x = new();`); 312 | const tokens = await tokenize(input); 313 | 314 | tokens.should.deep.equal([ 315 | Token.Type("List"), 316 | Token.Punctuation.TypeParameter.Begin, 317 | Token.PrimitiveType.Int, 318 | Token.Punctuation.TypeParameter.End, 319 | 
Token.Identifier.LocalName("x"), 320 | Token.Operator.Assignment, 321 | Token.Operator.Expression.New, 322 | Token.Punctuation.OpenParen, 323 | Token.Punctuation.CloseParen, 324 | Token.Punctuation.Semicolon]); 325 | }); 326 | 327 | it("assignments new array creation expression", async () => { 328 | const input = Input.InMethod(`x = new string[4];`); 329 | const tokens = await tokenize(input); 330 | 331 | tokens.should.deep.equal([ 332 | Token.Variable.ReadWrite("x"), 333 | Token.Operator.Assignment, 334 | Token.Operator.Expression.New, 335 | Token.PrimitiveType.String, 336 | Token.Punctuation.OpenBracket, 337 | Token.Literal.Numeric.Decimal("4"), 338 | Token.Punctuation.CloseBracket, 339 | Token.Punctuation.Semicolon]); 340 | }); 341 | 342 | it("assignments new anonymous object creation expression", async () => { 343 | const input = Input.InMethod(`x = new { Length = 5 };`); 344 | const tokens = await tokenize(input); 345 | 346 | tokens.should.deep.equal([ 347 | Token.Variable.ReadWrite("x"), 348 | Token.Operator.Assignment, 349 | Token.Operator.Expression.New, 350 | Token.Punctuation.OpenBrace, 351 | Token.Variable.ReadWrite("Length"), 352 | Token.Operator.Assignment, 353 | Token.Literal.Numeric.Decimal("5"), 354 | Token.Punctuation.CloseBrace, 355 | Token.Punctuation.Semicolon]); 356 | }); 357 | }); 358 | }); -------------------------------------------------------------------------------- /test/using-directive.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Token } from './utils/tokenize'; 8 | 9 | describe("Using directives", () => { 10 | before(() => { should(); }); 11 | 12 | describe("Using directives", () => { 13 | it("using namespace", async () => { 14 | 15 | const input = ` 16 | using System; 17 | using System.Collections.Generic;`; 18 | const tokens = await tokenize(input); 19 | 20 | tokens.should.deep.equal([ 21 | Token.Keyword.Directive.Using, 22 | Token.Identifier.NamespaceName("System"), 23 | Token.Punctuation.Semicolon, 24 | Token.Keyword.Directive.Using, 25 | Token.Identifier.NamespaceName("System"), 26 | Token.Punctuation.Accessor, 27 | Token.Identifier.NamespaceName("Collections"), 28 | Token.Punctuation.Accessor, 29 | Token.Identifier.NamespaceName("Generic"), 30 | Token.Punctuation.Semicolon, 31 | ]); 32 | }); 33 | 34 | it("using static type", async () => { 35 | 36 | const input = `using static System.Console;`; 37 | const tokens = await tokenize(input); 38 | 39 | tokens.should.deep.equal([ 40 | Token.Keyword.Directive.Using, 41 | Token.Keyword.Directive.Static, 42 | Token.Type("System"), 43 | Token.Punctuation.Accessor, 44 | Token.Type("Console"), 45 | Token.Punctuation.Semicolon]); 46 | }); 47 | 48 | it("namespace alias", async () => { 49 | 50 | const input = `using S = System;`; 51 | const tokens = await tokenize(input); 52 | 53 | tokens.should.deep.equal([ 54 | Token.Keyword.Directive.Using, 55 | Token.Identifier.AliasName("S"), 56 | Token.Operator.Assignment, 57 | Token.Type("System"), 58 | Token.Punctuation.Semicolon]); 59 | }); 60 | 61 | it("type alias", async () => { 62 | 63 | const input = `using C = System.Console;`; 64 | const tokens = await tokenize(input); 65 | 66 | tokens.should.deep.equal([ 67 | Token.Keyword.Directive.Using, 68 | Token.Identifier.AliasName("C"), 69 | Token.Operator.Assignment, 70 | Token.Type("System"), 71 | 
Token.Punctuation.Accessor, 72 | Token.Type("Console"), 73 | Token.Punctuation.Semicolon]); 74 | }); 75 | 76 | it("type alias with generic type", async () => { 77 | 78 | const input = `using IntList = System.Collections.Generic.List;`; 79 | const tokens = await tokenize(input); 80 | 81 | tokens.should.deep.equal([ 82 | Token.Keyword.Directive.Using, 83 | Token.Identifier.AliasName("IntList"), 84 | Token.Operator.Assignment, 85 | Token.Type("System"), 86 | Token.Punctuation.Accessor, 87 | Token.Type("Collections"), 88 | Token.Punctuation.Accessor, 89 | Token.Type("Generic"), 90 | Token.Punctuation.Accessor, 91 | Token.Type("List"), 92 | Token.Punctuation.TypeParameter.Begin, 93 | Token.Type("System"), 94 | Token.Punctuation.Accessor, 95 | Token.Type("Int32"), 96 | Token.Punctuation.TypeParameter.End, 97 | Token.Punctuation.Semicolon]); 98 | }); 99 | 100 | it("type alias with nested generic types", async () => { 101 | 102 | const input = `using X = System.Collections.Generic.Dictionary>;`; 103 | const tokens = await tokenize(input); 104 | 105 | tokens.should.deep.equal([ 106 | Token.Keyword.Directive.Using, 107 | Token.Identifier.AliasName("X"), 108 | Token.Operator.Assignment, 109 | Token.Type("System"), 110 | Token.Punctuation.Accessor, 111 | Token.Type("Collections"), 112 | Token.Punctuation.Accessor, 113 | Token.Type("Generic"), 114 | Token.Punctuation.Accessor, 115 | Token.Type("Dictionary"), 116 | Token.Punctuation.TypeParameter.Begin, 117 | Token.Type("System"), 118 | Token.Punctuation.Accessor, 119 | Token.Type("Int32"), 120 | Token.Punctuation.Comma, 121 | Token.Type("System"), 122 | Token.Punctuation.Accessor, 123 | Token.Type("Collections"), 124 | Token.Punctuation.Accessor, 125 | Token.Type("Generic"), 126 | Token.Punctuation.Accessor, 127 | Token.Type("List"), 128 | Token.Punctuation.TypeParameter.Begin, 129 | Token.Type("System"), 130 | Token.Punctuation.Accessor, 131 | Token.Type("String"), 132 | Token.Punctuation.TypeParameter.End, 133 | 
Token.Punctuation.TypeParameter.End, 134 | Token.Punctuation.Semicolon]); 135 | }); 136 | 137 | it("type alias with nested generic types and comments interspersed", async () => { 138 | 139 | const input = `using X =/**/Dictionary/**//**/>/**/;//end`; 140 | const tokens = await tokenize(input); 141 | 142 | tokens.should.deep.equal([ 143 | Token.Keyword.Directive.Using, 144 | Token.Identifier.AliasName("X"), 145 | Token.Operator.Assignment, 146 | Token.Comment.MultiLine.Start, 147 | Token.Comment.MultiLine.End, 148 | Token.Type("Dictionary"), 149 | Token.Comment.MultiLine.Start, 150 | Token.Comment.MultiLine.End, 151 | Token.Punctuation.TypeParameter.Begin, 152 | Token.Comment.MultiLine.Start, 153 | Token.Comment.MultiLine.End, 154 | Token.PrimitiveType.Int, 155 | Token.Comment.MultiLine.Start, 156 | Token.Comment.MultiLine.End, 157 | Token.Punctuation.Comma, 158 | Token.Comment.MultiLine.Start, 159 | Token.Comment.MultiLine.End, 160 | Token.Type("List"), 161 | Token.Comment.MultiLine.Start, 162 | Token.Comment.MultiLine.End, 163 | Token.Punctuation.TypeParameter.Begin, 164 | Token.Comment.MultiLine.Start, 165 | Token.Comment.MultiLine.End, 166 | Token.PrimitiveType.String, 167 | Token.Comment.MultiLine.Start, 168 | Token.Comment.MultiLine.End, 169 | Token.Punctuation.TypeParameter.End, 170 | Token.Comment.MultiLine.Start, 171 | Token.Comment.MultiLine.End, 172 | Token.Punctuation.TypeParameter.End, 173 | Token.Comment.MultiLine.Start, 174 | Token.Comment.MultiLine.End, 175 | Token.Punctuation.Semicolon, 176 | Token.Comment.SingleLine.Start, 177 | Token.Comment.SingleLine.Text("end")]); 178 | }); 179 | 180 | describe("global using directives", () => { 181 | it("regular using", async () => { 182 | const input = ` 183 | global using System; 184 | global using System.Collections.Generic;`; 185 | const tokens = await tokenize(input); 186 | 187 | tokens.should.deep.equal([ 188 | Token.Keyword.Directive.Global, 189 | Token.Keyword.Directive.Using, 190 | 
Token.Identifier.NamespaceName("System"), 191 | Token.Punctuation.Semicolon, 192 | Token.Keyword.Directive.Global, 193 | Token.Keyword.Directive.Using, 194 | Token.Identifier.NamespaceName("System"), 195 | Token.Punctuation.Accessor, 196 | Token.Identifier.NamespaceName("Collections"), 197 | Token.Punctuation.Accessor, 198 | Token.Identifier.NamespaceName("Generic"), 199 | Token.Punctuation.Semicolon, 200 | ]); 201 | }); 202 | 203 | it("regular using static", async () => { 204 | const input = `global using static System.Console;`; 205 | const tokens = await tokenize(input); 206 | 207 | tokens.should.deep.equal([ 208 | Token.Keyword.Directive.Global, 209 | Token.Keyword.Directive.Using, 210 | Token.Keyword.Directive.Static, 211 | Token.Type("System"), 212 | Token.Punctuation.Accessor, 213 | Token.Type("Console"), 214 | Token.Punctuation.Semicolon 215 | ]); 216 | }); 217 | 218 | it("regular using alias", async () => { 219 | const input = `global using blah = System.Console;`; 220 | const tokens = await tokenize(input); 221 | 222 | tokens.should.deep.equal([ 223 | Token.Keyword.Directive.Global, 224 | Token.Keyword.Directive.Using, 225 | Token.Identifier.AliasName("blah"), 226 | Token.Operator.Assignment, 227 | Token.Type("System"), 228 | Token.Punctuation.Accessor, 229 | Token.Type("Console"), 230 | Token.Punctuation.Semicolon 231 | ]); 232 | }); 233 | 234 | it("unsafe using static", async () => { 235 | const input = `global using static unsafe System.Collections.Generic.List;`; 236 | const tokens = await tokenize(input); 237 | 238 | tokens.should.deep.equal([ 239 | Token.Keyword.Directive.Global, 240 | Token.Keyword.Directive.Using, 241 | Token.Keyword.Directive.Static, 242 | Token.Keyword.Modifier.Unsafe, 243 | Token.Type("System"), 244 | Token.Punctuation.Accessor, 245 | Token.Type("Collections"), 246 | Token.Punctuation.Accessor, 247 | Token.Type("Generic"), 248 | Token.Punctuation.Accessor, 249 | Token.Type("List"), 250 | Token.Punctuation.TypeParameter.Begin, 
251 | Token.PrimitiveType.Int, 252 | Token.Punctuation.Asterisk, 253 | Token.Punctuation.OpenBracket, 254 | Token.Punctuation.CloseBracket, 255 | Token.Punctuation.TypeParameter.End, 256 | Token.Punctuation.Semicolon 257 | ]); 258 | }); 259 | 260 | it("unsafe using alias", async () => { 261 | const input = `global using unsafe blah = System.Collections.Generic.List;`; 262 | const tokens = await tokenize(input); 263 | 264 | tokens.should.deep.equal([ 265 | Token.Keyword.Directive.Global, 266 | Token.Keyword.Directive.Using, 267 | Token.Keyword.Modifier.Unsafe, 268 | Token.Identifier.AliasName("blah"), 269 | Token.Operator.Assignment, 270 | Token.Type("System"), 271 | Token.Punctuation.Accessor, 272 | Token.Type("Collections"), 273 | Token.Punctuation.Accessor, 274 | Token.Type("Generic"), 275 | Token.Punctuation.Accessor, 276 | Token.Type("List"), 277 | Token.Punctuation.TypeParameter.Begin, 278 | Token.PrimitiveType.Int, 279 | Token.Punctuation.Asterisk, 280 | Token.Punctuation.OpenBracket, 281 | Token.Punctuation.CloseBracket, 282 | Token.Punctuation.TypeParameter.End, 283 | Token.Punctuation.Semicolon 284 | ]); 285 | }); 286 | }); 287 | 288 | it("unsafe using static", async () => { 289 | const input = `using static unsafe System.Collections.Generic.List;`; 290 | const tokens = await tokenize(input); 291 | 292 | tokens.should.deep.equal([ 293 | Token.Keyword.Directive.Using, 294 | Token.Keyword.Directive.Static, 295 | Token.Keyword.Modifier.Unsafe, 296 | Token.Type("System"), 297 | Token.Punctuation.Accessor, 298 | Token.Type("Collections"), 299 | Token.Punctuation.Accessor, 300 | Token.Type("Generic"), 301 | Token.Punctuation.Accessor, 302 | Token.Type("List"), 303 | Token.Punctuation.TypeParameter.Begin, 304 | Token.PrimitiveType.Int, 305 | Token.Punctuation.Asterisk, 306 | Token.Punctuation.OpenBracket, 307 | Token.Punctuation.CloseBracket, 308 | Token.Punctuation.TypeParameter.End, 309 | Token.Punctuation.Semicolon 310 | ]); 311 | }); 312 | 313 | it("unsafe 
using alias", async () => { 314 | const input = `using unsafe blah = System.Collections.Generic.List;`; 315 | const tokens = await tokenize(input); 316 | 317 | tokens.should.deep.equal([ 318 | Token.Keyword.Directive.Using, 319 | Token.Keyword.Modifier.Unsafe, 320 | Token.Identifier.AliasName("blah"), 321 | Token.Operator.Assignment, 322 | Token.Type("System"), 323 | Token.Punctuation.Accessor, 324 | Token.Type("Collections"), 325 | Token.Punctuation.Accessor, 326 | Token.Type("Generic"), 327 | Token.Punctuation.Accessor, 328 | Token.Type("List"), 329 | Token.Punctuation.TypeParameter.Begin, 330 | Token.PrimitiveType.Int, 331 | Token.Punctuation.Asterisk, 332 | Token.Punctuation.OpenBracket, 333 | Token.Punctuation.CloseBracket, 334 | Token.Punctuation.TypeParameter.End, 335 | Token.Punctuation.Semicolon 336 | ]); 337 | }); 338 | }); 339 | }); 340 | -------------------------------------------------------------------------------- /test/xml-doc-comment.tests.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { should } from 'chai'; 7 | import { tokenize, Input, Token } from './utils/tokenize'; 8 | 9 | describe("XML Doc Comments", () => { 10 | before(() => { should(); }); 11 | 12 | describe("XML Doc Comments", () => { 13 | it("start tag", async () => { 14 | const input = `/// `; 15 | const tokens = await tokenize(input); 16 | 17 | tokens.should.deep.equal([ 18 | Token.XmlDocComment.Begin, 19 | Token.XmlDocComment.Text(" "), 20 | Token.XmlDocComment.Tag.StartTagBegin, 21 | Token.XmlDocComment.Tag.Name("summary"), 22 | Token.XmlDocComment.Tag.StartTagEnd 23 | ]); 24 | }); 25 | 26 | it("end tag", async () => { 27 | const input = `/// `; 28 | const tokens = await tokenize(input); 29 | 30 | tokens.should.deep.equal([ 31 | Token.XmlDocComment.Begin, 32 | Token.XmlDocComment.Text(" "), 33 | Token.XmlDocComment.Tag.EndTagBegin, 34 | Token.XmlDocComment.Tag.Name("summary"), 35 | Token.XmlDocComment.Tag.EndTagEnd 36 | ]); 37 | }); 38 | 39 | it("start & end tag with content", async () => { 40 | const input = ` 41 | /// 42 | /// Text 43 | /// `; 44 | const tokens = await tokenize(input); 45 | 46 | tokens.should.deep.equal([ 47 | Token.XmlDocComment.Begin, 48 | Token.XmlDocComment.Text(" "), 49 | Token.XmlDocComment.Tag.StartTagBegin, 50 | Token.XmlDocComment.Tag.Name("summary"), 51 | Token.XmlDocComment.Tag.StartTagEnd, 52 | Token.XmlDocComment.Begin, 53 | Token.XmlDocComment.Text(" Text"), 54 | Token.XmlDocComment.Begin, 55 | Token.XmlDocComment.Text(" "), 56 | Token.XmlDocComment.Tag.EndTagBegin, 57 | Token.XmlDocComment.Tag.Name("summary"), 58 | Token.XmlDocComment.Tag.EndTagEnd 59 | ]); 60 | }); 61 | 62 | it("empty tag", async () => { 63 | const input = `/// `; 64 | const tokens = await tokenize(input); 65 | 66 | tokens.should.deep.equal([ 67 | Token.XmlDocComment.Begin, 68 | Token.XmlDocComment.Text(" "), 69 | Token.XmlDocComment.Tag.EmptyTagBegin, 70 | 
Token.XmlDocComment.Tag.Name("summary"), 71 | Token.XmlDocComment.Tag.EmptyTagEnd 72 | ]); 73 | }); 74 | 75 | it("start tag with attribute and single-quoted string", async () => { 76 | const input = `/// `; 77 | const tokens = await tokenize(input); 78 | 79 | tokens.should.deep.equal([ 80 | Token.XmlDocComment.Begin, 81 | Token.XmlDocComment.Text(" "), 82 | Token.XmlDocComment.Tag.StartTagBegin, 83 | Token.XmlDocComment.Tag.Name("param"), 84 | Token.XmlDocComment.Attribute.Name("name"), 85 | Token.XmlDocComment.Equals, 86 | Token.XmlDocComment.String.SingleQuoted.Begin, 87 | Token.XmlDocComment.String.SingleQuoted.Text("x"), 88 | Token.XmlDocComment.String.SingleQuoted.End, 89 | Token.XmlDocComment.Tag.StartTagEnd 90 | ]); 91 | }); 92 | 93 | it("start tag with attribute and double-quoted string", async () => { 94 | const input = `/// `; 95 | const tokens = await tokenize(input); 96 | 97 | tokens.should.deep.equal([ 98 | Token.XmlDocComment.Begin, 99 | Token.XmlDocComment.Text(" "), 100 | Token.XmlDocComment.Tag.StartTagBegin, 101 | Token.XmlDocComment.Tag.Name("param"), 102 | Token.XmlDocComment.Attribute.Name("name"), 103 | Token.XmlDocComment.Equals, 104 | Token.XmlDocComment.String.DoubleQuoted.Begin, 105 | Token.XmlDocComment.String.DoubleQuoted.Text("x"), 106 | Token.XmlDocComment.String.DoubleQuoted.End, 107 | Token.XmlDocComment.Tag.StartTagEnd 108 | ]); 109 | }); 110 | 111 | it("comment", async () => { 112 | const input = `/// `; 113 | const tokens = await tokenize(input); 114 | 115 | tokens.should.deep.equal([ 116 | Token.XmlDocComment.Begin, 117 | Token.XmlDocComment.Text(" "), 118 | Token.XmlDocComment.Comment.Begin, 119 | Token.XmlDocComment.Comment.Text(" comment "), 120 | Token.XmlDocComment.Comment.End 121 | ]); 122 | }); 123 | 124 | it("cdata", async () => { 125 | const input = `/// `; 126 | const tokens = await tokenize(input); 127 | 128 | tokens.should.deep.equal([ 129 | Token.XmlDocComment.Begin, 130 | Token.XmlDocComment.Text(" "), 131 | 
Token.XmlDocComment.CData.Begin, 132 | Token.XmlDocComment.CData.Text("c"), 133 | Token.XmlDocComment.CData.End 134 | ]); 135 | }); 136 | 137 | it("character entity - name", async () => { 138 | const input = `/// &`; 139 | const tokens = await tokenize(input); 140 | 141 | tokens.should.deep.equal([ 142 | Token.XmlDocComment.Begin, 143 | Token.XmlDocComment.Text(" "), 144 | Token.XmlDocComment.CharacterEntity.Begin, 145 | Token.XmlDocComment.CharacterEntity.Text("amp"), 146 | Token.XmlDocComment.CharacterEntity.End 147 | ]); 148 | }); 149 | 150 | it("character entity - decimal", async () => { 151 | const input = `/// &`; 152 | const tokens = await tokenize(input); 153 | 154 | tokens.should.deep.equal([ 155 | Token.XmlDocComment.Begin, 156 | Token.XmlDocComment.Text(" "), 157 | Token.XmlDocComment.CharacterEntity.Begin, 158 | Token.XmlDocComment.CharacterEntity.Text("#0038"), 159 | Token.XmlDocComment.CharacterEntity.End 160 | ]); 161 | }); 162 | 163 | it("character entity - hdex", async () => { 164 | const input = `/// &`; 165 | const tokens = await tokenize(input); 166 | 167 | tokens.should.deep.equal([ 168 | Token.XmlDocComment.Begin, 169 | Token.XmlDocComment.Text(" "), 170 | Token.XmlDocComment.CharacterEntity.Begin, 171 | Token.XmlDocComment.CharacterEntity.Text("#x0026"), 172 | Token.XmlDocComment.CharacterEntity.End 173 | ]); 174 | }); 175 | 176 | it("XML doc comments are highlighted properly on enum members (issue omnisharp-vscode#706)", async () => { 177 | const input = ` 178 | /// This is a test Enum 179 | public enum TestEnum 180 | { 181 | /// Test Value One 182 | TestValueOne= 0, 183 | /// Test Value Two 184 | TestValueTwo = 1 185 | }`; 186 | 187 | const tokens = await tokenize(input); 188 | 189 | tokens.should.deep.equal([ 190 | Token.XmlDocComment.Begin, 191 | Token.XmlDocComment.Text(" "), 192 | Token.XmlDocComment.Tag.StartTagBegin, 193 | Token.XmlDocComment.Tag.Name("summary"), 194 | Token.XmlDocComment.Tag.StartTagEnd, 195 | 
Token.XmlDocComment.Text(" This is a test Enum "), 196 | Token.XmlDocComment.Tag.EndTagBegin, 197 | Token.XmlDocComment.Tag.Name("summary"), 198 | Token.XmlDocComment.Tag.EndTagEnd, 199 | Token.Keyword.Modifier.Public, 200 | Token.Keyword.Definition.Enum, 201 | Token.Identifier.EnumName("TestEnum"), 202 | Token.Punctuation.OpenBrace, 203 | Token.Comment.LeadingWhitespace(" "), 204 | Token.XmlDocComment.Begin, 205 | Token.XmlDocComment.Text(" "), 206 | Token.XmlDocComment.Tag.StartTagBegin, 207 | Token.XmlDocComment.Tag.Name("summary"), 208 | Token.XmlDocComment.Tag.StartTagEnd, 209 | Token.XmlDocComment.Text(" Test Value One "), 210 | Token.XmlDocComment.Tag.EndTagBegin, 211 | Token.XmlDocComment.Tag.Name("summary"), 212 | Token.XmlDocComment.Tag.EndTagEnd, 213 | Token.Identifier.EnumMemberName("TestValueOne"), 214 | Token.Operator.Assignment, 215 | Token.Literal.Numeric.Decimal("0"), 216 | Token.Punctuation.Comma, 217 | Token.Comment.LeadingWhitespace(" "), 218 | Token.XmlDocComment.Begin, 219 | Token.XmlDocComment.Text(" "), 220 | Token.XmlDocComment.Tag.StartTagBegin, 221 | Token.XmlDocComment.Tag.Name("summary"), 222 | Token.XmlDocComment.Tag.StartTagEnd, 223 | Token.XmlDocComment.Text(" Test Value Two "), 224 | Token.XmlDocComment.Tag.EndTagBegin, 225 | Token.XmlDocComment.Tag.Name("summary"), 226 | Token.XmlDocComment.Tag.EndTagEnd, 227 | Token.Identifier.EnumMemberName("TestValueTwo"), 228 | Token.Operator.Assignment, 229 | Token.Literal.Numeric.Decimal("1"), 230 | Token.Punctuation.CloseBrace 231 | ]); 232 | }); 233 | 234 | it("Delimited XML Doc (issue #151)", async () => { 235 | const input = ` 236 | /** 237 | * 238 | */`; 239 | const tokens = await tokenize(input); 240 | 241 | tokens.should.deep.equal([ 242 | Token.Comment.LeadingWhitespace(" "), 243 | Token.XmlDocComment.BeginDelim, 244 | Token.Comment.LeadingWhitespace(" "), 245 | Token.XmlDocComment.Delim, 246 | Token.XmlDocComment.Text(" "), 247 | Token.XmlDocComment.Tag.EmptyTagBegin, 248 | 
Token.XmlDocComment.Tag.Name("summary"), 249 | Token.XmlDocComment.Tag.EmptyTagEnd, 250 | Token.Comment.LeadingWhitespace(" "), 251 | Token.XmlDocComment.End, 252 | ]); 253 | }); 254 | 255 | it("Single line across multiple lines", async () => { 256 | const input = ` 257 | /// `; 260 | const tokens = await tokenize(input); 261 | 262 | tokens.should.deep.equal([ 263 | Token.Comment.LeadingWhitespace(" "), 264 | Token.XmlDocComment.Begin, 265 | Token.XmlDocComment.Text(" "), 266 | Token.XmlDocComment.Tag.StartTagBegin, 267 | Token.XmlDocComment.Tag.Name("param"), 268 | Token.Comment.LeadingWhitespace(" "), 269 | Token.XmlDocComment.Begin, 270 | Token.XmlDocComment.Attribute.Name("name"), 271 | Token.XmlDocComment.Equals, 272 | Token.XmlDocComment.String.DoubleQuoted.Begin, 273 | Token.XmlDocComment.String.DoubleQuoted.Text("value"), 274 | Token.XmlDocComment.String.DoubleQuoted.End, 275 | Token.Comment.LeadingWhitespace(" "), 276 | Token.XmlDocComment.Begin, 277 | Token.XmlDocComment.Tag.StartTagEnd 278 | ]); 279 | }); 280 | 281 | it("Delimited across multiple lines", async () => { 282 | const input = ` 283 | /** 284 | * 287 | */`; 288 | const tokens = await tokenize(input); 289 | 290 | tokens.should.deep.equal([ 291 | Token.Comment.LeadingWhitespace(" "), 292 | Token.XmlDocComment.BeginDelim, 293 | Token.Comment.LeadingWhitespace(" "), 294 | Token.XmlDocComment.Delim, 295 | Token.XmlDocComment.Text(" "), 296 | Token.XmlDocComment.Tag.StartTagBegin, 297 | Token.XmlDocComment.Tag.Name("param"), 298 | Token.Comment.LeadingWhitespace(" "), 299 | Token.XmlDocComment.Delim, 300 | Token.XmlDocComment.Attribute.Name("name"), 301 | Token.XmlDocComment.Equals, 302 | Token.XmlDocComment.String.DoubleQuoted.Begin, 303 | Token.XmlDocComment.String.DoubleQuoted.Text("value"), 304 | Token.XmlDocComment.String.DoubleQuoted.End, 305 | Token.Comment.LeadingWhitespace(" "), 306 | Token.XmlDocComment.Delim, 307 | Token.XmlDocComment.Tag.StartTagEnd, 308 | 
Token.Comment.LeadingWhitespace(" "), 309 | Token.XmlDocComment.End, 310 | ]); 311 | }); 312 | }); 313 | }); -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "module": "commonjs", 5 | "outDir": "out", 6 | "sourceMap": true, 7 | "rootDir": ".", 8 | "noImplicitReturns": true, 9 | "skipLibCheck": true 10 | }, 11 | "compileOnSave": true, 12 | "exclude": [ 13 | "node_modules" 14 | ] 15 | } -------------------------------------------------------------------------------- /wallaby.conf.js: -------------------------------------------------------------------------------- 1 | module.exports = function () { 2 | return { 3 | files: [ 4 | 'src/**/*.ts', 5 | 'test/**/*.ts', 6 | '!test/**/*.tests.ts', 7 | { pattern: 'src/**/*.yml', instrument: false, load: false, ignore: false }, 8 | { pattern: 'grammars/*.*', instrument: false, load: false, ignore: false } 9 | ], 10 | 11 | tests: [ 12 | 'test/**/*.tests.ts' 13 | ], 14 | 15 | env: { 16 | type: 'node' 17 | } 18 | }; 19 | }; --------------------------------------------------------------------------------