├── .gitignore
├── VSCUnity
└── vs.language.shader
│ ├── shaderMain.js
│ ├── ticino.plugin.json
│ ├── shaderDef.js
│ └── tests
│ └── shaderTests.js
├── README.md
├── LICENSE.txt
└── Assets
└── Editor
└── VSCodeUnity
├── CopyDirectory.cs
└── VSCodeUnity.cs
/.gitignore:
--------------------------------------------------------------------------------
1 | Temp
2 | ProjectSettings
3 | Library
4 | *.meta
5 | *.userprefs
6 | *.csproj
7 | *.sln
8 |
--------------------------------------------------------------------------------
/VSCUnity/vs.language.shader/shaderMain.js:
--------------------------------------------------------------------------------
/*---------------------------------------------------------
 * Copyright (C) Microsoft Corporation. All rights reserved.
 *--------------------------------------------------------*/
/// NOTE(review): presumably a `/// <reference .../>` directive whose angle-bracketed
/// content was lost in this dump — confirm against the original file.
'use strict';
// AMD entry point of the plugin: registers the Monarch tokenizer exported by
// ./shaderDef.js under the 'shader' mode id declared in ticino.plugin.json.
define(["require", "exports", './shaderDef', 'monaco'], function (require, exports, languageDef, monaco) {
    monaco.Modes.registerMonarchDefinition('shader', languageDef.language);
});
9 |
--------------------------------------------------------------------------------
/VSCUnity/vs.language.shader/ticino.plugin.json:
--------------------------------------------------------------------------------
1 | {
2 | "pluginId": "vs.language.shader",
3 | "activationEvents": ["textModel:shader"],
4 | "mainModule": "./shaderMain",
5 | "contributes": {
6 | "language": [{
7 | "id": "shader",
8 | "extensions": [ ".shader" ],
9 | "aliases": [ "shader" ]
10 | }]
11 | },
12 | "scripts": {
13 | "compile": "node ../../node_modules/gulp/bin/gulp.js --gulpfile ../gulpfile.js compile --dir vs.language.shader"
14 | },
15 | "_testing": "./tests/shaderTests",
16 | "_bundling": [
17 | { "name": "./shaderMain" }
18 | ]
19 | }
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # VSCUnity
2 | Unity3D editor plugin to make Unity projects Visual Studio Code compatible and add Unity/shader syntax highlighting.
3 |
4 | Add to /Assets/Editor/VSCodeUnity and then choose "Update project for Visual Studio Code" from the "VS Code" menu.
5 |
6 | Choose "Add Unity symbol highlighting to Visual Studio Code" to update Visual Studio Code's C# syntax highlighting with all Unity specific types.
7 |
8 | Choose "Add Unity shader plugin to Visual Studio Code" to add Unity .shader syntax support to Visual Studio Code. (Make sure you copy the /VSCUnity folder to your Unity project's root folder first as the script expects it to be in that location when installing).
9 |
10 |
11 | Prompted from @prime_31's note that Xamarin projects open correctly in Visual Studio Code and my general desire to nuke MonoDevelop from orbit.
12 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2015, Ben Hopkins (kode80)
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions are met:
6 |
7 | * Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 |
10 | * Redistributions in binary form must reproduce the above copyright notice,
11 | this list of conditions and the following disclaimer in the documentation
12 | and/or other materials provided with the distribution.
13 |
14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
15 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
17 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
18 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
19 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
20 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
21 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
22 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/Assets/Editor/VSCodeUnity/CopyDirectory.cs:
--------------------------------------------------------------------------------
1 | // Taken from MSDN DirectoryInfo example
2 | // https://msdn.microsoft.com/en-us/library/system.io.directoryinfo.aspx
3 |
4 | using System;
5 | using System.IO;
6 |
static class CopyDirectory
{
    // Recursively copies sourceDirectory into targetDirectory, creating the
    // target tree as needed and overwriting any files already present.
    public static void Copy( string sourceDirectory, string targetDirectory)
    {
        CopyAll( new DirectoryInfo( sourceDirectory), new DirectoryInfo( targetDirectory));
    }

    // Copies every file of source into target, then recurses into each
    // subdirectory depth-first.
    private static void CopyAll( DirectoryInfo source, DirectoryInfo target)
    {
        // Make sure the destination exists before writing into it.
        if( !Directory.Exists( target.FullName))
        {
            Directory.CreateDirectory( target.FullName);
        }

        // Files first; overwrite:true matches the MSDN sample's behavior.
        foreach( FileInfo file in source.GetFiles())
        {
            Console.WriteLine(@"Copying {0}\{1}", target.FullName, file.Name);
            file.CopyTo( Path.Combine( target.FullName, file.Name), true);
        }

        // Then mirror each child directory into a freshly created counterpart.
        foreach( DirectoryInfo childDirectory in source.GetDirectories())
        {
            CopyAll( childDirectory, target.CreateSubdirectory( childDirectory.Name));
        }
    }
}
--------------------------------------------------------------------------------
/Assets/Editor/VSCodeUnity/VSCodeUnity.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2015, Ben Hopkins (kode80)
2 | // All rights reserved.
3 | //
4 | // Redistribution and use in source and binary forms, with or without modification,
5 | // are permitted provided that the following conditions are met:
6 | //
7 | // 1. Redistributions of source code must retain the above copyright notice,
8 | // this list of conditions and the following disclaimer.
9 | //
10 | // 2. Redistributions in binary form must reproduce the above copyright notice,
11 | // this list of conditions and the following disclaimer in the documentation
12 | // and/or other materials provided with the distribution.
13 | //
14 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
15 | // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
16 | // MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
17 | // THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
18 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
19 | // OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
20 | // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
21 | // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
22 | // EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
23 |
24 | using UnityEngine;
25 | using UnityEditor;
26 | using System;
27 | using System.IO;
28 | using System.Text;
29 | using System.Reflection;
30 | using System.Collections.Generic;
31 | using System.Linq;
32 |
33 | public class VSCodeUnity
34 | {
35 | private const string VS11VersionString = "Microsoft Visual Studio Solution File, Format Version 11.00\r\n" +
36 | "# Visual Studio 2008\r\n";
37 |
38 | private const string VS12VersionString = "\r\nMicrosoft Visual Studio Solution File, Format Version 12.00\r\n" +
39 | "# Visual Studio 2012";
40 |
41 | //OSX
42 | private const string VSCCSharpDefPathOSX = "/Applications/Visual Studio Code.app/Contents/Resources/app/plugins/vs.language.csharp/csharpDef.js";
43 | private const string VSCPluginsPathOSX = "/Applications/Visual Studio Code.app/Contents/Resources/app/plugins";
44 |
45 | //Win
46 | private readonly static string winUser = Environment.UserName;
47 | private readonly static string VSCCSharpDefPathWin = "\\Users\\" + winUser + "\\AppData\\Local\\Code\\app-0.1.0\\resources\\app\\plugins\\vs.language.csharp\\csharpDef.js";
48 | private readonly static string VSCPluginsPathWin = "\\Users\\" + winUser + "\\AppData\\Local\\Code\\app-0.1.0\\resources\\app\\plugins\\";
49 |
50 |
51 | private const string UnityShaderPluginName = "vs.language.shader";
52 | private const string ModifiedFilesBackupExtension = ".VSCModifiedOriginal";
53 |
54 | [MenuItem("VS Code/Update project for Visual Studio Code")]
55 | private static void UpdateProjectForVSC()
56 | {
57 | string message = "";
58 | string path = Application.dataPath + "/..";
59 |
60 | DirectoryInfo directoryInfo = new DirectoryInfo( path);
61 | FileInfo[] files = directoryInfo.GetFiles();
62 | bool noSolutionFilesFound = true;
63 |
64 | foreach( FileInfo fileInfo in files)
65 | {
66 | if( fileInfo.Extension == ".sln")
67 | {
68 | noSolutionFilesFound = false;
69 |
70 | StreamReader reader = new StreamReader( fileInfo.ToString());
71 | string fileString = reader.ReadToEnd();
72 | reader.Close();
73 |
74 | if( fileString.Contains( VS11VersionString))
75 | {
76 | message += "\n Converting sln: " + fileInfo.Name;
77 | fileString = fileString.Replace( VS11VersionString, VS12VersionString);
78 |
79 | Stream stream = File.OpenWrite( fileInfo.ToString());
80 | StreamWriter writer = new StreamWriter( stream, new UTF8Encoding( true));
81 | writer.Write( fileString);
82 | writer.Close();
83 |
84 | if( fileInfo.Name.Contains( "-csharp"))
85 | {
86 | string oldPath = fileInfo.ToString();
87 | string newPath = oldPath.Replace( ".sln", ".sln.hide");
88 | File.Move( oldPath, newPath);
89 | }
90 | }
91 | else if( fileString.Contains( VS12VersionString))
92 | {
93 | message += "\n Skipping converted sln: " + fileInfo.Name;
94 | }
95 | else
96 | {
97 | message += "\n Skipping unknown sln format: " + fileInfo.Name;
98 | }
99 | }
100 | }
101 |
102 | if( noSolutionFilesFound)
103 | {
104 | message = "No .sln files found in project. Open a script in MonoDevelop to autogenerate .sln " +
105 | "files and then try updating for Visual Studio Code again.";
106 | }
107 |
108 | EditorUtility.DisplayDialog( "Update project", message, "Ok");
109 | }
110 |
111 | [MenuItem("VS Code/Add Unity symbol highlighting to Visual Studio Code")]
112 | private static void AddUnitySymbolsToVSC()
113 | {
114 | string cSharpDefPath = null;
115 | string message = null;
116 |
117 | if( Application.platform == RuntimePlatform.OSXEditor)
118 | {
119 | cSharpDefPath = FindCSharpDefPathOSX();
120 | }
121 | else if (Application.platform == RuntimePlatform.WindowsEditor)
122 | {
123 | cSharpDefPath = FindCSharpDefPathWin();
124 | }
125 | else
126 | {
127 | message = "Adding Unity symbols to Visual Studio Code is not currently supported on " + Application.platform;
128 | }
129 |
130 | bool supportedPlatform = message == null;
131 | if( supportedPlatform)
132 | {
133 | if( cSharpDefPath == null)
134 | {
135 | message = "Couldn't find Visual Studio Code application or CSharpDef";
136 | }
137 | else
138 | {
139 | RestoreModifiedFileBackupIfExists( cSharpDefPath);
140 | BackupFileBeforeModification( cSharpDefPath);
141 |
142 | List classNames = GetPublicClassesInNamespaces( "UnityEngine", "UnityEditor");
143 | bool success = AddClassNamesToCSharpDef( classNames, cSharpDefPath);
144 |
145 | message = success ? "Successfully added Unity symbols to Visual Studio Code" :
146 | "Couldn't add Unity symbols to Visual Studio Code, couldn't find keywords array";
147 | }
148 | }
149 |
150 | EditorUtility.DisplayDialog( "Add Unity symbols", message, "Ok");
151 | }
152 |
153 | [MenuItem("VS Code/Add Unity shader plugin to Visual Studio Code")]
154 | private static void AddUnityShaderPluginToVSC()
155 | {
156 | string message = null;
157 | string pluginSourcePath = null;
158 | string pluginInstallPath = null;
159 |
160 | //OSX
161 | if( Application.platform == RuntimePlatform.OSXEditor)
162 | {
163 | pluginSourcePath = Application.dataPath + "/../VSCUnity/" + UnityShaderPluginName;
164 | pluginInstallPath = VSCPluginsPathOSX + "/" + UnityShaderPluginName;
165 | }
166 |
167 | //Win
168 | else if( Application.platform == RuntimePlatform.WindowsEditor)
169 | {
170 | pluginSourcePath = Application.dataPath + "\\..\\VSCUnity\\" + UnityShaderPluginName;
171 | pluginInstallPath = VSCPluginsPathWin + "\\" + UnityShaderPluginName;
172 | }
173 | else
174 | {
175 | message = "Adding Unity shader plugin to Visual Studio Code is not currently supported on " + Application.platform;
176 | }
177 |
178 | bool supportedPlatform = message == null;
179 | if( supportedPlatform)
180 | {
181 | if( Directory.Exists( pluginSourcePath))
182 | {
183 | if( Directory.Exists( pluginInstallPath))
184 | {
185 | Directory.Delete( pluginInstallPath, true);
186 | }
187 |
188 | CopyDirectory.Copy( pluginSourcePath, pluginInstallPath);
189 |
190 | message = "Successfully added Unity shader plugin to Visual Studio Code";
191 | }
192 | else
193 | {
194 | message = "Couldn't find the Unity shader plugin:\n\n" + pluginSourcePath;
195 | }
196 | }
197 |
198 | EditorUtility.DisplayDialog( "Add Unity shader plugin", message, "Ok");
199 | }
200 |
201 | //OSX
202 | private static string FindCSharpDefPathOSX()
203 | {
204 | if( File.Exists( VSCCSharpDefPathOSX))
205 | {
206 | return VSCCSharpDefPathOSX;
207 | }
208 | else
209 | {
210 | string userPath = Environment.GetFolderPath( Environment.SpecialFolder.Personal);
211 | string cSharpDefUserPath = userPath + VSCCSharpDefPathOSX;
212 |
213 | if( File.Exists( cSharpDefUserPath))
214 | {
215 | return cSharpDefUserPath;
216 | }
217 | }
218 |
219 | return null;
220 | }
221 |
222 | //Win
223 | private static string FindCSharpDefPathWin()
224 | {
225 | if( File.Exists( VSCCSharpDefPathWin))
226 | {
227 | return VSCCSharpDefPathWin;
228 | }
229 | else
230 | {
231 | string userPath = Environment.GetFolderPath( Environment.SpecialFolder.Personal);
232 | string cSharpDefUserPath = userPath + VSCCSharpDefPathWin;
233 |
234 | if( File.Exists( cSharpDefUserPath))
235 | {
236 | return cSharpDefUserPath;
237 | }
238 | }
239 |
240 | return null;
241 | }
242 |
    // Collects the short names (namespace prefix stripped) of every public
    // class whose namespace matches one of the given namespaces, scanning all
    // assemblies loaded in the editor's AppDomain.
    // NOTE(review): this block appears corrupted in this dump — the generic
    // type arguments (presumably List<string> and IEnumerable<Type>) were
    // stripped, and the for-loop header through the SelectMany call was
    // collapsed (original lines 252-254 are missing here). Recover the intact
    // source from version control before editing this method.
    private static List GetPublicClassesInNamespaces( params string[] namespaces)
    {
        List classNames = new List();
        Assembly[] assemblies = AppDomain.CurrentDomain.GetAssemblies();
        IEnumerable types;
        string currentNamespace;
        string className;

        // NOTE(review): mangled span — originally iterated `namespaces`,
        // flattened each assembly's types, and filtered by namespace and
        // public visibility; confirm against the original file.
        for( int i=0; i t.GetTypes())
            .Where(t => (t.IsClass || t.IsAnsiClass) && t.Namespace == currentNamespace && t.IsPublic);

            foreach( Type type in types)
            {
                // Keep only the bare class name, e.g. "UnityEngine.GameObject" -> "GameObject".
                className = type.ToString().Replace( currentNamespace + ".", "");
                classNames.Add( className);
            }
        }

        return classNames;
    }
266 |
267 | private static void RestoreModifiedFileBackupIfExists( string originalPath)
268 | {
269 | string backupPath = originalPath + ModifiedFilesBackupExtension;
270 | if( File.Exists( backupPath))
271 | {
272 | File.Copy( backupPath, originalPath, true);
273 | File.Delete( backupPath);
274 | }
275 | }
276 |
277 | private static void BackupFileBeforeModification( string originalPath)
278 | {
279 | string backupPath = originalPath + ModifiedFilesBackupExtension;
280 | File.Copy( originalPath, backupPath, true);
281 | }
282 |
283 | private static bool AddClassNamesToCSharpDef( List classNames, string cSharpDefPath)
284 | {
285 | StreamReader reader = new StreamReader( cSharpDefPath);
286 | string cSharpDef = reader.ReadToEnd();
287 | reader.Close();
288 |
289 | string keywordsArrayMarker = "keywords: [";
290 | if( cSharpDef.IndexOf( keywordsArrayMarker) > -1)
291 | {
292 | string linePrefix = "\r\n\t\t\t";
293 | string keywordString = keywordsArrayMarker + linePrefix + "//-- BEGIN UNITY SYMBOLS --//";
294 |
295 | foreach( string className in classNames)
296 | {
297 | keywordString += linePrefix + "\'" + className + "\',";
298 | }
299 |
300 | keywordString += linePrefix + "//-- END UNITY SYMBOLS --//";
301 |
302 | cSharpDef = cSharpDef.Replace( keywordsArrayMarker, keywordString);
303 |
304 | Stream stream = File.OpenWrite( cSharpDefPath);
305 | StreamWriter writer = new StreamWriter( stream, new UTF8Encoding( true));
306 | writer.Write( cSharpDef);
307 | writer.Close();
308 |
309 | return true;
310 | }
311 |
312 | return false;
313 | }
314 | }
315 |
--------------------------------------------------------------------------------
/VSCUnity/vs.language.shader/shaderDef.js:
--------------------------------------------------------------------------------
1 | /*---------------------------------------------------------
2 | * Copyright (C) Microsoft Corporation. All rights reserved.
3 | *--------------------------------------------------------*/
4 | // Cpp language plugin modified by Ben Hopkins (kode80) for Unity shader support
5 | ///
6 | 'use strict';
7 | define(["require", "exports"], function (require, exports) {
8 | exports.language = {
9 | displayName: 'Unity Shader',
10 | name: 'shader',
11 | mimeTypes: [],
12 | defaultToken: '',
13 | lineComment: '//',
14 | blockCommentStart: '/*',
15 | blockCommentEnd: '*/',
16 | autoClosingPairs: [['{', '}'], ['[', ']'], ['(', ')'], ['"', '"']],
17 | keywords: [
18 | 'struct',
19 | 'discard',
20 | 'return',
21 | 'technique',
22 | 'pass',
23 | 'compile',
24 | 'trunc',
25 | 'arbfp1',
26 | 'arbvp1',
27 | 'fp20',
28 | 'fp30',
29 | 'fp40',
30 | 'glslf',
31 | 'glslg',
32 | 'glslv',
33 | 'gp4',
34 | 'gp4fp',
35 | 'gp4gp',
36 | 'gp4vp',
37 | 'hlslf',
38 | 'hlslv',
39 | 'ps_1_1',
40 | 'ps_1_2',
41 | 'ps_1_3',
42 | 'ps_2_0',
43 | 'ps_2_x',
44 | 'ps_3_0',
45 | 'ps_4_0',
46 | 'vp20',
47 | 'vp30',
48 | 'vp40',
49 | 'vs_4_0',
50 | 'gs_4_0',
51 | 'ps_2_a',
52 | 'vs_2_0',
53 | 'return',
54 | 'bool',
55 | 'const',
56 | 'static',
57 | 'uniform',
58 | 'varying',
59 | 'register',
60 | 'in',
61 | 'inout',
62 | 'interface',
63 | 'out',
64 | 'void',
65 | 'while',
66 | 'for',
67 | 'do',
68 | 'if',
69 | 'else',
70 | 'typedef',
71 | '_SEQ',
72 | '_SGE',
73 | '_SGT',
74 | '_SLE',
75 | '_SLT',
76 | '_SNE',
77 | 'HPOS',
78 | 'POSITION',
79 | 'PSIZ',
80 | 'WPOS',
81 | 'COLOR',
82 | 'COLOR0',
83 | 'COLOR1',
84 | 'COLOR2',
85 | 'COLOR3',
86 | 'COL0',
87 | 'COL1',
88 | 'BCOL0',
89 | 'BCOL1',
90 | 'FOGP',
91 | 'FOGC',
92 | 'NRML',
93 | 'NORMAL',
94 | 'TEXCOORD0',
95 | 'TEXCOORD1',
96 | 'TEXCOORD2',
97 | 'TEXCOORD3',
98 | 'TEXCOORD4',
99 | 'TEXCOORD5',
100 | 'TEXCOORD6',
101 | 'TEXCOORD7',
102 | 'TANGENT0',
103 | 'TANGENT1',
104 | 'TANGENT2',
105 | 'TANGENT3',
106 | 'TANGENT4',
107 | 'TANGENT5',
108 | 'TANGENT6',
109 | 'TANGENT7',
110 | 'TEX0',
111 | 'TEX1',
112 | 'TEX2',
113 | 'TEX3',
114 | 'TEX4',
115 | 'TEX5',
116 | 'TEX6',
117 | 'TEX7',
118 | 'DEPR',
119 | 'DEPTH',
120 | 'ATTR0',
121 | 'ATTR1',
122 | 'ATTR2',
123 | 'ATTR3',
124 | 'ATTR4',
125 | 'ATTR5',
126 | 'ATTR6',
127 | 'ATTR7',
128 | 'ATTR8',
129 | 'ATTR9',
130 | 'ATTR10',
131 | 'ATTR11',
132 | 'ATTR12',
133 | 'ATTR13',
134 | 'ATTR14',
135 | 'ATTR15',
136 | 'POINT',
137 | 'POINT_OUT',
138 | 'LINE',
139 | 'LINE_ADJ',
140 | 'LINE_OUT',
141 | 'TRIANGLE_OUT',
142 | 'TRIANGLE',
143 | 'TRIANGLE_ADJ',
144 | 'BINORMAL0',
145 | 'LPROJ_COORD',
146 | 'SV_CLIPDISTANCE',
147 | 'SV_CULLDISTANCE',
148 | 'SV_COVERAGE',
149 | 'SV_DEPTH',
150 | 'SV_DEPTHGREATEREQUAL',
151 | 'SV_DEPTHLESSEQUAL',
152 | 'SV_DISPATCHTHREADID',
153 | 'SV_DOMAINLOCATION',
154 | 'SV_GROUPID',
155 | 'SV_GROUPINDEX',
156 | 'SV_GROUPTHREADID',
157 | 'SV_GSINSTANCEID',
158 | 'SV_INNERCOVERAGE',
159 | 'SV_INSIDETESSFACTOR',
160 | 'SV_INSTANCEID',
161 | 'SV_ISFRONTFACE',
162 | 'SV_OUTPUTCONTROLPOINTID',
163 | 'SV_POSITION',
164 | 'SV_PRIMITIVEID',
165 | 'SV_RENDERTARGETARRAYINDEX',
166 | 'SV_RENDERTARGETARRAYINDEX',
167 | 'SV_SAMPLEINDEX',
168 | 'SV_STENCILREF',
169 | 'SV_TARGET',
170 | 'SV_TESSFACTOR',
171 | 'SV_VERTEXID',
172 | 'SV_VIEWPORTARRAYINDEX',
173 | 'int1',
174 | 'int2',
175 | 'int3',
176 | 'int4',
177 | 'float',
178 | 'float1',
179 | 'float2',
180 | 'float3',
181 | 'float4',
182 | 'float1x1',
183 | 'float1x2',
184 | 'float1x3',
185 | 'float1x4',
186 | 'float2x1',
187 | 'float2x2',
188 | 'float2x3',
189 | 'float2x4',
190 | 'float3x1',
191 | 'float3x2',
192 | 'float3x3',
193 | 'float3x4',
194 | 'float4x1',
195 | 'float4x2',
196 | 'float4x3',
197 | 'float4x4',
198 | 'fixed',
199 | 'fixed1',
200 | 'fixed2',
201 | 'fixed3',
202 | 'fixed4',
203 | 'half',
204 | 'half1',
205 | 'half2',
206 | 'half3',
207 | 'half4',
208 | 'string',
209 | 'int',
210 | 'sincos',
211 | 'abs',
212 | 'acos',
213 | 'asin',
214 | 'atan',
215 | 'atan2',
216 | 'ceil',
217 | 'clamp',
218 | 'cos',
219 | 'cosh',
220 | 'cross',
221 | 'ddx',
222 | 'ddy',
223 | 'degrees',
224 | 'dot',
225 | 'exp',
226 | 'exp2',
227 | 'floor',
228 | 'fmod',
229 | 'frexp',
230 | 'frac',
231 | 'isfinite',
232 | 'isinf',
233 | 'isnan',
234 | 'ldexp',
235 | 'log',
236 | 'log2',
237 | 'log10',
238 | 'max',
239 | 'min',
240 | 'mix',
241 | 'mul',
242 | 'lerp',
243 | 'modf',
244 | 'noise',
245 | 'pow',
246 | 'radians',
247 | 'round',
248 | 'rsqrt',
249 | 'sign',
250 | 'sin',
251 | 'sinh',
252 | 'smoothstep',
253 | 'step',
254 | 'sqrt',
255 | 'tan',
256 | 'tanh',
257 | 'distance',
258 | 'fresnel',
259 | 'length',
260 | 'normalize',
261 | 'reflect',
262 | 'reflectn',
263 | 'refract',
264 | 'refractn',
265 | 'tex1D',
266 | 'f1tex1D',
267 | 'f2tex1D',
268 | 'f3tex1D',
269 | 'f4tex1D',
270 | 'h1tex1D',
271 | 'h2tex1D',
272 | 'h3tex1D',
273 | 'h4tex1D',
274 | 'x1tex1D',
275 | 'x2tex1D',
276 | 'x3tex1D',
277 | 'x4tex1D',
278 | 'tex1Dbias',
279 | 'tex2Dbias',
280 | 'tex3Dbias',
281 | 'texRECTbias',
282 | 'texCUBEbias',
283 | 'tex1Dlod',
284 | 'tex2Dlod',
285 | 'tex3Dlod',
286 | 'texRECTlod',
287 | 'texCUBElod',
288 | 'tex1Dproj',
289 | 'f1tex1Dproj',
290 | 'f2tex1Dproj',
291 | 'f3tex1Dproj',
292 | 'f4tex1Dproj',
293 | 'h1tex1Dproj',
294 | 'h2tex1Dproj',
295 | 'h3tex1Dproj',
296 | 'h4tex1Dproj',
297 | 'x1tex1Dproj',
298 | 'x2tex1Dproj',
299 | 'x3tex1Dproj',
300 | 'x4tex1Dproj',
301 | 'tex2D',
302 | 'f1tex2D',
303 | 'f2tex2D',
304 | 'f3tex2D',
305 | 'f4tex2D',
306 | 'h1tex2D',
307 | 'h2tex2D',
308 | 'h3tex2D',
309 | 'h4tex2D',
310 | 'x1tex2D',
311 | 'x2tex2D',
312 | 'x3tex2D',
313 | 'x4tex2D',
314 | 'tex2Dproj',
315 | 'f1tex2Dproj',
316 | 'f2tex2Dproj',
317 | 'f3tex2Dproj',
318 | 'f4tex2Dproj',
319 | 'h1tex2Dproj',
320 | 'h2tex2Dproj',
321 | 'h3tex2Dproj',
322 | 'h4tex2Dproj',
323 | 'x1tex2Dproj',
324 | 'x2tex2Dproj',
325 | 'x3tex2Dproj',
326 | 'x4tex2Dproj',
327 | 'tex3D',
328 | 'f1tex3D',
329 | 'f2tex3D',
330 | 'f3tex3D',
331 | 'f4tex3D',
332 | 'h1tex3D',
333 | 'h2tex3D',
334 | 'h3tex3D',
335 | 'h4tex3D',
336 | 'x1tex3D',
337 | 'x2tex3D',
338 | 'x3tex3D',
339 | 'x4tex3D',
340 | 'tex3Dproj',
341 | 'f1tex3Dproj',
342 | 'f2tex3Dproj',
343 | 'f3tex3Dproj',
344 | 'f4tex3Dproj',
345 | 'h1tex3Dproj',
346 | 'h2tex3Dproj',
347 | 'h3tex3Dproj',
348 | 'h4tex3Dproj',
349 | 'x1tex3Dproj',
350 | 'x2tex3Dproj',
351 | 'x3tex3Dproj',
352 | 'x4tex3Dproj',
353 | 'tex1CUBE',
354 | 'f1texCUBE',
355 | 'f2texCUBE',
356 | 'f3texCUBE',
357 | 'f4texCUBE',
358 | 'h1texCUBE',
359 | 'h2texCUBE',
360 | 'h3texCUBE',
361 | 'h4texCUBE',
362 | 'x1texCUBE',
363 | 'x2texCUBE',
364 | 'x3texCUBE',
365 | 'x4texCUBE',
366 | 'texCUBEproj',
367 | 'f1texCUBEproj',
368 | 'f2texCUBEproj',
369 | 'f3texCUBEproj',
370 | 'f4texCUBEproj',
371 | 'h1texCUBEproj',
372 | 'h2texCUBEproj',
373 | 'h3texCUBEproj',
374 | 'h4texCUBEproj',
375 | 'x1texCUBEproj',
376 | 'x2texCUBEproj',
377 | 'x3texCUBEproj',
378 | 'x4texCUBEproj',
379 | 'f1texCUBE',
380 | 'f2texCUBE',
381 | 'f3texCUBE',
382 | 'f4texCUBE',
383 | 'h1texCUBE',
384 | 'h2texCUBE',
385 | 'h3texCUBE',
386 | 'h4texCUBE',
387 | 'x1texCUBE',
388 | 'x2texCUBE',
389 | 'x3texCUBE',
390 | 'x4texCUBE',
391 | 'texRECT',
392 | 'f1texRECT',
393 | 'f2texRECT',
394 | 'f3texRECT',
395 | 'f4texRECT',
396 | 'h1texRECT',
397 | 'h2texRECT',
398 | 'h3texRECT',
399 | 'h4texRECT',
400 | 'x1texRECT',
401 | 'x2texRECT',
402 | 'x3texRECT',
403 | 'x4texRECT',
404 | 'texRECTproj',
405 | 'f1texRECTproj',
406 | 'f2texRECTproj',
407 | 'f3texRECTproj',
408 | 'f4texRECTproj',
409 | 'h1texRECTproj',
410 | 'h2texRECTproj',
411 | 'h3texRECTproj',
412 | 'h4texRECTproj',
413 | 'x1texRECTproj',
414 | 'x2texRECTproj',
415 | 'x3texRECTproj',
416 | 'x4texRECTproj',
417 | 'f1texRECT',
418 | 'f2texRECT',
419 | 'f3texRECT',
420 | 'f4texRECT',
421 | 'h1texRECT',
422 | 'h2texRECT',
423 | 'h3texRECT',
424 | 'h4texRECT',
425 | 'x1texRECT',
426 | 'x2texRECT',
427 | 'x3texRECT',
428 | 'x4texRECT',
429 | 'texcompare2D',
430 | 'f1texcompare2D',
431 | 'f1texcompare2D',
432 | 'f1texcompare2D',
433 | 'h1texcompare2D',
434 | 'h1texcompare2D',
435 | 'h1texcompare2D',
436 | 'x1texcompare2D',
437 | 'x1texcompare2D',
438 | 'x1texcompare2D',
439 | 'pack_2half',
440 | 'unpack_2half',
441 | 'pack_4clamp1s',
442 | 'unpack_4clamp1s',
443 | 'application2vertex',
444 | 'vertex2fragment',
445 | 'sampler1D',
446 | 'sampler1DARRAY',
447 | 'sampler2D',
448 | 'sampler2DARRAY',
449 | 'sampler3D',
450 | 'samplerCUBE',
451 | 'samplerRECT',
452 | 'texture',
453 | 'texture2D',
454 | 'sampler_state',
455 | 'IN',
456 | 'OUT',
457 | 'CullMode',
458 | 'DepthEnable',
459 | 'BlendEnable',
460 | 'SetVertexShader',
461 | 'SetGeometryShader',
462 | 'SetPixelShader',
463 | 'SetRasterizerState',
464 | 'SetDepthStencilState',
465 | 'SetBlendState',
466 | 'VertexShader',
467 | 'ZEnable',
468 | 'ZWriteEnable',
469 | 'ZFunc',
470 | 'AlphaBlendEnable',
471 | 'PixelShader',
472 | 'VertexProgram',
473 | 'DepthTestEnable',
474 | 'DepthMask',
475 | 'CullFaceEnable',
476 | 'DepthFunc',
477 | 'FragmentProgram',
478 | 'DepthWriteMask',
479 | 'cullmode',
480 | 'LinearMipMapLinear',
481 | 'Linear',
482 | 'Repeat',
483 | 'DisableCulling',
484 | 'DepthEnabling',
485 | 'DisableBlend',
486 | 'true',
487 | 'LessEqual',
488 | 'none',
489 | 'false',
490 | 'None',
491 | 'RasterizerState',
492 | 'DepthStencilState',
493 | 'BlendState',
494 | 'Wrap',
495 | 'MIN_MAG_MIP_LINEAR',
496 | 'ENVIRONMENT',
497 | 'Clamp',
498 | 'Ambient',
499 | 'Position',
500 | 'Specular',
501 | 'SpecularPower',
502 | 'DIFFUSE',
503 | 'STANDARDSGLOBAL',
504 | 'LEqual',
505 | 'VIEWPORTPIXELSIZE',
506 | 'RENDERCOLORTARGET',
507 | 'RENDERDEPTHSTENCILTARGET'
508 | ],
509 | operators: [
510 | '=',
511 | '>',
512 | '<',
513 | '!',
514 | '~',
515 | '?',
516 | ':',
517 | '==',
518 | '<=',
519 | '>=',
520 | '!=',
521 | '&&',
522 | '||',
523 | '++',
524 | '--',
525 | '+',
526 | '-',
527 | '*',
528 | '/',
529 | '&',
530 | '|',
531 | '^',
532 | '%',
533 | '<<',
534 | '>>',
535 | '>>>',
536 | '+=',
537 | '-=',
538 | '*=',
539 | '/=',
540 | '&=',
541 | '|=',
542 | '^=',
543 | '%=',
544 | '<<=',
545 | '>>=',
546 | '>>>='
547 | ],
548 | // we include these common regular expressions
549 | symbols: /[=>](?!@symbols)/, '@brackets'],
562 | [/@symbols/, { cases: { '@operators': 'delimiter', '@default': '' } }],
563 | [/\d*\d+[eE]([\-+]?\d+)?(@floatsuffix)/, 'number.float'],
564 | [/\d*\.\d+([eE][\-+]?\d+)?(@floatsuffix)/, 'number.float'],
565 | [/0[xX][0-9a-fA-F']*[0-9a-fA-F](@integersuffix)/, 'number.hex'],
566 | [/0[0-7']*[0-7](@integersuffix)/, 'number.octal'],
567 | [/0[bB][0-1']*[0-1](@integersuffix)/, 'number.binary'],
568 | [/\d[\d']*\d(@integersuffix)/, 'number'],
569 | [/\d(@integersuffix)/, 'number'],
570 | [/[;,.]/, 'delimiter'],
571 | [/"([^"\\]|\\.)*$/, 'string.invalid'],
572 | [/"/, 'string', '@string'],
573 | [/'[^\\']'/, 'string'],
574 | [/(')(@escapes)(')/, ['string', 'string.escape', 'string']],
575 | [/'/, 'string.invalid']
576 | ],
577 | whitespace: [
578 | [/[ \t\r\n]+/, ''],
579 | [/\/\*\*(?!\/)/, 'comment.doc', '@doccomment'],
580 | [/\/\*/, 'comment', '@comment'],
581 | [/\/\/.*$/, 'comment'],
582 | ],
583 | comment: [
584 | [/[^\/*]+/, 'comment'],
585 | [/\/\*/, 'comment.invalid'],
586 | [/\*\//, 'comment', '@pop'],
587 | [/[\/*]/, 'comment']
588 | ],
589 | //Identical copy of comment above, except for the addition of .doc
590 | doccomment: [
591 | [/[^\/*]+/, 'comment.doc'],
592 | [/\/\*/, 'comment.doc.invalid'],
593 | [/\*\//, 'comment.doc', '@pop'],
594 | [/[\/*]/, 'comment.doc']
595 | ],
596 | string: [
597 | [/[^\\"]+/, 'string'],
598 | [/@escapes/, 'string.escape'],
599 | [/\\./, 'string.escape.invalid'],
600 | [/"/, 'string', '@pop']
601 | ],
602 | },
603 | };
604 | });
605 |
--------------------------------------------------------------------------------
/VSCUnity/vs.language.shader/tests/shaderTests.js:
--------------------------------------------------------------------------------
1 | /*---------------------------------------------------------
2 | * Copyright (C) Microsoft Corporation. All rights reserved.
3 | *--------------------------------------------------------*/
4 | ///
5 | 'use strict';
6 | define(["require", "exports", '../cppDef', 'monaco-testing'], function (require, exports, languageDef, T) {
7 | var tokenizationSupport = T.createTokenizationSupport(languageDef.language);
8 | var tokenize = T.createTokenizeFromSupport(tokenizationSupport);
9 | var assertTokens = T.assertTokens;
10 | var assertWords = T.assertWords;
11 | function assertTokensOne(textToTokenize, tokenType) {
12 | var tokens = tokenize(textToTokenize).tokens;
13 | assertTokens(tokens, [{ startIndex: 0, type: tokenType }]);
14 | }
15 | ;
16 | T.module('Syntax Highlighting - CPP');
17 | T.test('Keywords', function () {
18 | var tokens = tokenize('int _tmain(int argc, _TCHAR* argv[])').tokens;
19 | assertTokens(tokens, [
20 | { startIndex: 0, type: 'keyword.int.cpp' },
21 | { startIndex: 3, type: '' },
22 | { startIndex: 4, type: 'identifier.cpp' },
23 | { startIndex: 10, type: 'delimiter.parenthesis.cpp', bracket: 1 },
24 | { startIndex: 11, type: 'keyword.int.cpp' },
25 | { startIndex: 14, type: '' },
26 | { startIndex: 15, type: 'identifier.cpp' },
27 | { startIndex: 19, type: 'delimiter.cpp' },
28 | { startIndex: 20, type: '' },
29 | { startIndex: 21, type: 'identifier.cpp' },
30 | { startIndex: 27, type: 'delimiter.cpp' },
31 | { startIndex: 28, type: '' },
32 | { startIndex: 29, type: 'identifier.cpp' },
33 | { startIndex: 33, type: 'delimiter.square.cpp', bracket: 1 },
34 | { startIndex: 34, type: 'delimiter.square.cpp', bracket: -1 },
35 | { startIndex: 35, type: 'delimiter.parenthesis.cpp', bracket: -1 }
36 | ]);
37 | });
38 | T.test('Comments - single line', function () {
39 | var tokens = [];
40 | tokens = tokenize('//').tokens;
41 | T.equal(tokens.length, 1);
42 | assertTokens(tokens, [{ startIndex: 0, type: 'comment.cpp' }]);
43 | tokens = tokenize(' // a comment').tokens;
44 | assertTokens(tokens, [
45 | { startIndex: 0, type: '' },
46 | { startIndex: 4, type: 'comment.cpp' }
47 | ]);
48 | tokens = tokenize('// a comment').tokens;
49 | assertTokens(tokens, [{ startIndex: 0, type: 'comment.cpp' }]);
50 | tokens = tokenize('//sticky comment').tokens;
51 | assertTokens(tokens, [{ startIndex: 0, type: 'comment.cpp' }]);
52 | tokens = tokenize('/almost a comment').tokens;
53 | assertTokens(tokens, [
54 | { startIndex: 0, type: 'delimiter.cpp' },
55 | { startIndex: 1, type: 'identifier.cpp' },
56 | { startIndex: 7, type: '' },
57 | { startIndex: 8, type: 'identifier.cpp' },
58 | { startIndex: 9, type: '' },
59 | { startIndex: 10, type: 'identifier.cpp' }
60 | ]);
61 | tokens = tokenize('1 / 2; /* comment').tokens;
62 | assertTokens(tokens, [
63 | { startIndex: 0, type: 'number.cpp' },
64 | { startIndex: 1, type: '' },
65 | { startIndex: 2, type: 'delimiter.cpp' },
66 | { startIndex: 3, type: '' },
67 | { startIndex: 4, type: 'number.cpp' },
68 | { startIndex: 5, type: 'delimiter.cpp' },
69 | { startIndex: 6, type: '' },
70 | { startIndex: 7, type: 'comment.cpp' }
71 | ]);
72 | tokens = tokenize('int x = 1; // my comment // is a nice one').tokens;
73 | assertTokens(tokens, [
74 | { startIndex: 0, type: 'keyword.int.cpp' },
75 | { startIndex: 3, type: '' },
76 | { startIndex: 4, type: 'identifier.cpp' },
77 | { startIndex: 5, type: '' },
78 | { startIndex: 6, type: 'delimiter.cpp' },
79 | { startIndex: 7, type: '' },
80 | { startIndex: 8, type: 'number.cpp' },
81 | { startIndex: 9, type: 'delimiter.cpp' },
82 | { startIndex: 10, type: '' },
83 | { startIndex: 11, type: 'comment.cpp' }
84 | ]);
85 | });
86 | T.test('Comments - range comment, single line', function () { // '/* ... */' block comments opened and closed on the same line
87 |     var tokens = tokenize('/* a simple comment */').tokens;
88 |     assertTokens(tokens, [
89 |         { startIndex: 0, type: 'comment.cpp' }
90 |     ]);
91 |     tokens = tokenize('int x = /* a simple comment */ 1;').tokens; // code resumes after the closing '*/'
92 |     assertTokens(tokens, [
93 |         { startIndex: 0, type: 'keyword.int.cpp' },
94 |         { startIndex: 3, type: '' },
95 |         { startIndex: 4, type: 'identifier.cpp' },
96 |         { startIndex: 5, type: '' },
97 |         { startIndex: 6, type: 'delimiter.cpp' },
98 |         { startIndex: 7, type: '' },
99 |         { startIndex: 8, type: 'comment.cpp' },
100 |         { startIndex: 30, type: '' },
101 |         { startIndex: 31, type: 'number.cpp' },
102 |         { startIndex: 32, type: 'delimiter.cpp' }
103 |     ]);
104 |     tokens = tokenize('int x = /* comment */ 1; */').tokens; // stray '*/' with no matching '/*' — tokenized per the expectations below, not as a comment
105 |     assertTokens(tokens, [
106 |         { startIndex: 0, type: 'keyword.int.cpp' },
107 |         { startIndex: 3, type: '' },
108 |         { startIndex: 4, type: 'identifier.cpp' },
109 |         { startIndex: 5, type: '' },
110 |         { startIndex: 6, type: 'delimiter.cpp' },
111 |         { startIndex: 7, type: '' },
112 |         { startIndex: 8, type: 'comment.cpp' },
113 |         { startIndex: 21, type: '' },
114 |         { startIndex: 22, type: 'number.cpp' },
115 |         { startIndex: 23, type: 'delimiter.cpp' },
116 |         { startIndex: 24, type: '' }
117 |     ]);
118 |     tokens = tokenize('x = /**/;').tokens; // empty block comment
119 |     assertTokens(tokens, [
120 |         { startIndex: 0, type: 'identifier.cpp' },
121 |         { startIndex: 1, type: '' },
122 |         { startIndex: 2, type: 'delimiter.cpp' },
123 |         { startIndex: 3, type: '' },
124 |         { startIndex: 4, type: 'comment.cpp' },
125 |         { startIndex: 8, type: 'delimiter.cpp' }
126 |     ]);
127 |     tokens = tokenize('x = /*/;').tokens; // '/*/' opens a comment but does not close it — rest of line is comment
128 |     assertTokens(tokens, [
129 |         { startIndex: 0, type: 'identifier.cpp' },
130 |         { startIndex: 1, type: '' },
131 |         { startIndex: 2, type: 'delimiter.cpp' },
132 |         { startIndex: 3, type: '' },
133 |         { startIndex: 4, type: 'comment.cpp' }
134 |     ]);
135 | });
136 | T.test('Numbers', function () { // numeric literal forms: integer suffixes, C++14 digit separators, hex, octal, float
137 |     assertTokensOne('0', 'number.cpp');
138 |     assertTokensOne('12l', 'number.cpp');
139 |     assertTokensOne('34U', 'number.cpp');
140 |     assertTokensOne('55LL', 'number.cpp');
141 |     assertTokensOne('34ul', 'number.cpp');
142 |     assertTokensOne('55llU', 'number.cpp');
143 |     assertTokensOne('5\'5llU', 'number.cpp'); // C++14 single-quote digit separator
144 |     assertTokensOne('100\'000\'000', 'number.cpp');
145 |     assertTokensOne('0x100\'aafllU', 'number.hex.cpp');
146 |     assertTokensOne('0342\'325', 'number.octal.cpp');
147 |     assertTokensOne('0x123', 'number.hex.cpp');
148 |     assertTokensOne('23.5', 'number.float.cpp');
149 |     assertTokensOne('23.5e3', 'number.float.cpp');
150 |     assertTokensOne('23.5E3', 'number.float.cpp');
151 |     assertTokensOne('23.5F', 'number.float.cpp');
152 |     assertTokensOne('23.5f', 'number.float.cpp');
153 |     assertTokensOne('1.72E3F', 'number.float.cpp');
154 |     assertTokensOne('1.72E3f', 'number.float.cpp');
155 |     assertTokensOne('1.72e3F', 'number.float.cpp');
156 |     assertTokensOne('1.72e3f', 'number.float.cpp');
157 |     assertTokensOne('23.5L', 'number.float.cpp'); // long double suffix
158 |     assertTokensOne('23.5l', 'number.float.cpp');
159 |     assertTokensOne('1.72E3L', 'number.float.cpp');
160 |     assertTokensOne('1.72E3l', 'number.float.cpp');
161 |     assertTokensOne('1.72e3L', 'number.float.cpp');
162 |     assertTokensOne('1.72e3l', 'number.float.cpp');
163 |     var tokens = tokenize('0+0').tokens; // numbers split correctly around operators, with and without spaces
164 |     assertTokens(tokens, [
165 |         { startIndex: 0, type: 'number.cpp' },
166 |         { startIndex: 1, type: 'delimiter.cpp' },
167 |         { startIndex: 2, type: 'number.cpp' }
168 |     ]);
169 |     tokens = tokenize('100+10').tokens;
170 |     assertTokens(tokens, [
171 |         { startIndex: 0, type: 'number.cpp' },
172 |         { startIndex: 3, type: 'delimiter.cpp' },
173 |         { startIndex: 4, type: 'number.cpp' }
174 |     ]);
175 |     tokens = tokenize('0 + 0').tokens;
176 |     assertTokens(tokens, [
177 |         { startIndex: 0, type: 'number.cpp' },
178 |         { startIndex: 1, type: '' },
179 |         { startIndex: 2, type: 'delimiter.cpp' },
180 |         { startIndex: 3, type: '' },
181 |         { startIndex: 4, type: 'number.cpp' }
182 |     ]);
183 | });
184 | T.test('Monarch Generated', function () {
185 | var previousState = tokenizationSupport.getInitialState();
186 | // Line 1
187 | var src = '#include';
188 | var tokens = tokenize(src, previousState);
189 | previousState = tokens.endState;
190 | assertTokens(tokens.tokens, [
191 | { startIndex: 0, type: 'keyword.cpp' },
192 | { startIndex: 8, type: 'delimiter.angle.cpp' },
193 | { startIndex: 9, type: 'identifier.cpp' },
194 | { startIndex: 17, type: 'delimiter.angle.cpp' }
195 | ]);
196 | // Line 2
197 | src = '#include "/path/to/my/file.h"';
198 | tokens = tokenize(src, previousState);
199 | previousState = tokens.endState;
200 | assertTokens(tokens.tokens, [
201 | { startIndex: 0, type: 'keyword.cpp' },
202 | { startIndex: 8, type: '' },
203 | { startIndex: 9, type: 'string.cpp' }
204 | ]);
205 | // Line 3
206 | src = '';
207 | tokens = tokenize(src, previousState);
208 | previousState = tokens.endState;
209 | assertTokens(tokens.tokens, [
210 | ]);
211 | // Line 4
212 | src = '#ifdef VAR';
213 | tokens = tokenize(src, previousState);
214 | previousState = tokens.endState;
215 | assertTokens(tokens.tokens, [
216 | { startIndex: 0, type: 'keyword.cpp' },
217 | { startIndex: 6, type: '' },
218 | { startIndex: 7, type: 'identifier.cpp' }
219 | ]);
220 | // Line 5
221 | src = '#define SUM(A,B) (A) + (B)';
222 | tokens = tokenize(src, previousState);
223 | previousState = tokens.endState;
224 | assertTokens(tokens.tokens, [
225 | { startIndex: 0, type: 'keyword.cpp' },
226 | { startIndex: 7, type: '' },
227 | { startIndex: 8, type: 'identifier.cpp' },
228 | { startIndex: 11, type: 'delimiter.parenthesis.cpp' },
229 | { startIndex: 12, type: 'identifier.cpp' },
230 | { startIndex: 13, type: 'delimiter.cpp' },
231 | { startIndex: 14, type: 'identifier.cpp' },
232 | { startIndex: 15, type: 'delimiter.parenthesis.cpp' },
233 | { startIndex: 16, type: '' },
234 | { startIndex: 17, type: 'delimiter.parenthesis.cpp' },
235 | { startIndex: 18, type: 'identifier.cpp' },
236 | { startIndex: 19, type: 'delimiter.parenthesis.cpp' },
237 | { startIndex: 20, type: '' },
238 | { startIndex: 21, type: 'delimiter.cpp' },
239 | { startIndex: 22, type: '' },
240 | { startIndex: 23, type: 'delimiter.parenthesis.cpp' },
241 | { startIndex: 24, type: 'identifier.cpp' },
242 | { startIndex: 25, type: 'delimiter.parenthesis.cpp' }
243 | ]);
244 | // Line 6
245 | src = '';
246 | tokens = tokenize(src, previousState);
247 | previousState = tokens.endState;
248 | assertTokens(tokens.tokens, [
249 | ]);
250 | // Line 7
251 | src = 'int main(int argc, char** argv)';
252 | tokens = tokenize(src, previousState);
253 | previousState = tokens.endState;
254 | assertTokens(tokens.tokens, [
255 | { startIndex: 0, type: 'keyword.int.cpp' },
256 | { startIndex: 3, type: '' },
257 | { startIndex: 4, type: 'identifier.cpp' },
258 | { startIndex: 8, type: 'delimiter.parenthesis.cpp' },
259 | { startIndex: 9, type: 'keyword.int.cpp' },
260 | { startIndex: 12, type: '' },
261 | { startIndex: 13, type: 'identifier.cpp' },
262 | { startIndex: 17, type: 'delimiter.cpp' },
263 | { startIndex: 18, type: '' },
264 | { startIndex: 19, type: 'keyword.char.cpp' },
265 | { startIndex: 23, type: '' },
266 | { startIndex: 26, type: 'identifier.cpp' },
267 | { startIndex: 30, type: 'delimiter.parenthesis.cpp' }
268 | ]);
269 | // Line 8
270 | src = '{';
271 | tokens = tokenize(src, previousState);
272 | previousState = tokens.endState;
273 | assertTokens(tokens.tokens, [
274 | { startIndex: 0, type: 'delimiter.curly.cpp' }
275 | ]);
276 | // Line 9
277 | src = ' return 0;';
278 | tokens = tokenize(src, previousState);
279 | previousState = tokens.endState;
280 | assertTokens(tokens.tokens, [
281 | { startIndex: 0, type: '' },
282 | { startIndex: 1, type: 'keyword.return.cpp' },
283 | { startIndex: 7, type: '' },
284 | { startIndex: 8, type: 'number.cpp' },
285 | { startIndex: 9, type: 'delimiter.cpp' }
286 | ]);
287 | // Line 10
288 | src = '}';
289 | tokens = tokenize(src, previousState);
290 | previousState = tokens.endState;
291 | assertTokens(tokens.tokens, [
292 | { startIndex: 0, type: 'delimiter.curly.cpp' }
293 | ]);
294 | // Line 11
295 | src = '';
296 | tokens = tokenize(src, previousState);
297 | previousState = tokens.endState;
298 | assertTokens(tokens.tokens, [
299 | ]);
300 | // Line 12
301 | src = 'namespace TestSpace';
302 | tokens = tokenize(src, previousState);
303 | previousState = tokens.endState;
304 | assertTokens(tokens.tokens, [
305 | { startIndex: 0, type: 'keyword.namespace.cpp' },
306 | { startIndex: 9, type: '' },
307 | { startIndex: 10, type: 'identifier.cpp' }
308 | ]);
309 | // Line 13
310 | src = '{';
311 | tokens = tokenize(src, previousState);
312 | previousState = tokens.endState;
313 | assertTokens(tokens.tokens, [
314 | { startIndex: 0, type: 'delimiter.curly.cpp' }
315 | ]);
316 | // Line 14
317 | src = ' using Asdf.CDE;';
318 | tokens = tokenize(src, previousState);
319 | previousState = tokens.endState;
320 | assertTokens(tokens.tokens, [
321 | { startIndex: 0, type: '' },
322 | { startIndex: 1, type: 'keyword.using.cpp' },
323 | { startIndex: 6, type: '' },
324 | { startIndex: 7, type: 'identifier.cpp' },
325 | { startIndex: 11, type: 'delimiter.cpp' },
326 | { startIndex: 12, type: 'identifier.cpp' },
327 | { startIndex: 15, type: 'delimiter.cpp' }
328 | ]);
329 | // Line 15
330 | src = ' template ';
331 | tokens = tokenize(src, previousState);
332 | previousState = tokens.endState;
333 | assertTokens(tokens.tokens, [
334 | { startIndex: 0, type: '' },
335 | { startIndex: 1, type: 'keyword.template.cpp' },
336 | { startIndex: 9, type: '' },
337 | { startIndex: 10, type: 'delimiter.angle.cpp' },
338 | { startIndex: 11, type: 'keyword.typename.cpp' },
339 | { startIndex: 19, type: '' },
340 | { startIndex: 20, type: 'identifier.cpp' },
341 | { startIndex: 21, type: 'delimiter.angle.cpp' }
342 | ]);
343 | // Line 16
344 | src = ' class CoolClass : protected BaseClass';
345 | tokens = tokenize(src, previousState);
346 | previousState = tokens.endState;
347 | assertTokens(tokens.tokens, [
348 | { startIndex: 0, type: '' },
349 | { startIndex: 1, type: 'keyword.class.cpp' },
350 | { startIndex: 6, type: '' },
351 | { startIndex: 7, type: 'identifier.cpp' },
352 | { startIndex: 16, type: '' },
353 | { startIndex: 17, type: 'delimiter.cpp' },
354 | { startIndex: 18, type: '' },
355 | { startIndex: 19, type: 'keyword.protected.cpp' },
356 | { startIndex: 28, type: '' },
357 | { startIndex: 29, type: 'identifier.cpp' }
358 | ]);
359 | // Line 17
360 | src = ' {';
361 | tokens = tokenize(src, previousState);
362 | previousState = tokens.endState;
363 | assertTokens(tokens.tokens, [
364 | { startIndex: 0, type: '' },
365 | { startIndex: 1, type: 'delimiter.curly.cpp' }
366 | ]);
367 | // Line 18
368 | src = ' private:';
369 | tokens = tokenize(src, previousState);
370 | previousState = tokens.endState;
371 | assertTokens(tokens.tokens, [
372 | { startIndex: 0, type: '' },
373 | { startIndex: 2, type: 'keyword.private.cpp' },
374 | { startIndex: 9, type: 'delimiter.cpp' }
375 | ]);
376 | // Line 19
377 | src = ' ';
378 | tokens = tokenize(src, previousState);
379 | previousState = tokens.endState;
380 | assertTokens(tokens.tokens, [
381 | { startIndex: 0, type: '' }
382 | ]);
383 | // Line 20
384 | src = ' static T field;';
385 | tokens = tokenize(src, previousState);
386 | previousState = tokens.endState;
387 | assertTokens(tokens.tokens, [
388 | { startIndex: 0, type: '' },
389 | { startIndex: 2, type: 'keyword.static.cpp' },
390 | { startIndex: 8, type: '' },
391 | { startIndex: 9, type: 'identifier.cpp' },
392 | { startIndex: 10, type: '' },
393 | { startIndex: 11, type: 'identifier.cpp' },
394 | { startIndex: 16, type: 'delimiter.cpp' }
395 | ]);
396 | // Line 21
397 | src = ' ';
398 | tokens = tokenize(src, previousState);
399 | previousState = tokens.endState;
400 | assertTokens(tokens.tokens, [
401 | { startIndex: 0, type: '' }
402 | ]);
403 | // Line 22
404 | src = ' public:';
405 | tokens = tokenize(src, previousState);
406 | previousState = tokens.endState;
407 | assertTokens(tokens.tokens, [
408 | { startIndex: 0, type: '' },
409 | { startIndex: 2, type: 'keyword.public.cpp' },
410 | { startIndex: 8, type: 'delimiter.cpp' }
411 | ]);
412 | // Line 23
413 | src = ' ';
414 | tokens = tokenize(src, previousState);
415 | previousState = tokens.endState;
416 | assertTokens(tokens.tokens, [
417 | { startIndex: 0, type: '' }
418 | ]);
419 | // Line 24
420 | src = ' [[deprecated]]';
421 | tokens = tokenize(src, previousState);
422 | previousState = tokens.endState;
423 | assertTokens(tokens.tokens, [
424 | { startIndex: 0, type: '' },
425 | { startIndex: 2, type: 'annotation.cpp' }
426 | ]);
427 | // Line 25
428 | src = ' foo method() const override';
429 | tokens = tokenize(src, previousState);
430 | previousState = tokens.endState;
431 | assertTokens(tokens.tokens, [
432 | { startIndex: 0, type: '' },
433 | { startIndex: 2, type: 'identifier.cpp' },
434 | { startIndex: 5, type: '' },
435 | { startIndex: 6, type: 'identifier.cpp' },
436 | { startIndex: 12, type: 'delimiter.parenthesis.cpp' },
437 | { startIndex: 13, type: 'delimiter.parenthesis.cpp' },
438 | { startIndex: 14, type: '' },
439 | { startIndex: 15, type: 'keyword.const.cpp' },
440 | { startIndex: 20, type: '' },
441 | { startIndex: 21, type: 'keyword.override.cpp' }
442 | ]);
443 | // Line 26
444 | src = ' {';
445 | tokens = tokenize(src, previousState);
446 | previousState = tokens.endState;
447 | assertTokens(tokens.tokens, [
448 | { startIndex: 0, type: '' },
449 | { startIndex: 2, type: 'delimiter.curly.cpp' }
450 | ]);
451 | // Line 27
452 | src = ' auto s = new Bar();';
453 | tokens = tokenize(src, previousState);
454 | previousState = tokens.endState;
455 | assertTokens(tokens.tokens, [
456 | { startIndex: 0, type: '' },
457 | { startIndex: 3, type: 'keyword.auto.cpp' },
458 | { startIndex: 7, type: '' },
459 | { startIndex: 8, type: 'identifier.cpp' },
460 | { startIndex: 9, type: '' },
461 | { startIndex: 10, type: 'delimiter.cpp' },
462 | { startIndex: 11, type: '' },
463 | { startIndex: 12, type: 'keyword.new.cpp' },
464 | { startIndex: 15, type: '' },
465 | { startIndex: 16, type: 'identifier.cpp' },
466 | { startIndex: 19, type: 'delimiter.parenthesis.cpp' },
467 | { startIndex: 20, type: 'delimiter.parenthesis.cpp' },
468 | { startIndex: 21, type: 'delimiter.cpp' }
469 | ]);
470 | // Line 28
471 | src = ' ';
472 | tokens = tokenize(src, previousState);
473 | previousState = tokens.endState;
474 | assertTokens(tokens.tokens, [
475 | { startIndex: 0, type: '' }
476 | ]);
477 | // Line 29
478 | src = ' if (s.field) {';
479 | tokens = tokenize(src, previousState);
480 | previousState = tokens.endState;
481 | assertTokens(tokens.tokens, [
482 | { startIndex: 0, type: '' },
483 | { startIndex: 3, type: 'keyword.if.cpp' },
484 | { startIndex: 5, type: '' },
485 | { startIndex: 6, type: 'delimiter.parenthesis.cpp' },
486 | { startIndex: 7, type: 'identifier.cpp' },
487 | { startIndex: 8, type: 'delimiter.cpp' },
488 | { startIndex: 9, type: 'identifier.cpp' },
489 | { startIndex: 14, type: 'delimiter.parenthesis.cpp' },
490 | { startIndex: 15, type: '' },
491 | { startIndex: 16, type: 'delimiter.curly.cpp' }
492 | ]);
493 | // Line 30
494 | src = ' for(const auto & b : s.field) {';
495 | tokens = tokenize(src, previousState);
496 | previousState = tokens.endState;
497 | assertTokens(tokens.tokens, [
498 | { startIndex: 0, type: '' },
499 | { startIndex: 4, type: 'keyword.for.cpp' },
500 | { startIndex: 7, type: 'delimiter.parenthesis.cpp' },
501 | { startIndex: 8, type: 'keyword.const.cpp' },
502 | { startIndex: 13, type: '' },
503 | { startIndex: 14, type: 'keyword.auto.cpp' },
504 | { startIndex: 18, type: '' },
505 | { startIndex: 19, type: 'delimiter.cpp' },
506 | { startIndex: 20, type: '' },
507 | { startIndex: 21, type: 'identifier.cpp' },
508 | { startIndex: 22, type: '' },
509 | { startIndex: 23, type: 'delimiter.cpp' },
510 | { startIndex: 24, type: '' },
511 | { startIndex: 25, type: 'identifier.cpp' },
512 | { startIndex: 26, type: 'delimiter.cpp' },
513 | { startIndex: 27, type: 'identifier.cpp' },
514 | { startIndex: 32, type: 'delimiter.parenthesis.cpp' },
515 | { startIndex: 33, type: '' },
516 | { startIndex: 34, type: 'delimiter.curly.cpp' }
517 | ]);
518 | // Line 31
519 | src = ' break;';
520 | tokens = tokenize(src, previousState);
521 | previousState = tokens.endState;
522 | assertTokens(tokens.tokens, [
523 | { startIndex: 0, type: '' },
524 | { startIndex: 5, type: 'keyword.break.cpp' },
525 | { startIndex: 10, type: 'delimiter.cpp' }
526 | ]);
527 | // Line 32
528 | src = ' }';
529 | tokens = tokenize(src, previousState);
530 | previousState = tokens.endState;
531 | assertTokens(tokens.tokens, [
532 | { startIndex: 0, type: '' },
533 | { startIndex: 4, type: 'delimiter.curly.cpp' }
534 | ]);
535 | // Line 33
536 | src = ' }';
537 | tokens = tokenize(src, previousState);
538 | previousState = tokens.endState;
539 | assertTokens(tokens.tokens, [
540 | { startIndex: 0, type: '' },
541 | { startIndex: 3, type: 'delimiter.curly.cpp' }
542 | ]);
543 | // Line 34
544 | src = ' }';
545 | tokens = tokenize(src, previousState);
546 | previousState = tokens.endState;
547 | assertTokens(tokens.tokens, [
548 | { startIndex: 0, type: '' },
549 | { startIndex: 2, type: 'delimiter.curly.cpp' }
550 | ]);
551 | // Line 35
552 | src = ' ';
553 | tokens = tokenize(src, previousState);
554 | previousState = tokens.endState;
555 | assertTokens(tokens.tokens, [
556 | { startIndex: 0, type: '' }
557 | ]);
558 | // Line 36
559 | src = ' std::string s = "hello wordld\\n";';
560 | tokens = tokenize(src, previousState);
561 | previousState = tokens.endState;
562 | assertTokens(tokens.tokens, [
563 | { startIndex: 0, type: '' },
564 | { startIndex: 2, type: 'identifier.cpp' },
565 | { startIndex: 5, type: '' },
566 | { startIndex: 7, type: 'identifier.cpp' },
567 | { startIndex: 13, type: '' },
568 | { startIndex: 14, type: 'identifier.cpp' },
569 | { startIndex: 15, type: '' },
570 | { startIndex: 16, type: 'delimiter.cpp' },
571 | { startIndex: 17, type: '' },
572 | { startIndex: 18, type: 'string.cpp' },
573 | { startIndex: 31, type: 'string.escape.cpp' },
574 | { startIndex: 33, type: 'string.cpp' },
575 | { startIndex: 34, type: 'delimiter.cpp' }
576 | ]);
577 | // Line 37
578 | src = ' ';
579 | tokens = tokenize(src, previousState);
580 | previousState = tokens.endState;
581 | assertTokens(tokens.tokens, [
582 | { startIndex: 0, type: '' }
583 | ]);
584 | // Line 38
585 | src = ' int number = 123\'123\'123Ull;';
586 | tokens = tokenize(src, previousState);
587 | previousState = tokens.endState;
588 | assertTokens(tokens.tokens, [
589 | { startIndex: 0, type: '' },
590 | { startIndex: 2, type: 'keyword.int.cpp' },
591 | { startIndex: 5, type: '' },
592 | { startIndex: 6, type: 'identifier.cpp' },
593 | { startIndex: 12, type: '' },
594 | { startIndex: 13, type: 'delimiter.cpp' },
595 | { startIndex: 14, type: '' },
596 | { startIndex: 15, type: 'number.cpp' },
597 | { startIndex: 29, type: 'delimiter.cpp' }
598 | ]);
599 | // Line 39
600 | src = ' }';
601 | tokens = tokenize(src, previousState);
602 | previousState = tokens.endState;
603 | assertTokens(tokens.tokens, [
604 | { startIndex: 0, type: '' },
605 | { startIndex: 1, type: 'delimiter.curly.cpp' }
606 | ]);
607 | // Line 40
608 | src = '}';
609 | tokens = tokenize(src, previousState);
610 | previousState = tokens.endState;
611 | assertTokens(tokens.tokens, [
612 | { startIndex: 0, type: 'delimiter.curly.cpp' }
613 | ]);
614 | // Line 41
615 | src = '';
616 | tokens = tokenize(src, previousState);
617 | previousState = tokens.endState;
618 | assertTokens(tokens.tokens, [
619 | ]);
620 | // Line 42
621 | src = '#endif';
622 | tokens = tokenize(src, previousState);
623 | previousState = tokens.endState;
624 | assertTokens(tokens.tokens, [
625 | { startIndex: 0, type: 'keyword.cpp' }
626 | ]);
627 | });
628 | });
629 |
--------------------------------------------------------------------------------