├── .github
│   ├── no-response.yml
│   └── workflows
│       └── ci.yml
├── .gitignore
├── CONTRIBUTING.md
├── ISSUE_TEMPLATE.md
├── LICENSE.md
├── PULL_REQUEST_TEMPLATE.md
├── README.md
├── coffeelint.json
├── grammars
│   ├── coffeescript (literate).cson
│   └── coffeescript.cson
├── package-lock.json
├── package.json
├── settings
│   └── language-coffee-script.cson
├── snippets
│   └── language-coffee-script.cson
└── spec
    ├── coffee-script-literate-spec.coffee
    └── coffee-script-spec.coffee

/.github/no-response.yml:
--------------------------------------------------------------------------------
1 | # Configuration for probot-no-response - https://github.com/probot/no-response
2 | 
3 | # Number of days of inactivity before an issue is closed for lack of response
4 | daysUntilClose: 28
5 | 
6 | # Label requiring a response
7 | responseRequiredLabel: more-information-needed
8 | 
9 | # Comment to post when closing an issue for lack of response. Set to `false` to disable.
10 | closeComment: >
11 |   This issue has been automatically closed because there has been no response
12 |   to our request for more information from the original author. With only the
13 |   information that is currently in the issue, we don't have enough information
14 |   to take action. Please reach out if you have or find the answers we need so
15 |   that we can investigate further.
16 | 
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 | 
3 | on: [push]
4 | 
5 | env:
6 |   CI: true
7 | 
8 | jobs:
9 |   Test:
10 |     strategy:
11 |       matrix:
12 |         os: [ubuntu-latest, macos-latest, windows-latest]
13 |         channel: [stable, beta]
14 |     runs-on: ${{ matrix.os }}
15 |     steps:
16 |       - uses: actions/checkout@v1
17 |       - uses: UziTech/action-setup-atom@v2
18 |         with:
19 |           version: ${{ matrix.channel }}
20 |       - name: Install dependencies
21 |         run: apm install
22 |       - name: Run tests
23 |         run: atom --test spec
24 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.cache
2 | node_modules
3 | 
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md)
2 | 
--------------------------------------------------------------------------------
/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | 
8 | 
9 | ### Prerequisites
10 | 
11 | * [ ] Put an X between the brackets on this line if you have done all of the following:
12 |   * Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode
13 |   * Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/
14 |   * Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq
15 |   * Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom
16 |   * Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages
17 | 
18 | ### Description
19 | 
20 | [Description of the issue]
21 | 
22 | ### Steps to Reproduce
23 | 
24 | 1. [First Step]
25 | 2. [Second Step]
26 | 3. [and so on...]
27 | 
28 | **Expected behavior:** [What you expect to happen]
29 | 
30 | **Actual behavior:** [What actually happens]
31 | 
32 | **Reproduces how often:** [What percentage of the time does it reproduce?]
33 | 
34 | ### Versions
35 | 
36 | You can get this information by copying and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running.
37 | 
38 | ### Additional Information
39 | 
40 | Any additional information, configuration or data that might be necessary to reproduce the issue.
41 | 
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Copyright (c) 2014 GitHub Inc.
2 | 
3 | Permission is hereby granted, free of charge, to any person obtaining
4 | a copy of this software and associated documentation files (the
5 | "Software"), to deal in the Software without restriction, including
6 | without limitation the rights to use, copy, modify, merge, publish,
7 | distribute, sublicense, and/or sell copies of the Software, and to
8 | permit persons to whom the Software is furnished to do so, subject to
9 | the following conditions:
10 | 
11 | The above copyright notice and this permission notice shall be
12 | included in all copies or substantial portions of the Software.
13 | 
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 | 
22 | --------------------------------------------------------------------
23 | 
24 | This package was derived from a TextMate bundle located at
25 | https://github.com/jashkenas/coffee-script-tmbundle and distributed under the
26 | following license, located in `LICENSE`:
27 | 
28 | Copyright (c) 2009-2014 Jeremy Ashkenas
29 | 
30 | Permission is hereby granted, free of charge, to any person
31 | obtaining a copy of this software and associated documentation
32 | files (the "Software"), to deal in the Software without
33 | restriction, including without limitation the rights to use,
34 | copy, modify, merge, publish, distribute, sublicense, and/or sell
35 | copies of the Software, and to permit persons to whom the
36 | Software is furnished to do so, subject to the following
37 | conditions:
38 | 
39 | The above copyright notice and this permission notice shall be
40 | included in all copies or substantial portions of the Software.
41 | 
42 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
43 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
44 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
45 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
46 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
47 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
48 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
49 | OTHER DEALINGS IN THE SOFTWARE.
50 | 
--------------------------------------------------------------------------------
/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ### Requirements
2 | 
3 | * Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion.
4 | * All new code requires tests to guard against regressions.
5 | 
6 | ### Description of the Change
7 | 
8 | 
13 | 
14 | ### Alternate Designs
15 | 
16 | 
17 | 
18 | ### Benefits
19 | 
20 | 
21 | 
22 | ### Possible Drawbacks
23 | 
24 | 
25 | 
26 | ### Applicable Issues
27 | 
28 | 
29 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ##### Atom and all repositories under Atom will be archived on December 15, 2022. Learn more in our [official announcement](https://github.blog/2022-06-08-sunsetting-atom/)
2 | # CoffeeScript language support in Atom
3 | [![macOS Build Status](https://travis-ci.org/atom/language-coffee-script.svg?branch=master)](https://travis-ci.org/atom/language-coffee-script)
4 | [![Windows Build status](https://ci.appveyor.com/api/projects/status/4j9aak7iwn2f2x7a/branch/master?svg=true)](https://ci.appveyor.com/project/Atom/language-coffee-script/branch/master) [![Dependency Status](https://david-dm.org/atom/language-coffee-script.svg)](https://david-dm.org/atom/language-coffee-script)
5 | 
6 | Adds syntax highlighting and snippets to CoffeeScript files in Atom.
7 | 
8 | Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate) from the [CoffeeScript TextMate bundle](https://github.com/jashkenas/coffee-script-tmbundle).
9 | 
10 | Contributions are greatly appreciated. Please fork this repository and open a pull request to add snippets, make grammar tweaks, etc.
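
As a quick, purely illustrative sample (not taken from the package itself), here are some of the constructs the grammar scopes, with the scope names it assigns:

```coffee
class Greeter
  constructor: (@name) ->                 # @name → variable.parameter.function.readwrite.instance.coffee
  greet: (greeting = "hi") ->
    console.log "#{greeting}, #{@name}!"  # interpolation → source.coffee.embedded.source

matcher = /// ^\s* (\w+) \s*$ ///         # block regex → string.regexp.multiline.coffee

new Greeter("Ada").greet()
```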
11 | 
--------------------------------------------------------------------------------
/coffeelint.json:
--------------------------------------------------------------------------------
1 | {
2 |   "max_line_length": {
3 |     "level": "ignore"
4 |   },
5 |   "no_empty_param_list": {
6 |     "level": "error"
7 |   },
8 |   "arrow_spacing": {
9 |     "level": "error"
10 |   },
11 |   "no_interpolation_in_single_quotes": {
12 |     "level": "error"
13 |   },
14 |   "no_debugger": {
15 |     "level": "error"
16 |   },
17 |   "prefer_english_operator": {
18 |     "level": "error"
19 |   },
20 |   "colon_assignment_spacing": {
21 |     "spacing": {
22 |       "left": 0,
23 |       "right": 1
24 |     },
25 |     "level": "error"
26 |   },
27 |   "braces_spacing": {
28 |     "spaces": 0,
29 |     "level": "error"
30 |   },
31 |   "spacing_after_comma": {
32 |     "level": "error"
33 |   },
34 |   "no_stand_alone_at": {
35 |     "level": "error"
36 |   }
37 | }
38 | 
--------------------------------------------------------------------------------
/grammars/coffeescript (literate).cson:
--------------------------------------------------------------------------------
1 | 'comment': 'CoffeeScript (Literate)'
2 | 'fileTypes': [
3 |   'litcoffee'
4 |   'litcoffee.erb'
5 |   'coffee.md'
6 | ]
7 | 'name': 'CoffeeScript (Literate)'
8 | 'scopeName': 'source.litcoffee'
9 | 'firstLineMatch': '''(?x)
10 |   # Hashbang
11 |   ^\\#!.*(?:\\s|\\/)
12 |     coffee(?:\\s.+?)?\\s(?:-l|--literate)
13 |   (?:\\s|$)
14 |   |
15 |   # Modeline
16 |   (?i:
17 |     # Emacs
18 |     -\\*-(?:\\s*(?=[^:;\\s]+\\s*-\\*-)|(?:.*?[;\\s]|(?<=-\\*-))mode\\s*:\\s*)
19 |       litcoffee
20 |     (?=[\\s;]|(?<![-*])-\\*-).*?-\\*-
21 |     |
22 |     # Vim
23 |     (?:(?:\\s|^)vi(?:m[<=>]?\\d+|m)?|\\sex)(?=:(?=\\s*set?\\s[^\\n:]+:)|:(?!\\s*set?\\s))(?:(?:\\s|\\s*:\\s*)\\w*(?:\\s*=(?:[^\\n\\\\\\s]|\\\\.)*)?)*[\\s:](?:filetype|ft|syntax)\\s*=
24 |       litcoffee
25 |     (?=\\s|:|$)
26 |   )
27 | '''
28 | 'patterns': [
29 |   {
30 |     'begin': '^(?=([ ]{4}|\\t)(?!$))'
31 |     'end': '^(?!([ ]{4}|\\t))'
32 |     'name': 'markup.raw.block.markdown'
33 |     'patterns': [
34 |       {
35 |         'include': '#block_raw'
36 |       }
37 |     ]
38 |   }
39 |   {
40 |     'begin': '''
41 |       (?x)^
42 |       (?= [ ]{0,3}>.
43 |       | [#]{1,6}\\s*+
44 |       | [ ]{0,3}(?<marker>[-*_])([ ]{0,2}\\k<marker>){2,}[ \\t]*+$
45 |       )
46 |     '''
47 |     'comment': 'We could also use an empty end match and set applyEndPatternLast, but then we must be sure that the begin pattern will only match stuff matched by the sub-patterns.'
48 |     'end': '''
49 |       (?x)^
50 |       (?! [ ]{0,3}>.
51 |       | [#]{1,6}\\s*+
52 |       | [ ]{0,3}(?<marker>[-*_])([ ]{0,2}\\k<marker>){2,}[ \\t]*+$
53 |       )
54 |     '''
55 |     'name': 'meta.block-level.markdown'
56 |     'patterns': [
57 |       {
58 |         'include': '#block_quote'
59 |       }
60 |       {
61 |         'include': '#heading'
62 |       }
63 |       {
64 |         'include': '#separator'
65 |       }
66 |     ]
67 |   }
68 |   {
69 |     'begin': '^[ ]{0,3}([*+-])(?=\\s)'
70 |     'captures':
71 |       '1':
72 |         'name': 'punctuation.definition.list_item.markdown'
73 |     'end': '^(?=\\S|[ ]{4,})|(?!\\G)'
74 |     'name': 'markup.list.unnumbered.markdown'
75 |     'patterns': [
76 |       {
77 |         'include': '#list-paragraph'
78 |       }
79 |     ]
80 |   }
81 |   {
82 |     'begin': '^[ ]{0,3}([0-9]+\\.)(?=\\s)'
83 |     'captures':
84 |       '1':
85 |         'name': 'punctuation.definition.list_item.markdown'
86 |     'end': '^(?=\\S|[ ]{4,})|(?!\\G)'
87 |     'name': 'markup.list.numbered.markdown'
88 |     'patterns': [
89 |       {
90 |         'include': '#list-paragraph'
91 |       }
92 |     ]
93 |   }
94 |   {
95 |     'begin': '^(?=<(p|div|h[1-6]|blockquote|pre|table|dl|ol|ul|script|noscript|form|fieldset|iframe|math|ins|del)\\b)(?!.*?</\\1>)'
96 |     'comment': 'Markdown formatting is disabled inside block-level tags.'
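    # Illustrative example: between block-level tags, markup is handed to
    # text.html.basic, so in
    #   <div>
    #   *stars* are plain text here
    #   </div>
    # nothing is scoped as markup.italic.markdown.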
97 |     'end': '(?<=^$\\n)'
98 |     'name': 'meta.disable-markdown'
99 |     'patterns': [
100 |       {
101 |         'include': 'text.html.basic'
102 |       }
103 |     ]
104 |   }
105 |   {
106 |     'begin': '^(?=<(p|div|h[1-6]|blockquote|pre|table|dl|ol|ul|script|noscript|form|fieldset|iframe|math|ins|del)\\b)'
107 |     'comment': 'Same rule but for one line disables.'
108 |     'end': '$\\n?'
109 |     'name': 'meta.disable-markdown'
110 |     'patterns': [
111 |       {
112 |         'include': 'text.html.basic'
113 |       }
114 |     ]
115 |   }
116 |   {
117 |     'captures':
118 |       '1':
119 |         'name': 'punctuation.definition.constant.markdown'
120 |       '2':
121 |         'name': 'constant.other.reference.link.markdown'
122 |       '3':
123 |         'name': 'punctuation.definition.constant.markdown'
124 |       '4':
125 |         'name': 'punctuation.separator.key-value.markdown'
126 |       '5':
127 |         'name': 'punctuation.definition.link.markdown'
128 |       '6':
129 |         'name': 'markup.underline.link.markdown'
130 |       '7':
131 |         'name': 'punctuation.definition.link.markdown'
132 |       '8':
133 |         'name': 'string.other.link.description.title.markdown'
134 |       '9':
135 |         'name': 'punctuation.definition.string.begin.markdown'
136 |       '10':
137 |         'name': 'punctuation.definition.string.end.markdown'
138 |       '11':
139 |         'name': 'string.other.link.description.title.markdown'
140 |       '12':
141 |         'name': 'punctuation.definition.string.begin.markdown'
142 |       '13':
143 |         'name': 'punctuation.definition.string.end.markdown'
144 |     'match': '''
145 |       (?x)
146 |       \\s*                 # Leading whitespace
147 |       (\\[)(.+?)(\\])(:)   # Reference name
148 |       [ \\t]*              # Optional whitespace
149 |       (<?)(\\S+?)(>?)      # The url
150 |       [ \\t]*              # Optional whitespace
151 |       (?:
152 |         ((\\().+?(\\)))    # Match title in parens…
153 |         | ((").+?("))      # or in quotes.
154 |       )?                   # Title is optional
155 |       \\s*                 # Optional whitespace
156 |       $
157 |     '''
158 |     'name': 'meta.link.reference.def.markdown'
159 |   }
160 |   {
161 |     'begin': '^(?=\\S)(?![=-]{3,}(?=$))'
162 |     'end': '^(?:\\s*$|(?=[ ]{0,3}>.))|(?=[ \\t]*\\n)(?<=^===|^====|=====|^---|^----|-----)[ \\t]*\\n|(?=^#)'
163 |     'name': 'meta.paragraph.markdown'
164 |     'patterns': [
165 |       {
166 |         'include': '#inline'
167 |       }
168 |       {
169 |         'include': 'text.html.basic'
170 |       }
171 |       {
172 |         'captures':
173 |           '1':
174 |             'name': 'punctuation.definition.heading.markdown'
175 |         'match': '^(={3,})(?=[ \\t]*$)'
176 |         'name': 'markup.heading.1.markdown'
177 |       }
178 |       {
179 |         'captures':
180 |           '1':
181 |             'name': 'punctuation.definition.heading.markdown'
182 |         'match': '^(-{3,})(?=[ \\t]*$)'
183 |         'name': 'markup.heading.2.markdown'
184 |       }
185 |     ]
186 |   }
187 | ]
188 | 'repository':
189 |   'ampersand':
190 |     'comment': 'Markdown will convert this for us. We match it so that the HTML grammar will not mark it up as invalid.'
191 |     'match': '&(?!([a-zA-Z0-9]+|#[0-9]+|#x[0-9a-fA-F]+);)'
192 |     'name': 'meta.other.valid-ampersand.markdown'
193 |   'block_quote':
194 |     'begin': '\\G[ ]{0,3}(>)(?!$)[ ]?'
195 |     'beginCaptures':
196 |       '1':
197 |         'name': 'punctuation.definition.blockquote.markdown'
198 |     'comment': 'We terminate the block quote when seeing an empty line, a separator or a line with leading > characters. The latter is to “reset” the quote level for quoted lines.'
199 |     'end': '''
200 |       (?x)^
201 |       (?= \\s*$
202 |       | [ ]{0,3}(?<marker>[-*_])([ ]{0,2}\\k<marker>){2,}[ \\t]*+$
203 |       | [ ]{0,3}>.
204 |       )
205 |     '''
206 |     'name': 'markup.quote.markdown'
207 |     'patterns': [
208 |       {
209 |         'begin': '''
210 |           (?x)\\G
211 |           (?= [ ]{0,3}>.
212 |           )
213 |         '''
214 |         'end': '^'
215 |         'patterns': [
216 |           {
217 |             'include': '#block_quote'
218 |           }
219 |         ]
220 |       }
221 |       {
222 |         'applyEndPatternLast': 1
223 |         'begin': '''
224 |           (?x)\\G
225 |           (?= ([ ]{4}|\\t)
226 |           | [#]{1,6}\\s*+
227 |           | [ ]{0,3}(?<marker>[-*_])([ ]{0,2}\\k<marker>){2,}[ \\t]*+$
228 |           )
229 |         '''
230 |         'end': '^'
231 |         'patterns': [
232 |           {
233 |             'include': '#block_raw'
234 |           }
235 |           {
236 |             'include': '#heading'
237 |           }
238 |           {
239 |             'include': '#separator'
240 |           }
241 |         ]
242 |       }
243 |       {
244 |         'begin': '''
245 |           (?x)\\G
246 |           (?! $
247 |           | [ ]{0,3}>.
248 |           | ([ ]{4}|\\t)
249 |           | [#]{1,6}\\s*+
250 |           | [ ]{0,3}(?<marker>[-*_])([ ]{0,2}\\k<marker>){2,}[ \\t]*+$
251 |           )
252 |         '''
253 |         'end': '$|(?<=\\n)'
254 |         'patterns': [
255 |           {
256 |             'include': '#inline'
257 |           }
258 |         ]
259 |       }
260 |     ]
261 |   'block_raw':
262 |     'name': 'markup.raw.block.markdown'
263 |     'patterns': [
264 |       {
265 |         'include': '#coffee_script'
266 |       }
267 |     ]
268 |   'bold':
269 |     'begin': '''
270 |       (?x)
271 |       (\\*\\*|__)(?=\\S)                                       # Open
272 |       (?=
273 |         (
274 |           <[^>]*+>                                             # HTML tags
275 |           | (?<raw>`+)([^`]|(?!(?<!`)\\k<raw>(?!`))`)*+\\k<raw>  # Raw
76 |           | \\\\[\\\\`*_{}\\[\\]()#.!+\\->]?+                   # Escapes
277 |           | \\[
278 |             (
279 |               (?<square>                 # Named group
280 |                 [^\\[\\]\\\\]            # Match most chars
281 |                 | \\\\.                  # Escaped chars
282 |                 | \\[ \\g<square>*+ \\]  # Nested brackets
283 |               )*+
284 |             \\]
285 |             (
286 |               (                          # Reference Link
287 |                 [ ]?                     # Optional space
288 |                 \\[[^\\]]*+\\]           # Ref name
289 |               )
290 |               |
291 |               (                          # Inline Link
292 |                 \\(                      # Opening paren
293 |                 [ \\t]*+                 # Optional whitespace
294 |                 <?(.*?)>?                # URL
295 |                 [ \\t]*+                 # Optional whitespace
296 |                 (                        # Optional Title
297 |                   (?<title>[\'"])
298 |                   (.*?)
299 |                   \\k<title>
300 |                 )?
301 |                 \\)
302 |               )
303 |             )
304 |           )
305 |           | (?!(?<=\\S)\\1).             # Everything besides
306 |         )++
307 |         (?<=\\S)\\1                      # Close
308 |       )
309 |     '''
310 |     'captures':
311 |       '1':
312 |         'name': 'punctuation.definition.bold.markdown'
313 |     'end': '(?<=\\S)(\\1)'
314 |     'name': 'markup.bold.markdown'
315 |     'patterns': [
316 |       {
317 |         'applyEndPatternLast': 1
318 |         'begin': '(?=<[^>]*?>)'
319 |         'end': '(?<=>)'
320 |         'patterns': [
321 |           {
322 |             'include': 'text.html.basic'
323 |           }
324 |         ]
325 |       }
326 |       {
327 |         'include': '#escape'
328 |       }
329 |       {
330 |         'include': '#ampersand'
331 |       }
332 |       {
333 |         'include': '#bracket'
334 |       }
335 |       {
336 |         'include': '#raw'
337 |       }
338 |       {
339 |         'include': '#italic'
340 |       }
341 |       {
342 |         'include': '#image-inline'
343 |       }
344 |       {
345 |         'include': '#link-inline'
346 |       }
347 |       {
348 |         'include': '#link-inet'
349 |       }
350 |       {
351 |         'include': '#link-email'
352 |       }
353 |       {
354 |         'include': '#image-ref'
355 |       }
356 |       {
357 |         'include': '#link-ref-literal'
358 |       }
359 |       {
360 |         'include': '#link-ref'
361 |       }
362 |     ]
363 |   'bracket':
364 |     'comment': 'Markdown will convert this for us. We match it so that the HTML grammar will not mark it up as invalid.'
365 |     'match': '<(?![a-z/?\\$!])'
366 |     'name': 'meta.other.valid-bracket.markdown'
367 |   'coffee_script':
368 |     'patterns': [
369 |       {
370 |         'include': 'source.coffee'
371 |       }
372 |     ]
373 |   'escape':
374 |     'match': '\\\\[-`*_#+.!(){}\\[\\]\\\\>]'
375 |     'name': 'constant.character.escape.markdown'
376 |   'heading':
377 |     'begin': '\\G(#{1,6})(?!#)\\s*(?=\\S)'
378 |     'captures':
379 |       '1':
380 |         'name': 'punctuation.definition.heading.markdown'
381 |     'contentName': 'entity.name.section.markdown'
382 |     'end': '\\s*(#*)$\\n?'
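    # Illustrative example: in "## Setup ##" the leading "##" is captured here as
    # punctuation.definition.heading.markdown, and any trailing "#"s are captured
    # by the end pattern below.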
383 | 'name': 'markup.heading.markdown' 384 | 'patterns': [ 385 | { 386 | 'include': '#inline' 387 | } 388 | ] 389 | 'image-inline': 390 | 'captures': 391 | '1': 392 | 'name': 'punctuation.definition.string.begin.markdown' 393 | '2': 394 | 'name': 'string.other.link.description.markdown' 395 | '3': 396 | 'name': 'punctuation.definition.string.end.markdown' 397 | '5': 398 | 'name': 'invalid.illegal.whitespace.markdown' 399 | '6': 400 | 'name': 'punctuation.definition.metadata.markdown' 401 | '7': 402 | 'name': 'punctuation.definition.link.markdown' 403 | '8': 404 | 'name': 'markup.underline.link.image.markdown' 405 | '9': 406 | 'name': 'punctuation.definition.link.markdown' 407 | '10': 408 | 'name': 'string.other.link.description.title.markdown' 409 | '11': 410 | 'name': 'punctuation.definition.string.markdown' 411 | '12': 412 | 'name': 'punctuation.definition.string.markdown' 413 | '13': 414 | 'name': 'string.other.link.description.title.markdown' 415 | '14': 416 | 'name': 'punctuation.definition.string.markdown' 417 | '15': 418 | 'name': 'punctuation.definition.string.markdown' 419 | '16': 420 | 'name': 'punctuation.definition.metadata.markdown' 421 | 'match': ''' 422 | (?x) 423 | \\! # Images start with ! 424 | (\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\]) # Match the link text 425 | ([ ])? # Space not allowed 426 | (\\() # Opening paren for url 427 | (<?)(\\S+?)(>?) # The url 428 | [ \\t]* # Optional whitespace 429 | (?: 430 | ((\\().+?(\\))) # Match title in parens… 431 | | ((").+?(")) # or in quotes. 432 | )? # Title is optional 433 | \\s* # Optional whitespace 434 | (\\)) 435 | ''' 436 | 'name': 'meta.image.inline.markdown' 437 | 'image-ref': 438 | 'captures': 439 | '1': 440 | 'name': 'punctuation.definition.string.begin.markdown' 441 | '2': 442 | 'name': 'string.other.link.description.markdown' 443 | '4': 444 | 'name': 'punctuation.definition.string.begin.markdown' 445 | '5': 446 | 'name': 'punctuation.definition.constant.markdown' 447 | '6': 448 | 'name': 'constant.other.reference.link.markdown' 449 | '7': 450 | 'name': 'punctuation.definition.constant.markdown' 451 | 'match': '\\!(\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\])[ ]?(\\[)(.*?)(\\])' 452 | 'name': 'meta.image.reference.markdown' 453 | 'inline': 454 | 'patterns': [ 455 | { 456 | 'include': '#escape' 457 | } 458 | { 459 | 'include': '#ampersand' 460 | } 461 | { 462 | 'include': '#bracket' 463 | } 464 | { 465 | 'include': '#raw' 466 | } 467 | { 468 | 'include': '#bold' 469 | } 470 | { 471 | 'include': '#italic' 472 | } 473 | { 474 | 'include': '#line-break' 475 | } 476 | { 477 | 'include': '#image-inline' 478 | } 479 | { 480 | 'include': '#link-inline' 481 | } 482 | { 483 | 'include': '#link-inet' 484 | } 485 | { 486 | 'include': '#link-email' 487 | } 488 | { 489 | 'include': '#image-ref' 490 | } 491 | { 492 | 'include': '#link-ref-literal' 493 | } 494 | { 495 | 'include': '#link-ref' 496 | } 497 | ] 498 | 'italic': 499 | 'begin': ''' 500 | (?x) 501 | (\\*|_)(?=\\S) # Open 502 | (?= 503 | ( 504 | <[^>]*+> # HTML tags 505 | | (?<raw>`+)([^`]|(?!(?<!`)\\k<raw>(?!`))`)*+\\k<raw> # Raw 506 | | \\\\[\\\\`*_{}\\[\\]()#.!+\\->]?+ # Escapes 507 | | \\[ 508 | ( 509 | (?<square> # Named group 510 | [^\\[\\]\\\\] # Match most chars 511 | | \\\\. # Escaped chars 512 | | \\[ \\g<square>*+ \\] # Nested brackets 513 | )*+ 514 | \\] 515 | ( 516 | ( # Reference Link 517 | [ ]? 
# Optional space 518 | \\[[^\\]]*+\\] # Ref name 519 | ) 520 | | 521 | ( # Inline Link 522 | \\( # Opening paren 523 | [ \\t]*+ # Optional whitespace 524 | <?(.*?)>? # URL 525 | [ \\t]*+ # Optional whitespace 526 | ( # Optional Title 527 | (?<title>[\'"]) 528 | (.*?) 529 | \\k<title> 530 | )? 531 | \\) 532 | ) 533 | ) 534 | ) 535 | | \\1\\1 # Must be bold closer 536 | | (?!(?<=\\S)\\1). # Everything besides 537 | )++ 538 | (?<=\\S)\\1 # Close 539 | ) 540 | ''' 541 | 'captures': 542 | '1': 543 | 'name': 'punctuation.definition.italic.markdown' 544 | 'end': '(?<=\\S)(\\1)((?!\\1)|(?=\\1\\1))' 545 | 'name': 'markup.italic.markdown' 546 | 'patterns': [ 547 | { 548 | 'applyEndPatternLast': 1 549 | 'begin': '(?=<[^>]*?>)' 550 | 'end': '(?<=>)' 551 | 'patterns': [ 552 | { 553 | 'include': 'text.html.basic' 554 | } 555 | ] 556 | } 557 | { 558 | 'include': '#escape' 559 | } 560 | { 561 | 'include': '#ampersand' 562 | } 563 | { 564 | 'include': '#bracket' 565 | } 566 | { 567 | 'include': '#raw' 568 | } 569 | { 570 | 'include': '#bold' 571 | } 572 | { 573 | 'include': '#image-inline' 574 | } 575 | { 576 | 'include': '#link-inline' 577 | } 578 | { 579 | 'include': '#link-inet' 580 | } 581 | { 582 | 'include': '#link-email' 583 | } 584 | { 585 | 'include': '#image-ref' 586 | } 587 | { 588 | 'include': '#link-ref-literal' 589 | } 590 | { 591 | 'include': '#link-ref' 592 | } 593 | ] 594 | 'line-break': 595 | 'match': ' {2,}$' 596 | 'name': 'meta.dummy.line-break' 597 | 'link-email': 598 | 'captures': 599 | '1': 600 | 'name': 'punctuation.definition.link.markdown' 601 | '2': 602 | 'name': 'markup.underline.link.markdown' 603 | '4': 604 | 'name': 'punctuation.definition.link.markdown' 605 | 'match': '(<)((?:mailto:)?[-.\\w]+@[-a-z0-9]+(\\.[-a-z0-9]+)*\\.[a-z]+)(>)' 606 | 'name': 'meta.link.email.lt-gt.markdown' 607 | 'link-inet': 608 | 'captures': 609 | '1': 610 | 'name': 'punctuation.definition.link.markdown' 611 | '2': 612 | 'name': 'markup.underline.link.markdown' 613 | '3': 614 | 'name': 'punctuation.definition.link.markdown' 615 | 'match': '(<)((?:https?|ftp)://.*?)(>)' 616 | 'name': 'meta.link.inet.markdown' 617 | 'link-inline': 618 | 'captures': 619 | '1': 620 | 'name': 'punctuation.definition.string.begin.markdown' 621 | '2': 622 | 'name': 'string.other.link.title.markdown' 623 | '4': 624 | 'name': 'punctuation.definition.string.end.markdown' 625 | '5': 626 | 'name': 'invalid.illegal.whitespace.markdown' 627 | '6': 628 | 'name': 'punctuation.definition.metadata.markdown' 629 | '7': 630 | 'name': 'punctuation.definition.link.markdown' 631 | '8': 632 | 'name': 'markup.underline.link.markdown' 633 | '9': 634 | 'name': 'punctuation.definition.link.markdown' 635 | '10': 636 | 'name': 'string.other.link.description.title.markdown' 637 | '11': 638 | 'name': 'punctuation.definition.string.begin.markdown' 639 | '12': 640 | 'name': 'punctuation.definition.string.end.markdown' 641 | '13': 642 | 'name': 'string.other.link.description.title.markdown' 643 | '14': 644 | 'name': 'punctuation.definition.string.begin.markdown' 645 | '15': 646 | 'name': 'punctuation.definition.string.end.markdown' 647 | '16': 648 | 'name': 'punctuation.definition.metadata.markdown' 649 | 'match': ''' 650 | (?x) 651 | (\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\]) # Match the link text. 652 | ([ ])? # Space not allowed 653 | (\\() # Opening paren for url 654 | (<?)(.*?)(>?) # The url 655 | [ \\t]* # Optional whitespace 656 | (?: 657 | ((\\().+?(\\))) # Match title in parens… 658 | | ((").+?(")) # or in quotes. 
659 | )? # Title is optional 660 | \\s* # Optional whitespace 661 | (\\)) 662 | ''' 663 | 'name': 'meta.link.inline.markdown' 664 | 'link-ref': 665 | 'captures': 666 | '1': 667 | 'name': 'punctuation.definition.string.begin.markdown' 668 | '2': 669 | 'name': 'string.other.link.title.markdown' 670 | '4': 671 | 'name': 'punctuation.definition.string.end.markdown' 672 | '5': 673 | 'name': 'punctuation.definition.constant.begin.markdown' 674 | '6': 675 | 'name': 'constant.other.reference.link.markdown' 676 | '7': 677 | 'name': 'punctuation.definition.constant.end.markdown' 678 | 'match': '(\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\])[ ]?(\\[)([^\\]]*+)(\\])' 679 | 'name': 'meta.link.reference.markdown' 680 | 'link-ref-literal': 681 | 'captures': 682 | '1': 683 | 'name': 'punctuation.definition.string.begin.markdown' 684 | '2': 685 | 'name': 'string.other.link.title.markdown' 686 | '4': 687 | 'name': 'punctuation.definition.string.end.markdown' 688 | '5': 689 | 'name': 'punctuation.definition.constant.begin.markdown' 690 | '6': 691 | 'name': 'punctuation.definition.constant.end.markdown' 692 | 'match': '(\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\])[ ]?(\\[)(\\])' 693 | 'name': 'meta.link.reference.literal.markdown' 694 | 'list-paragraph': 695 | 'patterns': [ 696 | { 697 | 'begin': '\\G\\s+(?=\\S)' 698 | 'end': '^\\s*$' 699 | 'name': 'meta.paragraph.list.markdown' 700 | 'patterns': [ 701 | { 702 | 'include': '#inline' 703 | } 704 | { 705 | 'captures': 706 | '1': 707 | 'name': 'punctuation.definition.list_item.markdown' 708 | 'comment': 'Match the list punctuation' 709 | 'match': '^\\s*([*+-]|[0-9]+\\.)' 710 | } 711 | ] 712 | } 713 | ] 714 | 'raw': 715 | 'captures': 716 | '1': 717 | 'name': 'punctuation.definition.raw.markdown' 718 | '3': 719 | 'name': 'punctuation.definition.raw.markdown' 720 | 'match': '(`+)([^`]|(?!(?<!`)\\1(?!`))`)*+(\\1)' 721 | 'name': 'markup.raw.inline.markdown' 722 | 'separator': 723 | 'match': '\\G[ ]{0,3}([-*_])([ ]{0,2}\\1){2,}[ \\t]*$\\n?' 
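# Illustrative example: matches a horizontal rule such as "---", "* * *" or "___" (three or more of one marker).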
724 | 'name': 'meta.separator.markdown' 725 | -------------------------------------------------------------------------------- /grammars/coffeescript.cson: -------------------------------------------------------------------------------- 1 | 'scopeName': 'source.coffee' 2 | 'name': 'CoffeeScript' 3 | 'fileTypes': [ 4 | 'coffee' 5 | 'Cakefile' 6 | 'coffee.erb' 7 | 'cson' 8 | '_coffee' 9 | 'cjsx' 10 | ] 11 | 'firstLineMatch': '''(?x) 12 | # Hashbang 13 | ^\\#!.*(?:\\s|\\/) 14 | coffee 15 | (?:$|\\s) 16 | | 17 | # Modeline 18 | (?i: 19 | # Emacs 20 | -\\*-(?:\\s*(?=[^:;\\s]+\\s*-\\*-)|(?:.*?[;\\s]|(?<=-\\*-))mode\\s*:\\s*) 21 | coffee 22 | (?=[\\s;]|(?<![-*])-\\*-).*?-\\*- 23 | | 24 | # Vim 25 | (?:(?:\\s|^)vi(?:m[<=>]?\\d+|m)?|\\sex)(?=:(?=\\s*set?\\s[^\\n:]+:)|:(?!\\s*set?\\s))(?:(?:\\s|\\s*:\\s*)\\w*(?:\\s*=(?:[^\\n\\\\\\s]|\\\\.)*)?)*[\\s:](?:filetype|ft|syntax)\\s*= 26 | coffee 27 | (?=\\s|:|$) 28 | ) 29 | ''' 30 | 'patterns': [ 31 | { 32 | 'include': '#jsx' 33 | } 34 | { 35 | 'match': '(new)\\s+(?:(?:(class)\\s+(\\w+(?:\\.\\w*)*)?)|(\\w+(?:\\.\\w*)*))' 36 | 'name': 'meta.class.instance.constructor.coffee' 37 | 'captures': 38 | '1': 39 | 'name': 'keyword.operator.new.coffee' 40 | '2': 41 | 'name': 'storage.type.class.coffee' 42 | '3': 43 | 'name': 'entity.name.type.instance.coffee' 44 | '4': 45 | 'name': 'entity.name.type.instance.coffee' 46 | } 47 | { 48 | 'begin': '\'\'\'' 49 | 'beginCaptures': 50 | '0': 51 | 'name': 'punctuation.definition.string.begin.coffee' 52 | 'end': '\'\'\'' 53 | 'endCaptures': 54 | '0': 55 | 'name': 'punctuation.definition.string.end.coffee' 56 | 'name': 'string.quoted.single.heredoc.coffee' 57 | 'patterns': [ 58 | { 59 | 'captures': 60 | '1': 61 | 'name': 'punctuation.definition.escape.backslash.coffee' 62 | 'match': '(\\\\).' 63 | 'name': 'constant.character.escape.backslash.coffee' 64 | } 65 | ] 66 | } 67 | { 68 | 'begin': '"""' 69 | 'beginCaptures': 70 | '0': 71 | 'name': 'punctuation.definition.string.begin.coffee' 72 | 'end': '"""' 73 | 'endCaptures': 74 | '0': 75 | 'name': 'punctuation.definition.string.end.coffee' 76 | 'name': 'string.quoted.double.heredoc.coffee' 77 | 'patterns': [ 78 | { 79 | 'captures': 80 | '1': 81 | 'name': 'punctuation.definition.escape.backslash.coffee' 82 | 'match': '(\\\\).' 
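# Illustrative example: matches escapes like \\n or \\" inside a """...""" heredoc; the backslash gets the punctuation scope.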
83 | 'name': 'constant.character.escape.backslash.coffee' 84 | } 85 | { 86 | 'include': '#interpolated_coffee' 87 | } 88 | ] 89 | } 90 | { 91 | 'match': '(`)(.*)(`)' 92 | 'name': 'string.quoted.script.coffee' 93 | 'captures': 94 | '1': 95 | 'name': 'punctuation.definition.string.begin.coffee' 96 | '2': 97 | 'name': 'source.js.embedded.coffee' 98 | 'patterns': [ 99 | { 100 | 'include': 'source.js' 101 | } 102 | ] 103 | '3': 104 | 'name': 'punctuation.definition.string.end.coffee' 105 | } 106 | { 107 | 'begin': '(?<!#)###(?!#)' 108 | 'beginCaptures': 109 | '0': 110 | 'name': 'punctuation.definition.comment.coffee' 111 | 'end': '###' 112 | 'endCaptures': 113 | '0': 114 | 'name': 'punctuation.definition.comment.coffee' 115 | 'name': 'comment.block.coffee' 116 | 'patterns': [ 117 | { 118 | 'match': '(?<=^|\\s)@\\w*(?=\\s)' 119 | 'name': 'storage.type.annotation.coffee' 120 | } 121 | ] 122 | } 123 | { 124 | 'begin': '#' 125 | 'beginCaptures': 126 | '0': 127 | 'name': 'punctuation.definition.comment.coffee' 128 | 'end': '$' 129 | 'name': 'comment.line.number-sign.coffee' 130 | } 131 | { 132 | 'begin': '///' 133 | 'end': '(///)[gimuy]*' 134 | 'name': 'string.regexp.multiline.coffee' 135 | 'beginCaptures': 136 | '0': 137 | 'name': 'punctuation.definition.string.begin.coffee' 138 | 'endCaptures': 139 | '1': 140 | 'name': 'punctuation.definition.string.end.coffee' 141 | 'patterns': [ 142 | { 143 | 'include': '#heregexp' 144 | } 145 | ] 146 | } 147 | { 148 | 'begin': '(?<![\\w$])(/)(?=(?![/*+?])(.+)(/)[gimuy]*(?!\\s*[\\w$/(]))' 149 | 'beginCaptures': 150 | '1': 151 | 'name': 'punctuation.definition.string.begin.coffee' 152 | 'end': '(/)[gimuy]*(?!\\s*[\\w$/(])' 153 | 'endCaptures': 154 | '1': 155 | 'name': 'punctuation.definition.string.end.coffee' 156 | 'name': 'string.regexp.coffee' 157 | 'patterns': [ 158 | { 159 | 'include': 'source.js.regexp' 160 | } 161 | ] 162 | } 163 | { 164 | 'match': '\\b(?<![\\.\\$])(break|by|catch|continue|else|finally|for|in|of|if|return|switch|then|throw|try|unless|when|while|until|loop|do|export|import|default|from|as|yield|async|await|(?<=for)\\s+own)(?!\\s*:)\\b' 165 | 'name': 'keyword.control.coffee' 166 | } 167 | { 168 | 'match': '\\b(?<![\\.\\$])(delete|instanceof|new|typeof)(?!\\s*:)\\b' 169 | 'name': 'keyword.operator.$1.coffee' 170 | } 171 | { 172 | 'match': '\\b(?<![\\.\\$])(case|function|var|void|with|const|let|enum|native|__hasProp|__extends|__slice|__bind|__indexOf|implements|interface|package|private|protected|public|static)(?!\\s*:)\\b' 173 | 'name': 'keyword.reserved.coffee' 174 | } 175 | { 176 | # a: -> ... 177 | # a: (args) -> ... 178 | 'begin': '''(?x) 179 | (?<=\\s|^)((@)?[a-zA-Z_$][\\w$]*) 180 | \\s*([:=])\\s* 181 | (?=(\\([^\\(\\)]*\\)\\s*)?[=-]>) 182 | ''' 183 | 'beginCaptures': 184 | '1': 185 | 'name': 'entity.name.function.coffee' 186 | '2': 187 | 'name': 'variable.other.readwrite.instance.coffee' 188 | '3': 189 | 'name': 'keyword.operator.assignment.coffee' 190 | 'end': '[=-]>' 191 | 'endCaptures': 192 | '0': 193 | 'name': 'storage.type.function.coffee' 194 | 'name': 'meta.function.coffee' 195 | 'patterns': [ 196 | { 197 | 'include': '#function_params' 198 | } 199 | ] 200 | } 201 | { 202 | # "a": -> ... 203 | # "a": (args) -> ... 
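# Illustrative example: 'on click': (e) -> handle e   — the quoted key is scoped entity.name.function.coffee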
204 | 'begin': '''(?x) 205 | (?<=\\s|^)(?:((\')([^\']*?)(\'))|((")([^"]*?)("))) 206 | \\s*([:=])\\s* 207 | (?=(\\([^\\(\\)]*\\)\\s*)?[=-]>) 208 | ''' 209 | 'beginCaptures': 210 | '1': 211 | 'name': 'string.quoted.single.coffee' 212 | '2': 213 | 'name': 'punctuation.definition.string.begin.coffee' 214 | '3': 215 | 'name': 'entity.name.function.coffee' 216 | '4': 217 | 'name': 'punctuation.definition.string.end.coffee' 218 | '5': 219 | 'name': 'string.quoted.double.coffee' 220 | '6': 221 | 'name': 'punctuation.definition.string.begin.coffee' 222 | '7': 223 | 'name': 'entity.name.function.coffee' 224 | '8': 225 | 'name': 'punctuation.definition.string.end.coffee' 226 | '9': 227 | 'name': 'keyword.operator.assignment.coffee' 228 | 'end': '[=-]>' 229 | 'endCaptures': 230 | '0': 231 | 'name': 'storage.type.function.coffee' 232 | 'name': 'meta.function.coffee' 233 | 'patterns': [ 234 | { 235 | 'include': '#function_params' 236 | } 237 | ] 238 | } 239 | { 240 | # (args) -> ... 241 | # -> ... 242 | 'begin': '(?=(\\([^\\(\\)]*\\)\\s*)?[=-]>)' 243 | 'end': '[=-]>' 244 | 'endCaptures': 245 | '0': 246 | 'name': 'storage.type.function.coffee' 247 | 'name': 'meta.function.inline.coffee' 248 | 'patterns': [ 249 | { 250 | 'include': '#function_params' 251 | } 252 | ] 253 | } 254 | { 255 | 'begin': '(?<=\\s|^)({)(?=[^\'"#]+?}[\\s\\]}]*=)' 256 | 'beginCaptures': 257 | '1': 258 | 'name': 'punctuation.definition.destructuring.begin.bracket.curly.coffee' 259 | 'end': '}' 260 | 'endCaptures': 261 | '0': 262 | 'name': 'punctuation.definition.destructuring.end.bracket.curly.coffee' 263 | 'name': 'meta.variable.assignment.destructured.object.coffee' 264 | 'patterns': [ 265 | { 266 | 'include': '$self' 267 | } 268 | { 269 | 'match': '[a-zA-Z$_]\\w*' 270 | 'name': 'variable.assignment.coffee' 271 | } 272 | ] 273 | } 274 | { 275 | 'begin': '(?<=\\s|^)(\\[)(?=[^\'"#]+?\\][\\s\\]}]*=)' 276 | 'beginCaptures': 277 | '1': 278 | 'name': 'punctuation.definition.destructuring.begin.bracket.square.coffee' 279 | 'end': '\\]' 280 | 'endCaptures': 281 | '0': 282 | 'name': 'punctuation.definition.destructuring.end.bracket.square.coffee' 283 | 'name': 'meta.variable.assignment.destructured.array.coffee' 284 | 'patterns': [ 285 | { 286 | 'include': '$self' 287 | } 288 | { 289 | 'match': '[a-zA-Z$_]\\w*' 290 | 'name': 'variable.assignment.coffee' 291 | } 292 | ] 293 | } 294 | { 295 | 'match': '\\b(?<!\\.|::)(true|on|yes)(?!\\s*[:=][^=])\\b' 296 | 'name': 'constant.language.boolean.true.coffee' 297 | } 298 | { 299 | 'match': '\\b(?<!\\.|::)(false|off|no)(?!\\s*[:=][^=])\\b' 300 | 'name': 'constant.language.boolean.false.coffee' 301 | } 302 | { 303 | 'match': '\\b(?<!\\.|::)null(?!\\s*[:=][^=])\\b' 304 | 'name': 'constant.language.null.coffee' 305 | } 306 | { 307 | 'match': '\\b(?<!\\.|::)extends(?!\\s*[:=])\\b' 308 | 'name': 'variable.language.coffee' 309 | } 310 | { 311 | 'match': '(?<!\\.)\\b(?<!\\$)(super|this|arguments)(?!\\s*[:=][^=]|\\$)\\b' 312 | 'name': 'variable.language.$1.coffee' 313 | } 314 | { 315 | 'captures': 316 | '1': 317 | 'name': 'storage.type.class.coffee' 318 | '2': 319 | 'name': 'keyword.control.inheritance.coffee' 320 | '3': 321 | 'name': 'entity.other.inherited-class.coffee' 322 | 'match': '(?<=\\s|^|\\[|\\()(class)\\s+(extends)\\s+(@?[a-zA-Z\\$\\._][\\w\\.]*)' 323 | 'name': 'meta.class.coffee' 324 | } 325 | { 326 | 'captures': 327 | '1': 328 | 'name': 'storage.type.class.coffee' 329 | '2': 330 | 'name': 'entity.name.type.class.coffee' 331 | '3': 332 | 'name': 'keyword.control.inheritance.coffee' 333 | '4': 334 
| 'name': 'entity.other.inherited-class.coffee' 335 | 'match': '(?<=\\s|^|\\[|\\()(class\\b)\\s+(@?[a-zA-Z\\$_][\\w\\.]*)?(?:\\s+(extends)\\s+(@?[a-zA-Z\\$\\._][\\w\\.]*))?' 336 | 'name': 'meta.class.coffee' 337 | } 338 | { 339 | 'match': '\\b(debugger|\\\\)\\b' 340 | 'name': 'keyword.other.coffee' 341 | } 342 | { 343 | 'match': '\\b(Array|ArrayBuffer|Blob|Boolean|Date|document|Function|Int(8|16|32|64)Array|Math|Map|Number|Object|Proxy|RegExp|Set|String|WeakMap|window|Uint(8|16|32|64)Array|XMLHttpRequest)\\b' 344 | 'name': 'support.class.coffee' 345 | } 346 | { 347 | 'match': '\\b(console)\\b' 348 | 'name': 'entity.name.type.object.coffee' 349 | } 350 | { 351 | 'match': '((?<=console\\.)(debug|warn|info|log|error|time|timeEnd|assert))\\b' 352 | 'name': 'support.function.console.coffee' 353 | } 354 | { 355 | 'match': '((?<=\\.)(apply|call|concat|every|filter|forEach|from|hasOwnProperty|indexOf|isPrototypeOf|join|lastIndexOf|map|of|pop|propertyIsEnumerable|push|reduce(Right)?|reverse|shift|slice|some|sort|splice|to(Locale)?String|unshift|valueOf))\\b' 356 | 'name': 'support.function.method.array.coffee' 357 | } 358 | { 359 | 'match': '((?<=Array\\.)(isArray))\\b' 360 | 'name': 'support.function.static.array.coffee' 361 | } 362 | { 363 | 'match': '((?<=Object\\.)(create|definePropert(ies|y)|freeze|getOwnProperty(Descriptors?|Names)|getProperty(Descriptor|Names)|getPrototypeOf|is(Extensible|Frozen|Sealed)?|isnt|keys|preventExtensions|seal))\\b' 364 | 'name': 'support.function.static.object.coffee' 365 | } 366 | { 367 | 'match': '((?<=Math\\.)(abs|acos|acosh|asin|asinh|atan|atan2|atanh|ceil|cos|cosh|exp|expm1|floor|hypot|log|log10|log1p|log2|max|min|pow|random|round|sign|sin|sinh|sqrt|tan|tanh|trunc))\\b' 368 | 'name': 'support.function.static.math.coffee' 369 | } 370 | { 371 | 'match': '((?<=Number\\.)(is(Finite|Integer|NaN)|toInteger))\\b' 372 | 'name': 'support.function.static.number.coffee' 373 | } 374 | { 375 | 'match': '(?<!\\.)\\b(module|exports|__filename|__dirname|global|process)(?!\\s*:)\\b' 376 | 'name': 'support.variable.coffee' 377 | } 378 | { 379 | 'match': '\\b(Infinity|NaN|undefined)\\b' 380 | 'name': 'constant.language.coffee' 381 | } 382 | { 383 | 'include': '#operators' 384 | } 385 | { 386 | 'include': '#method_calls' 387 | } 388 | { 389 | 'include': '#function_calls' 390 | } 391 | { 392 | 'include': '#numbers' 393 | } 394 | { 395 | 'include': '#objects' 396 | } 397 | { 398 | 'include': '#properties' 399 | } 400 | { 401 | 'match': '::' 402 | 'name': 'keyword.operator.prototype.coffee' 403 | } 404 | { 405 | 'match': '(?<!\\$)\\b[0-9]+[\\w$]*' 406 | 'name': 'invalid.illegal.identifier.coffee' 407 | } 408 | { 409 | 'match': ';' 410 | 'name': 'punctuation.terminator.statement.coffee' 411 | } 412 | { 413 | 'match': ',' 414 | 'name': 'punctuation.separator.delimiter.coffee' 415 | } 416 | { 417 | 'begin': '{' 418 | 'beginCaptures': 419 | '0': 420 | 'name': 'meta.brace.curly.coffee' 421 | 'end': '}' 422 | 'endCaptures': 423 | '0': 424 | 'name': 'meta.brace.curly.coffee' 425 | 'patterns': [ 426 | { 427 | 'include': '$self' 428 | } 429 | ] 430 | } 431 | { 432 | 'begin': '\\[' 433 | 'beginCaptures': 434 | '0': 435 | 'name': 'punctuation.definition.array.begin.bracket.square.coffee' 436 | 'end': '\\]' 437 | 'endCaptures': 438 | '0': 439 | 'name': 'punctuation.definition.array.end.bracket.square.coffee' 440 | 'patterns': [ 441 | { 442 | 'match': '(?<!\\.)\\.{3}' # ... 443 | 'name': 'keyword.operator.slice.exclusive.coffee' 444 | } 445 | { 446 | 'match': '(?<!\\.)\\.{2}' # .. 
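# Illustrative example: nums[1..3] keeps index 3 (inclusive), while nums[1...3] stops before it (exclusive, above).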
447 | 'name': 'keyword.operator.slice.inclusive.coffee' 448 | } 449 | { 450 | 'include': '$self' 451 | } 452 | ] 453 | } 454 | { 455 | 'begin': '\\(' 456 | 'beginCaptures': 457 | '0': 458 | 'name': 'meta.brace.round.coffee' 459 | 'end': '\\)' 460 | 'endCaptures': 461 | '0': 462 | 'name': 'meta.brace.round.coffee' 463 | 'patterns': [ 464 | { 465 | 'include': '$self' 466 | } 467 | ] 468 | } 469 | { 470 | 'include': '#instance_variable' 471 | } 472 | { 473 | 'include': '#single_quoted_string' 474 | } 475 | { 476 | 'include': '#double_quoted_string' 477 | } 478 | ] 479 | 'repository': 480 | 'arguments': 481 | 'patterns': [ 482 | { 483 | 'begin': '\\(' 484 | 'beginCaptures': 485 | '0': 486 | 'name': 'punctuation.definition.arguments.begin.bracket.round.coffee' 487 | 'end': '\\)' 488 | 'endCaptures': 489 | '0': 490 | 'name': 'punctuation.definition.arguments.end.bracket.round.coffee' 491 | 'name': 'meta.arguments.coffee' 492 | 'patterns': [ 493 | { 494 | 'include': '$self' 495 | } 496 | ] 497 | } 498 | { 499 | 'begin': '(?=(@|@?[\\w$]+|[=-]>|\\-\\d|\\[|{|\"|\'))' 500 | 'end': '(?=\\s*(?<![\\w$])(of|in|then|is|isnt|and|or|for|else|when|if|unless|by|instanceof)(?![\\w$]))|(?=\\s*(}|\\]|\\)|#|$))' 501 | 'name': 'meta.arguments.coffee' 502 | 'patterns': [ 503 | { 504 | 'include': '$self' 505 | } 506 | ] 507 | } 508 | ] 509 | 'double_quoted_string': 510 | 'patterns': [ 511 | { 512 | 'begin': '"' 513 | 'beginCaptures': 514 | '0': 515 | 'name': 'punctuation.definition.string.begin.coffee' 516 | 'end': '"' 517 | 'endCaptures': 518 | '0': 519 | 'name': 'punctuation.definition.string.end.coffee' 520 | 'name': 'string.quoted.double.coffee' 521 | 'patterns': [ 522 | { 523 | 'captures': 524 | '1': 525 | 'name': 'punctuation.definition.escape.backslash.coffee' 526 | 'match': '(\\\\)(x[0-9A-Fa-f]{2}|[0-2][0-7]{0,2}|3[0-6][0-7]|37[0-7]?|[4-7][0-7]?|.)' 527 | 'name': 'constant.character.escape.backslash.coffee' 528 | } 529 | { 530 | 'include': '#interpolated_coffee' 531 | } 532 | ] 533 | } 534 | ] 535 | 'function_calls': 536 | 'patterns': [ 537 | { 538 | # functionCall(arg1, "arg2", [...]) 539 | 'begin': '(@)?([\\w$]+)(?=\\()' 540 | 'beginCaptures': 541 | '1': 542 | 'name': 'variable.other.readwrite.instance.coffee' 543 | '2': 544 | 'patterns': [ 545 | { 546 | 'include': '#function_names' 547 | } 548 | ] 549 | 'end': '(?<=\\))' 550 | 'name': 'meta.function-call.coffee' 551 | 'patterns': [ 552 | { 553 | 'include': '#arguments' 554 | } 555 | ] 556 | } 557 | { 558 | # functionCall arg1, "arg2", [...] 
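# Illustrative example: print "hello", 42   — the paren-less call ends at a keyword (then/if/...) or at end of line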
559 | 'begin': '''(?x) 560 | (@)?([\\w$]+) 561 | \\s* 562 | (?=\\s+(?!(?<![\\w$])(of|in|then|is|isnt|and|or|for|else|when|if|unless|by|instanceof)(?![\\w$]))(?=(@?[\\w$]+|[=-]>|\\-\\d|\\[|{|\"|\'))) 563 | ''' 564 | 'beginCaptures': 565 | '1': 566 | 'name': 'variable.other.readwrite.instance.coffee' 567 | '2': 568 | 'patterns': [ 569 | { 570 | 'include': '#function_names' 571 | } 572 | ] 573 | 'end': '(?=\\s*(?<![\\w$])(of|in|then|is|isnt|and|or|for|else|when|if|unless|by|instanceof)(?![\\w$]))|(?=\\s*(}|\\]|\\)|#|$))' 574 | 'name': 'meta.function-call.coffee' 575 | 'patterns': [ 576 | { 577 | 'include': '#arguments' 578 | } 579 | ] 580 | } 581 | ] 582 | 'function_names': 583 | 'patterns': [ 584 | { 585 | 'match': '''(?x) 586 | \\b(isNaN|isFinite|eval|uneval|parseInt|parseFloat|decodeURI| 587 | decodeURIComponent|encodeURI|encodeURIComponent|escape|unescape| 588 | require|set(Interval|Timeout)|clear(Interval|Timeout))\\b 589 | ''' 590 | 'name': 'support.function.coffee' 591 | } 592 | { 593 | 'match': "[a-zA-Z_$][\\w$]*" 594 | 'name': 'entity.name.function.coffee' 595 | } 596 | { 597 | 'match': '\\d[\\w$]*' 598 | 'name': 'invalid.illegal.identifier.coffee' 599 | } 600 | ] 601 | 'function_params': 602 | 'patterns': [ 603 | { 604 | 'begin': '\\(' 605 | 'beginCaptures': 606 | '0': 607 | 'name': 'punctuation.definition.parameters.begin.bracket.round.coffee' 608 | 'end': '\\)' 609 | 'endCaptures': 610 | '0': 611 | 'name': 'punctuation.definition.parameters.end.bracket.round.coffee' 612 | 'name': 'meta.parameters.coffee' 613 | 'patterns': [ 614 | { 615 | 'match': '([a-zA-Z_$][\\w$]*)(\\.\\.\\.)?' 616 | 'captures': 617 | '1': 618 | 'name': 'variable.parameter.function.coffee' 619 | '2': 620 | 'name': 'keyword.operator.splat.coffee' 621 | } 622 | { 623 | 'match': '(@(?:[a-zA-Z_$][\\w$]*)?)(\\.\\.\\.)?' 624 | 'captures': 625 | '1': 626 | 'name': 'variable.parameter.function.readwrite.instance.coffee' 627 | '2': 628 | 'name': 'keyword.operator.splat.coffee' 629 | } 630 | { 631 | 'include': '$self' 632 | } 633 | ] 634 | } 635 | ] 636 | 'embedded_comment': 637 | 'patterns': [ 638 | { 639 | 'captures': 640 | '1': 641 | 'name': 'punctuation.definition.comment.coffee' 642 | 'match': '(?<!\\\\)(#).*$\\n?' 643 | 'name': 'comment.line.number-sign.coffee' 644 | } 645 | ] 646 | 'instance_variable': 647 | 'patterns': [ 648 | { 649 | 'match': '(@)([a-zA-Z_\\$]\\w*)?' 650 | 'name': 'variable.other.readwrite.instance.coffee' 651 | } 652 | ] 653 | 'interpolated_coffee': 654 | 'patterns': [ 655 | { 656 | 'begin': '\\#\\{' 657 | 'captures': 658 | '0': 659 | 'name': 'punctuation.section.embedded.coffee' 660 | 'end': '\\}' 661 | 'name': 'source.coffee.embedded.source' 662 | 'patterns': [ 663 | { 664 | 'include': '$self' 665 | } 666 | ] 667 | } 668 | ] 669 | 'method_calls': 670 | 'patterns': [ 671 | { 672 | # .methodCall(arg1, "arg2", [...]) 673 | 'begin': '(?:(\\.)|(::))\\s*([\\w$]+)\\s*(?=\\()' 674 | 'beginCaptures': 675 | '1': 676 | 'name': 'punctuation.separator.method.period.coffee' 677 | '2': 678 | 'name': 'keyword.operator.prototype.coffee' 679 | '3': 680 | 'patterns': [ 681 | { 682 | 'include': '#method_names' 683 | } 684 | ] 685 | 'end': '(?<=\\))' 686 | 'name': 'meta.method-call.coffee' 687 | 'patterns': [ 688 | { 689 | 'include': '#arguments' 690 | } 691 | ] 692 | } 693 | { 694 | # .methodCall arg1, "arg2", [...] 
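# Illustrative examples: list.push item, other   — and prototype access such as Array::slice arr matches via the :: branch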
695 | 'begin': '(?:(\\.)|(::))\\s*([\\w$]+)\\s*(?=\\s+(?!(?<![\\w$])(of|in|then|is|isnt|and|or|for|else|when|if|unless|by|instanceof)(?![\\w$]))(?=(@|@?[\\w$]+|[=-]>|\\-\\d|\\[|{|\"|\')))' 696 | 'beginCaptures': 697 | '1': 698 | 'name': 'punctuation.separator.method.period.coffee' 699 | '2': 700 | 'name': 'keyword.operator.prototype.coffee' 701 | '3': 702 | 'patterns': [ 703 | { 704 | 'include': '#method_names' 705 | } 706 | ] 707 | 'end': '(?=\\s*(?<![\\w$])(of|in|then|is|isnt|and|or|for|else|when|if|unless|by|instanceof)(?![\\w$]))|(?=\\s*(}|\\]|\\)|#|$))' 708 | 'name': 'meta.method-call.coffee' 709 | 'patterns': [ 710 | { 711 | 'include': '#arguments' 712 | } 713 | ] 714 | } 715 | ] 716 | 'method_names': 717 | 'patterns': [ 718 | { 719 | 'match': '''(?x) 720 | \\bon(Rowsinserted|Rowsdelete|Rowenter|Rowexit|Resize|Resizestart|Resizeend|Reset| 721 | Readystatechange|Mouseout|Mouseover|Mousedown|Mouseup|Mousemove| 722 | Before(cut|deactivate|unload|update|paste|print|editfocus|activate)| 723 | Blur|Scrolltop|Submit|Select|Selectstart|Selectionchange|Hover|Help| 724 | Change|Contextmenu|Controlselect|Cut|Cellchange|Clock|Close|Deactivate| 725 | Datasetchanged|Datasetcomplete|Dataavailable|Drop|Drag|Dragstart|Dragover| 726 | Dragdrop|Dragenter|Dragend|Dragleave|Dblclick|Unload|Paste|Propertychange|Error| 727 | Errorupdate|Keydown|Keyup|Keypress|Focus|Load|Activate|Afterupdate|Afterprint|Abort)\\b 728 | ''' 729 | 'name': 'support.function.event-handler.coffee' 730 | } 731 | { 732 | 'match': '''(?x) 733 | \\b(shift|showModelessDialog|showModalDialog|showHelp|scroll|scrollX|scrollByPages| 734 | scrollByLines|scrollY|scrollTo|stop|strike|sizeToContent|sidebar|signText|sort| 735 | sup|sub|substr|substring|splice|split|send|set(Milliseconds|Seconds|Minutes|Hours| 736 | Month|Year|FullYear|Date|UTC(Milliseconds|Seconds|Minutes|Hours|Month|FullYear|Date)| 737 | Time|Hotkeys|Cursor|ZOptions|Active|Resizable|RequestHeader)|search|slice| 738 | savePreferences|small|home|handleEvent|navigate|char|charCodeAt|charAt|concat| 739 | contextual|confirm|compile|clear|captureEvents|call|createStyleSheet|createPopup| 740 | createEventObject|to(GMTString|UTCString|String|Source|UpperCase|LowerCase|LocaleString)| 741 | test|taint|taintEnabled|indexOf|italics|disableExternalCapture|dump|detachEvent|unshift| 742 | untaint|unwatch|updateCommands|join|javaEnabled|pop|push|plugins.refresh|paddings|parse| 743 | print|prompt|preference|enableExternalCapture|exec|execScript|valueOf|UTC|find|file| 744 | fileModifiedDate|fileSize|fileCreatedDate|fileUpdatedDate|fixed|fontsize|fontcolor| 745 | forward|fromCharCode|watch|link|load|lastIndexOf|anchor|attachEvent|atob|apply|alert| 746 | abort|routeEvents|resize|resizeBy|resizeTo|recalc|returnValue|replace|reverse|reload| 747 | releaseCapture|releaseEvents|go|get(Milliseconds|Seconds|Minutes|Hours|Month|Day|Year|FullYear| 748 | Time|Date|TimezoneOffset|UTC(Milliseconds|Seconds|Minutes|Hours|Day|Month|FullYear|Date)| 749 | Attention|Selection|ResponseHeader|AllResponseHeaders)|moveBy|moveBelow|moveTo| 750 | moveToAbsolute|moveAbove|mergeAttributes|match|margins|btoa|big|bold|borderWidths|blink|back)\\b 751 | ''' 752 | 'name': 'support.function.coffee' 753 | } 754 | { 755 | 'match': '''(?x) 756 | \\b(acceptNode|add|addEventListener|addTextTrack|adoptNode|after|animate|append| 757 | appendChild|appendData|before|blur|canPlayType|captureStream| 758 | caretPositionFromPoint|caretRangeFromPoint|checkValidity|clear|click| 759 | cloneContents|cloneNode|cloneRange|close|closest|collapse| 
760 | compareBoundaryPoints|compareDocumentPosition|comparePoint|contains| 761 | convertPointFromNode|convertQuadFromNode|convertRectFromNode|createAttribute| 762 | createAttributeNS|createCaption|createCDATASection|createComment| 763 | createContextualFragment|createDocument|createDocumentFragment| 764 | createDocumentType|createElement|createElementNS|createEntityReference| 765 | createEvent|createExpression|createHTMLDocument|createNodeIterator| 766 | createNSResolver|createProcessingInstruction|createRange|createShadowRoot| 767 | createTBody|createTextNode|createTFoot|createTHead|createTreeWalker|delete| 768 | deleteCaption|deleteCell|deleteContents|deleteData|deleteRow|deleteTFoot| 769 | deleteTHead|detach|disconnect|dispatchEvent|elementFromPoint|elementsFromPoint| 770 | enableStyleSheetsForSet|entries|evaluate|execCommand|exitFullscreen| 771 | exitPointerLock|expand|extractContents|fastSeek|firstChild|focus|forEach|get| 772 | getAll|getAnimations|getAttribute|getAttributeNames|getAttributeNode| 773 | getAttributeNodeNS|getAttributeNS|getBoundingClientRect|getBoxQuads| 774 | getClientRects|getContext|getDestinationInsertionPoints|getElementById| 775 | getElementsByClassName|getElementsByName|getElementsByTagName| 776 | getElementsByTagNameNS|getItem|getNamedItem|getSelection|getStartDate| 777 | getVideoPlaybackQuality|has|hasAttribute|hasAttributeNS|hasAttributes| 778 | hasChildNodes|hasFeature|hasFocus|importNode|initEvent|insertAdjacentElement| 779 | insertAdjacentHTML|insertAdjacentText|insertBefore|insertCell|insertData| 780 | insertNode|insertRow|intersectsNode|isDefaultNamespace|isEqualNode| 781 | isPointInRange|isSameNode|item|key|keys|lastChild|load|lookupNamespaceURI| 782 | lookupPrefix|matches|move|moveAttribute|moveAttributeNode|moveChild| 783 | moveNamedItem|namedItem|nextNode|nextSibling|normalize|observe|open| 784 | parentNode|pause|play|postMessage|prepend|preventDefault|previousNode| 785 | previousSibling|probablySupportsContext|queryCommandEnabled| 786 | queryCommandIndeterm|queryCommandState|queryCommandSupported|queryCommandValue| 787 | querySelector|querySelectorAll|registerContentHandler|registerElement| 788 | registerProtocolHandler|releaseCapture|releaseEvents|remove|removeAttribute| 789 | removeAttributeNode|removeAttributeNS|removeChild|removeEventListener| 790 | removeItem|replace|replaceChild|replaceData|replaceWith|reportValidity| 791 | requestFullscreen|requestPointerLock|reset|scroll|scrollBy|scrollIntoView| 792 | scrollTo|seekToNextFrame|select|selectNode|selectNodeContents|set|setAttribute| 793 | setAttributeNode|setAttributeNodeNS|setAttributeNS|setCapture| 794 | setCustomValidity|setEnd|setEndAfter|setEndBefore|setItem|setNamedItem| 795 | setRangeText|setSelectionRange|setSinkId|setStart|setStartAfter|setStartBefore| 796 | slice|splitText|stepDown|stepUp|stopImmediatePropagation|stopPropagation| 797 | submit|substringData|supports|surroundContents|takeRecords|terminate|toBlob| 798 | toDataURL|toggle|toString|values|write|writeln)\\b 799 | ''' 800 | 'name': 'support.function.dom.coffee' 801 | } 802 | { 803 | 'match': "[a-zA-Z_$][\\w$]*" 804 | 'name': 'entity.name.function.coffee' 805 | } 806 | { 807 | 'match': '\\d[\\w$]*' 808 | 'name': 'invalid.illegal.identifier.coffee' 809 | } 810 | ] 811 | 'numbers': 812 | 'patterns': [ 813 | { 814 | 'match': '\\b(?<!\\$)0(x|X)[0-9a-fA-F]+\\b(?!\\$)' 815 | 'name': 'constant.numeric.hex.coffee' 816 | } 817 | { 818 | 'match': '\\b(?<!\\$)0(b|B)[01]+\\b(?!\\$)' 819 | 'name': 'constant.numeric.binary.coffee' 820 | } 
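# Illustrative examples for this repository entry: 0x1F (hex, above), 0b1010 (binary, above), 0o17 or 017 (octal, next), 1.5e3 (decimal, below).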
821 | { 822 | 'match': '\\b(?<!\\$)0(o|O)?[0-7]+\\b(?!\\$)' 823 | 'name': 'constant.numeric.octal.coffee' 824 | } 825 | { 826 | 'match': '''(?x) 827 | (?<!\\$)(?: 828 | (?:\\b[0-9]+(\\.)[0-9]+[eE][+-]?[0-9]+\\b)| # 1.1E+3 829 | (?:\\b[0-9]+(\\.)[eE][+-]?[0-9]+\\b)| # 1.E+3 830 | (?:\\B(\\.)[0-9]+[eE][+-]?[0-9]+\\b)| # .1E+3 831 | (?:\\b[0-9]+[eE][+-]?[0-9]+\\b)| # 1E+3 832 | (?:\\b[0-9]+(\\.)[0-9]+\\b)| # 1.1 833 | (?:\\b[0-9]+(?=\\.{2,3}))| # 1 followed by a slice 834 | (?:\\b[0-9]+(\\.)\\B)| # 1. 835 | (?:\\B(\\.)[0-9]+\\b)| # .1 836 | (?:\\b[0-9]+\\b(?!\\.)) # 1 837 | )(?!\\$) 838 | ''' 839 | 'captures': 840 | '0': 841 | 'name': 'constant.numeric.decimal.coffee' 842 | '1': 843 | 'name': 'punctuation.separator.decimal.period.coffee' 844 | '2': 845 | 'name': 'punctuation.separator.decimal.period.coffee' 846 | '3': 847 | 'name': 'punctuation.separator.decimal.period.coffee' 848 | '4': 849 | 'name': 'punctuation.separator.decimal.period.coffee' 850 | '5': 851 | 'name': 'punctuation.separator.decimal.period.coffee' 852 | '6': 853 | 'name': 'punctuation.separator.decimal.period.coffee' 854 | } 855 | ] 856 | 'objects': 857 | 'patterns': [ 858 | { 859 | # OBJ in OBJ.prop, OBJ.methodCall() 860 | 'match': '[A-Z][A-Z0-9_$]*(?=\\s*\\??(\\.\\s*[a-zA-Z_$]\\w*|::))' 861 | 'name': 'constant.other.object.coffee' 862 | } 863 | { 864 | # obj in obj.prop, obj.methodCall() 865 | 'match': '[a-zA-Z_$][\\w$]*(?=\\s*\\??(\\.\\s*[a-zA-Z_$]\\w*|::))' 866 | 'name': 'variable.other.object.coffee' 867 | } 868 | ] 869 | 'operators': 870 | 'patterns': [ 871 | { 872 | 'match': '(?:([a-zA-Z$_][\\w$]*)?\\s+|(?<![\\w$]))(and=|or=)' 873 | 'captures': 874 | '1': 875 | 'name': 'variable.assignment.coffee' 876 | '2': 877 | 'name': 'keyword.operator.assignment.compound.coffee' 878 | } 879 | { 880 | 'match': '([a-zA-Z$_][\\w$]*)?\\s*(%=|\\+=|-=|\\*=|&&=|\\|\\|=|\\?=|(?<!\\()/=)' 881 | 'captures': 882 | '1': 883 | 'name': 'variable.assignment.coffee' 884 | '2': 885 | 'name': 'keyword.operator.assignment.compound.coffee' 886 | } 887 | { 888 | 'match': '([a-zA-Z$_][\\w$]*)?\\s*(&=|\\^=|<<=|>>=|>>>=|\\|=)' 889 | 'captures': 890 | '1': 891 | 'name': 'variable.assignment.coffee' 892 | '2': 893 | 'name': 'keyword.operator.assignment.compound.bitwise.coffee' 894 | } 895 | { 896 | 'match': '<<|>>>|>>' 897 | 'name': 'keyword.operator.bitwise.shift.coffee' 898 | } 899 | { 900 | 'match': '!=|<=|>=|==|<|>' 901 | 'name': 'keyword.operator.comparison.coffee' 902 | } 903 | { 904 | 'match': '&&|!|\\|\\|' 905 | 'name': 'keyword.operator.logical.coffee' 906 | } 907 | { 908 | 'match': '&|\\||\\^|~' 909 | 'name': 'keyword.operator.bitwise.coffee' 910 | } 911 | { 912 | 'match': '([a-zA-Z$_][\\w$]*)?\\s*(=|:(?!:))(?![>=])' 913 | 'captures': 914 | '1': 915 | 'name': 'variable.assignment.coffee' 916 | '2': 917 | 'name': 'keyword.operator.assignment.coffee' 918 | } 919 | { 920 | 'match': '--' 921 | 'name': 'keyword.operator.decrement.coffee' 922 | } 923 | { 924 | 'match': '\\+\\+' 925 | 'name': 'keyword.operator.increment.coffee' 926 | } 927 | { 928 | 'match': '\\.\\.\\.' 929 | 'name': 'keyword.operator.splat.coffee' 930 | } 931 | { 932 | 'match': '\\?' 
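# Illustrative example: port = options.port ? 8080   — "?" is the existential operator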
933 | 'name': 'keyword.operator.existential.coffee' 934 | } 935 | { 936 | 'match': '%|\\*|/|-|\\+' 937 | 'name': 'keyword.operator.coffee' 938 | } 939 | { 940 | 'match': '''(?x) 941 | \\b(?<![\\.\\$]) 942 | (?: 943 | (and|or|not) # logical 944 | | 945 | (is|isnt) # comparison 946 | ) 947 | (?!\\s*:)\\b 948 | ''' 949 | 'captures': 950 | '1': 951 | 'name': 'keyword.operator.logical.coffee' 952 | '2': 953 | 'name': 'keyword.operator.comparison.coffee' 954 | } 955 | ] 956 | 'properties': 957 | 'patterns': [ 958 | { 959 | # PROP1 in obj.PROP1.prop2, func().PROP1.prop2 960 | 'match': '(?:(\\.)|(::))\\s*([A-Z][A-Z0-9_$]*\\b\\$*)(?=\\s*\\??(\\.\\s*[a-zA-Z_$]\\w*|::))' 961 | 'captures': 962 | '1': 963 | 'name': 'punctuation.separator.property.period.coffee' 964 | '2': 965 | 'name': 'keyword.operator.prototype.coffee' 966 | '3': 967 | 'name': 'constant.other.object.property.coffee' 968 | } 969 | { 970 | # prop1 in obj.prop1.prop2, func().prop1.prop2 971 | 'match': '(?:(\\.)|(::))\\s*(\\$*[a-zA-Z_$][\\w$]*)(?=\\s*\\??(\\.\\s*[a-zA-Z_$]\\w*|::))' 972 | 'captures': 973 | '1': 974 | 'name': 'punctuation.separator.property.period.coffee' 975 | '2': 976 | 'name': 'keyword.operator.prototype.coffee' 977 | '3': 978 | 'name': 'variable.other.object.property.coffee' 979 | } 980 | { 981 | # PROP in obj.PROP, func().PROP 982 | 'match': '(?:(\\.)|(::))\\s*([A-Z][A-Z0-9_$]*\\b\\$*)' 983 | 'captures': 984 | '1': 985 | 'name': 'punctuation.separator.property.period.coffee' 986 | '2': 987 | 'name': 'keyword.operator.prototype.coffee' 988 | '3': 989 | 'name': 'constant.other.property.coffee' 990 | } 991 | { 992 | # prop in obj.prop, func().prop 993 | 'match': '(?:(\\.)|(::))\\s*(\\$*[a-zA-Z_$][\\w$]*)' 994 | 'captures': 995 | '1': 996 | 'name': 'punctuation.separator.property.period.coffee' 997 | '2': 998 | 'name': 'keyword.operator.prototype.coffee' 999 | '3': 1000 | 'name': 'variable.other.property.coffee' 1001 | } 1002 | { 1003 | # 123illegal in obj.123illegal, func().123illegal 1004 | 'match': '(?:(\\.)|(::))\\s*([0-9][\\w$]*)' 1005 | 'captures': 1006 | '1': 1007 | 'name': 'punctuation.separator.property.period.coffee' 1008 | '2': 1009 | 'name': 'keyword.operator.prototype.coffee' 1010 | '3': 1011 | 'name': 'invalid.illegal.identifier.coffee' 1012 | } 1013 | ] 1014 | 'single_quoted_string': 1015 | 'patterns': [ 1016 | { 1017 | 'begin': '\'' 1018 | 'beginCaptures': 1019 | '0': 1020 | 'name': 'punctuation.definition.string.begin.coffee' 1021 | 'end': '\'' 1022 | 'endCaptures': 1023 | '0': 1024 | 'name': 'punctuation.definition.string.end.coffee' 1025 | 'name': 'string.quoted.single.coffee' 1026 | 'patterns': [ 1027 | { 1028 | 'captures': 1029 | '1': 1030 | 'name': 'punctuation.definition.escape.backslash.coffee' 1031 | 'match': '(\\\\)(x[0-9A-Fa-f]{2}|[0-2][0-7]{0,2}|3[0-6][0-7]?|37[0-7]?|[4-7][0-7]?|.)' 1032 | 'name': 'constant.character.escape.backslash.coffee' 1033 | } 1034 | ] 1035 | } 1036 | ] 1037 | 'regex-character-class': 1038 | 'patterns': [ 1039 | { 1040 | 'match': '\\\\[wWsSdD]|\\.' 1041 | 'name': 'constant.character.character-class.regexp' 1042 | } 1043 | { 1044 | 'match': '\\\\([0-7]{3}|x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4})' 1045 | 'name': 'constant.character.numeric.regexp' 1046 | } 1047 | { 1048 | 'match': '\\\\c[A-Z]' 1049 | 'name': 'constant.character.control.regexp' 1050 | } 1051 | { 1052 | 'match': '\\\\.' 
1053 | 'name': 'constant.character.escape.backslash.regexp' 1054 | } 1055 | ] 1056 | 'heregexp': 1057 | 'patterns': [ 1058 | { 1059 | 'match': '\\\\[bB]|\\^|\\$' 1060 | 'name': 'keyword.control.anchor.regexp' 1061 | } 1062 | { 1063 | 'match': '\\\\[1-9]\\d*' 1064 | 'name': 'keyword.other.back-reference.regexp' 1065 | } 1066 | { 1067 | 'match': '[?+*]|\\{(\\d+,\\d+|\\d+,|,\\d+|\\d+)\\}\\??' 1068 | 'name': 'keyword.operator.quantifier.regexp' 1069 | } 1070 | { 1071 | 'match': '\\|' 1072 | 'name': 'keyword.operator.or.regexp' 1073 | } 1074 | { 1075 | 'begin': '(\\()((\\?=)|(\\?!))' 1076 | 'beginCaptures': 1077 | '1': 1078 | 'name': 'punctuation.definition.group.regexp' 1079 | '3': 1080 | 'name': 'meta.assertion.look-ahead.regexp' 1081 | '4': 1082 | 'name': 'meta.assertion.negative-look-ahead.regexp' 1083 | 'end': '(\\))' 1084 | 'endCaptures': 1085 | '1': 1086 | 'name': 'punctuation.definition.group.regexp' 1087 | 'name': 'meta.group.assertion.regexp' 1088 | 'patterns': [ 1089 | { 1090 | 'include': '#heregexp' 1091 | } 1092 | ] 1093 | } 1094 | { 1095 | 'begin': '\\((\\?:)?' 1096 | 'beginCaptures': 1097 | '0': 1098 | 'name': 'punctuation.definition.group.regexp' 1099 | 'end': '\\)' 1100 | 'endCaptures': 1101 | '0': 1102 | 'name': 'punctuation.definition.group.regexp' 1103 | 'name': 'meta.group.regexp' 1104 | 'patterns': [ 1105 | { 1106 | 'include': '#heregexp' 1107 | } 1108 | ] 1109 | } 1110 | { 1111 | 'begin': '(\\[)(\\^)?' 1112 | 'beginCaptures': 1113 | '1': 1114 | 'name': 'punctuation.definition.character-class.regexp' 1115 | '2': 1116 | 'name': 'keyword.operator.negation.regexp' 1117 | 'end': '(\\])' 1118 | 'endCaptures': 1119 | '1': 1120 | 'name': 'punctuation.definition.character-class.regexp' 1121 | 'name': 'constant.other.character-class.set.regexp' 1122 | 'patterns': [ 1123 | { 1124 | 'captures': 1125 | '1': 1126 | 'name': 'constant.character.numeric.regexp' 1127 | '2': 1128 | 'name': 'constant.character.control.regexp' 1129 | '3': 1130 | 'name': 'constant.character.escape.backslash.regexp' 1131 | '4': 1132 | 'name': 'constant.character.numeric.regexp' 1133 | '5': 1134 | 'name': 'constant.character.control.regexp' 1135 | '6': 1136 | 'name': 'constant.character.escape.backslash.regexp' 1137 | 'match': '(?:.|(\\\\(?:[0-7]{3}|x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}))|(\\\\c[A-Z])|(\\\\.))\\-(?:[^\\]\\\\]|(\\\\(?:[0-7]{3}|x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}))|(\\\\c[A-Z])|(\\\\.))' 1138 | 'name': 'constant.other.character-class.range.regexp' 1139 | } 1140 | { 1141 | 'include': '#regex-character-class' 1142 | } 1143 | ] 1144 | } 1145 | { 1146 | 'include': '#regex-character-class' 1147 | } 1148 | { 1149 | 'include': '#interpolated_coffee' 1150 | } 1151 | { 1152 | 'include': '#embedded_comment' 1153 | } 1154 | ] 1155 | 1156 | 'jsx': 1157 | 'patterns': [ 1158 | { 1159 | 'include': '#jsx-tag' 1160 | } 1161 | { 1162 | 'include': '#jsx-end-tag' 1163 | } 1164 | ] 1165 | 1166 | 'jsx-expression': 1167 | 'begin': '{' 1168 | 'beginCaptures': 1169 | '0': 1170 | 'name': 'meta.brace.curly.coffee' 1171 | 'end': '}' 1172 | 'endCaptures': 1173 | '0': 1174 | 'name': 'meta.brace.curly.coffee' 1175 | 1176 | 'patterns': [ 1177 | { 1178 | 'include': '#double_quoted_string' 1179 | } 1180 | { 1181 | 'include': '$self' 1182 | } 1183 | ] 1184 | 1185 | 'jsx-attribute': 1186 | 'patterns': [ 1187 | { 1188 | 'captures': 1189 | '1': 1190 | 'name': 'entity.other.attribute-name.coffee' 1191 | '2': 1192 | 'name': 'keyword.operator.assignment.coffee' 1193 | 'match': '(?:^|\\s+)([-\\w.]+)\\s*(=)' 1194 | } 1195 | { 1196 | 'include': 
'#double_quoted_string' 1197 | } 1198 | { 1199 | 'include': '#single_quoted_string' 1200 | } 1201 | { 1202 | 'include': '#jsx-expression' 1203 | } 1204 | ] 1205 | 1206 | 'jsx-tag': 1207 | 'patterns': [ 1208 | { 1209 | 'begin': '(<)([-\\w\\.]+)' 1210 | 'beginCaptures': 1211 | '1': 1212 | 'name': 'punctuation.definition.tag.coffee' 1213 | '2': 1214 | 'name': 'entity.name.tag.coffee' 1215 | 'end': '(/?>)' 1216 | 'name': 'meta.tag.coffee' 1217 | 'patterns': [ 1218 | 'include': '#jsx-attribute' 1219 | ] 1220 | } 1221 | ] 1222 | 1223 | 'jsx-end-tag': 1224 | 'patterns': [ 1225 | { 1226 | 'begin': '(</)([-\\w\\.]+)' 1227 | 'beginCaptures': 1228 | '1': 1229 | 'name': 'punctuation.definition.tag.coffee' 1230 | '2': 1231 | 'name': 'entity.name.tag.coffee' 1232 | 'end': '(/?>)' 1233 | 'name': 'meta.tag.coffee' 1234 | } 1235 | ] 1236 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "language-coffee-script", 3 | "version": "0.50.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "balanced-match": { 8 | "version": "1.0.0", 9 | "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", 10 | "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", 11 | "dev": true 12 | }, 13 | "brace-expansion": { 14 | "version": "1.1.11", 15 | "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", 16 | "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", 17 | "dev": true, 18 | "requires": { 19 | "balanced-match": "^1.0.0", 20 | "concat-map": "0.0.1" 21 | } 22 | }, 23 | "coffee-script": { 24 | "version": "1.11.1", 25 | "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.11.1.tgz", 26 | "integrity": "sha1-vxxHrWREOg2V0S3ysUfMCk2q1uk=", 27 | "dev": true 28 | }, 29 | "coffeelint": { 30 | "version": "1.16.2", 31 | "resolved": "https://registry.npmjs.org/coffeelint/-/coffeelint-1.16.2.tgz", 32 | "integrity": "sha512-6mzgOo4zb17WfdrSui/cSUEgQ0AQkW3gXDht+6lHkfkqGUtSYKwGdGcXsDfAyuScVzTlTtKdfwkAlJWfqul7zg==", 33 | "dev": true, 34 | "requires": { 35 | "coffee-script": "~1.11.0", 36 | "glob": "^7.0.6", 37 | "ignore": "^3.0.9", 38 | "optimist": "^0.6.1", 39 | "resolve": "^0.6.3", 40 | "strip-json-comments": "^1.0.2" 41 | } 42 | }, 43 | "concat-map": { 44 | "version": "0.0.1", 45 | "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", 46 | "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", 47 | "dev": true 48 | }, 49 | "fs.realpath": { 50 | "version": "1.0.0", 51 | "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", 52 | "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", 53 | "dev": true 54 | }, 55 | "glob": { 56 | "version": "7.1.3", 57 | "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", 58 | "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", 59 | "dev": true, 60 | "requires": { 61 | "fs.realpath": "^1.0.0", 62 | "inflight": "^1.0.4", 63 | "inherits": "2", 64 | "minimatch": "^3.0.4", 65 | "once": "^1.3.0", 66 | "path-is-absolute": "^1.0.0" 67 | } 68 | }, 69 | "ignore": { 70 | "version": "3.3.10", 71 | "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", 72 | "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", 73 | "dev": true 74 | }, 75 | 
"inflight": { 76 | "version": "1.0.6", 77 | "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", 78 | "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", 79 | "dev": true, 80 | "requires": { 81 | "once": "^1.3.0", 82 | "wrappy": "1" 83 | } 84 | }, 85 | "inherits": { 86 | "version": "2.0.3", 87 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", 88 | "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", 89 | "dev": true 90 | }, 91 | "minimatch": { 92 | "version": "3.0.4", 93 | "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", 94 | "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", 95 | "dev": true, 96 | "requires": { 97 | "brace-expansion": "^1.1.7" 98 | } 99 | }, 100 | "minimist": { 101 | "version": "0.0.10", 102 | "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", 103 | "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=", 104 | "dev": true 105 | }, 106 | "once": { 107 | "version": "1.4.0", 108 | "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", 109 | "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", 110 | "dev": true, 111 | "requires": { 112 | "wrappy": "1" 113 | } 114 | }, 115 | "optimist": { 116 | "version": "0.6.1", 117 | "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", 118 | "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", 119 | "dev": true, 120 | "requires": { 121 | "minimist": "~0.0.1", 122 | "wordwrap": "~0.0.2" 123 | } 124 | }, 125 | "path-is-absolute": { 126 | "version": "1.0.1", 127 | "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", 128 | "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", 129 | "dev": true 130 | }, 131 | "resolve": { 132 | "version": "0.6.3", 133 | "resolved": "https://registry.npmjs.org/resolve/-/resolve-0.6.3.tgz", 134 | "integrity": "sha1-3ZV5gufnNt699TtYpN2RdUV13UY=", 135 | "dev": true 136 | }, 137 | "strip-json-comments": { 138 | "version": "1.0.4", 139 | "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz", 140 | "integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=", 141 | "dev": true 142 | }, 143 | "wordwrap": { 144 | "version": "0.0.3", 145 | "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", 146 | "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", 147 | "dev": true 148 | }, 149 | "wrappy": { 150 | "version": "1.0.2", 151 | "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", 152 | "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", 153 | "dev": true 154 | } 155 | } 156 | } 157 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.50.0", 3 | "name": "language-coffee-script", 4 | "description": "CoffeeScript language support in Atom", 5 | "license": "MIT", 6 | "engines": { 7 | "atom": "*", 8 | "node": "*" 9 | }, 10 | "homepage": "http://atom.github.io/language-coffee-script", 11 | "repository": { 12 | "type": "git", 13 | "url": "https://github.com/atom/language-coffee-script.git" 14 | }, 15 | "bugs": { 16 | "url": "https://github.com/atom/language-coffee-script/issues" 17 | }, 18 | "devDependencies": { 19 | "coffeelint": "^1.10.1" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /settings/language-coffee-script.cson: 
-------------------------------------------------------------------------------- 1 | '.source.coffee, .source.litcoffee, .source.coffee.md': 2 | 'editor': 3 | 'commentStart': '# ' 4 | '.source.coffee': 5 | 'editor': 6 | 'autoIndentOnPaste': false 7 | 'increaseIndentPattern': '(?x) 8 | ^\\s* 9 | ( 10 | .*\\b(?<!\\.)class(\\s|$) 11 | | [a-zA-Z\\$_](\\w|\\$|:|\\.)*\\s*(?=\\:(\\s*\\(.*\\))?\\s*((=|-)>\\s*$)) 12 | | [a-zA-Z\\$_](\\w|\\$|\\.)*\\s*(:|=)\\s*((if|while)(?!.*?then)|for|$) 13 | | \\b(if|else|unless|while|when)\\b(?!.*?then)|\\b(for|loop)\\b 14 | | \\b(try|finally|catch|((catch|switch)\\s+\\S.*))\\b\\s*$ 15 | | .*[-=]>\\s*$ 16 | | .*[\\{\\[]\\s*$ 17 | | .*:\\s*$ 18 | )' 19 | 'decreaseIndentPattern': '^\\s*(\\}|\\]|\\b(else|catch|finally)\\b)$' 20 | -------------------------------------------------------------------------------- /snippets/language-coffee-script.cson: -------------------------------------------------------------------------------- 1 | '.source.coffee': 2 | 'Array Comprehension': 3 | 'prefix': 'fora' 4 | 'body': 'for ${1:name} in ${2:array}\n ${0:# body...}' 5 | 'Function (bound)': 6 | 'prefix': 'bfun' 7 | 'body': '(${1:args}) =>\n ${0:# body...}' 8 | 'Class': 9 | 'prefix': 'cla' 10 | 'body': 'class ${1:ClassName}${2: extends ${3:Ancestor}}\n\n ${4:constructor: (${5:args}) ->\n ${6:# body...}}\n $7' 11 | 'Else if': 12 | 'prefix': 'elif' 13 | 'body': 'else if ${1:condition}\n ${0:# body...}' 14 | 'Function': 15 | 'prefix': 'fun' 16 | 'body': '(${1:args}) ->\n ${0:# body...}\n\n' 17 | 'If .. Else': 18 | 'prefix': 'ife' 19 | 'body': 'if ${1:condition}\n ${2:# body...}\nelse\n ${3:# body...}' 20 | 'If': 21 | 'prefix': 'if' 22 | 'body': 'if ${1:condition}\n ${0:# body...}' 23 | 'Object comprehension': 24 | 'prefix': 'foro' 25 | 'body': 'for ${1:key}, ${2:value} of ${3:Object}\n ${0:# body...}' 26 | 'Range comprehension (exclusive)': 27 | 'prefix': 'forrex' 28 | 'body': 'for ${1:name} in [${2:start}...${3:finish}]${4: by ${5:step}}\n ${0:# body...}' 29 | 'Range comprehension (inclusive)': 30 | 'prefix': 'forr' 31 | 'body': 'for ${1:name} in [${2:start}..${3:finish}]${4: by ${5:step}}\n ${0:# body...}' 32 | 'Switch': 33 | 'prefix': 'swi' 34 | 'body': 'switch ${1:object}\n when ${2:value}\n ${0:# body...}' 35 | 'Ternary If': 36 | 'prefix': 'ifte' 37 | 'body': 'if ${1:condition} then ${2:value} else ${3:other}' 38 | 'Try .. 
Catch': 39 | 'prefix': 'try' 40 | 'body': 'try\n $1\ncatch ${2:error}\n $3' 41 | 'Unless': 42 | 'prefix': 'unl' 43 | 'body': '${1:action} unless ${2:condition}' 44 | 'Subheader': 45 | 'prefix': '/3' 46 | 'body': '# $1\n# -------------------------\n$0' 47 | 'log': 48 | 'prefix': 'log' 49 | 'body': 'console.log $0' 50 | 'warn': 51 | 'prefix': 'warn' 52 | 'body': 'console.warn $0' 53 | 'error': 54 | 'prefix': 'error' 55 | 'body': 'console.error $0' 56 | 'require': 57 | 'prefix': 'req' 58 | 'body': '${1:sys} $3= require \'${2:${1:sys}}\'$4' 59 | 'Describe block': 60 | 'prefix': 'de', 61 | 'body': 'describe "${1:description}", ->\n ${2:body}' 62 | 'It block': 63 | 'prefix': 'i', 64 | 'body': 'it "$1", ->\n $2' 65 | 'Before each': 66 | 'prefix': 'be', 67 | 'body': 'beforeEach ->\n $1' 68 | 'After each': 69 | 'prefix': 'af', 70 | 'body': 'afterEach ->\n $1' 71 | 'Expectation': 72 | 'prefix': 'ex', 73 | 'body': 'expect($1).to$2' 74 | 'Range array': 75 | 'prefix': 'ra', 76 | 'body': '[[$1, $2], [$3, $4]]' 77 | 'Point array': 78 | 'prefix': 'pt', 79 | 'body': '[$1, $2]' 80 | 'Key-value pair': 81 | 'prefix': 'kv', 82 | 'body': '${1:\'${2:key}\'}: ${3:value}' 83 | 'Create Jasmine spy': 84 | 'prefix': 'spy', 85 | 'body': 'jasmine.createSpy(\'${1:description}\')$2' 86 | '.string.quoted.double.coffee:not(.string .source), .string.quoted.double.heredoc.coffee:not(.string .source)': 87 | 'Interpolated Code': 88 | 'prefix': '#' 89 | 'body': '#{$1}$2' 90 | -------------------------------------------------------------------------------- /spec/coffee-script-literate-spec.coffee: -------------------------------------------------------------------------------- 1 | describe "CoffeeScript (Literate) grammar", -> 2 | grammar = null 3 | 4 | beforeEach -> 5 | waitsForPromise -> 6 | atom.packages.activatePackage("language-coffee-script") 7 | 8 | runs -> 9 | grammar = atom.grammars.grammarForScopeName("source.litcoffee") 10 | 11 | it "parses the grammar", -> 12 | expect(grammar).toBeTruthy() 13 | expect(grammar.scopeName).toBe "source.litcoffee" 14 | 15 | it "recognizes a code block after a list", -> 16 | tokens = grammar.tokenizeLines ''' 17 | 1. Example 18 | 2. 
List 19 | 20 | 1 + 2 21 | ''' 22 | expect(tokens[3][1]).toEqual value: "1", scopes: ["source.litcoffee", "markup.raw.block.markdown", "constant.numeric.decimal.coffee"] 23 | 24 | describe "firstLineMatch", -> 25 | it "recognises interpreter directives", -> 26 | valid = """ 27 | #!/usr/local/bin/coffee --no-header --literate -w 28 | #!/usr/local/bin/coffee -l 29 | #!/usr/local/bin/env coffee --literate -w 30 | """ 31 | for line in valid.split /\n/ 32 | expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() 33 | 34 | invalid = """ 35 | #!/usr/local/bin/coffee --no-head -literate -w 36 | #!/usr/local/bin/coffee --wl 37 | #!/usr/local/bin/env coffee --illiterate -w=l 38 | """ 39 | for line in invalid.split /\n/ 40 | expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() 41 | 42 | it "recognises Emacs modelines", -> 43 | valid = """ 44 | #-*- litcoffee -*- 45 | #-*- mode: litcoffee -*- 46 | /* -*-litcoffee-*- */ 47 | // -*- litcoffee -*- 48 | /* -*- mode:LITCOFFEE -*- */ 49 | // -*- font:bar;mode:LitCoffee -*- 50 | // -*- font:bar;mode:litcoffee;foo:bar; -*- 51 | // -*-font:mode;mode:litcoffee-*- 52 | // -*- foo:bar mode: litcoffee bar:baz -*- 53 | " -*-foo:bar;mode:litcoffee;bar:foo-*- "; 54 | " -*-font-mode:foo;mode:LITcofFEE;foo-bar:quux-*-" 55 | "-*-font:x;foo:bar; mode : litCOFFEE; bar:foo;foooooo:baaaaar;fo:ba;-*-"; 56 | "-*- font:x;foo : bar ; mode : LiTcOFFEe ; bar : foo ; foooooo:baaaaar;fo:ba-*-"; 57 | """ 58 | for line in valid.split /\n/ 59 | expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() 60 | 61 | invalid = """ 62 | /* --*litcoffee-*- */ 63 | /* -*-- litcoffee -*- 64 | /* -*- -- litcoffee -*- 65 | /* -*- LITCOFFEE -;- -*- 66 | // -*- itsLitCoffeeFam -*- 67 | // -*- litcoffee; -*- 68 | // -*- litcoffee-stuff -*- 69 | /* -*- model:litcoffee -*- 70 | /* -*- indent-mode:litcoffee -*- 71 | // -*- font:mode;litcoffee -*- 72 | // -*- mode: -*- litcoffee 73 | // -*- mode: burnt-because-litcoffee -*- 74 | // -*-font:mode;mode:litcoffee--*- 75 | """ 76 | for line in invalid.split /\n/ 77 | expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() 78 | 79 | it "recognises Vim modelines", -> 80 | valid = """ 81 | vim: se filetype=litcoffee: 82 | # vim: se ft=litcoffee: 83 | # vim: set ft=LITCOFFEE: 84 | # vim: set filetype=litcoffee: 85 | # vim: ft=LITCOFFEE 86 | # vim: syntax=litcoffee 87 | # vim: se syntax=litcoffee: 88 | # ex: syntax=litcoffee 89 | # vim:ft=LitCoffee 90 | # vim600: ft=litcoffee 91 | # vim>600: set ft=litcoffee: 92 | # vi:noai:sw=3 ts=6 ft=litcoffee 93 | # vi::::::::::noai:::::::::::: ft=litcoffee 94 | # vim:ts=4:sts=4:sw=4:noexpandtab:ft=LITCOFFEE 95 | # vi:: noai : : : : sw =3 ts =6 ft =litCoffee 96 | # vim: ts=4: pi sts=4: ft=litcoffee: noexpandtab: sw=4: 97 | # vim: ts=4 sts=4: ft=litcoffee noexpandtab: 98 | # vim:noexpandtab sts=4 ft=LitCOffEE ts=4 99 | # vim:noexpandtab:ft=litcoffee 100 | # vim:ts=4:sts=4 ft=litcoffee:noexpandtab:\x20 101 | # vim:noexpandtab titlestring=hi\|there\\\\ ft=litcoffee ts=4 102 | """ 103 | for line in valid.split /\n/ 104 | expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() 105 | 106 | invalid = """ 107 | ex: se filetype=litcoffee: 108 | _vi: se filetype=litcoffee: 109 | vi: se filetype=litcoffee 110 | # vim set ft=illitcoffee 111 | # vim: soft=litcoffee 112 | # vim: clean-syntax=litcoffee: 113 | # vim set ft=litcoffee: 114 | # vim: setft=litcoffee: 115 | # vim: se ft=litcoffee backupdir=tmp 116 | # vim: set ft=LITCOFFEE set 
cmdheight=1 117 | # vim:noexpandtab sts:4 ft:litcoffee ts:4 118 | # vim:noexpandtab titlestring=hi\\|there\\ ft=litcoffee ts=4 119 | # vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=litcoffee ts=4 120 | """ 121 | for line in invalid.split /\n/ 122 | expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() 123 | -------------------------------------------------------------------------------- /spec/coffee-script-spec.coffee: -------------------------------------------------------------------------------- 1 | fs = require 'fs' 2 | path = require 'path' 3 | 4 | describe "CoffeeScript grammar", -> 5 | grammar = null 6 | 7 | beforeEach -> 8 | waitsForPromise -> 9 | atom.packages.activatePackage("language-coffee-script") 10 | 11 | runs -> 12 | grammar = atom.grammars.grammarForScopeName("source.coffee") 13 | 14 | it "parses the grammar", -> 15 | expect(grammar).toBeTruthy() 16 | expect(grammar.scopeName).toBe "source.coffee" 17 | 18 | it "tokenizes classes", -> 19 | {tokens} = grammar.tokenizeLine("class Foo") 20 | 21 | expect(tokens[0]).toEqual value: "class", scopes: ["source.coffee", "meta.class.coffee", "storage.type.class.coffee"] 22 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] 23 | expect(tokens[2]).toEqual value: "Foo", scopes: ["source.coffee", "meta.class.coffee", "entity.name.type.class.coffee"] 24 | 25 | {tokens} = grammar.tokenizeLine("class_ Foo") 26 | expect(tokens[0]).toEqual value: "class_", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] 27 | 28 | {tokens} = grammar.tokenizeLine("_class Foo") 29 | expect(tokens[0]).toEqual value: "_class", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] 30 | 31 | {tokens} = grammar.tokenizeLine("[class Foo]") 32 | expect(tokens[0]).toEqual value: "[", scopes: ["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] 33 | expect(tokens[1]).toEqual value: "class", scopes: ["source.coffee", "meta.class.coffee", "storage.type.class.coffee"] 34 | expect(tokens[2]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] 35 | expect(tokens[3]).toEqual value: "Foo", scopes: ["source.coffee", "meta.class.coffee", "entity.name.type.class.coffee"] 36 | expect(tokens[4]).toEqual value: "]", scopes: ["source.coffee", "punctuation.definition.array.end.bracket.square.coffee"] 37 | 38 | {tokens} = grammar.tokenizeLine("bar(class Foo)") 39 | expect(tokens[0]).toEqual value: "bar", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] 40 | expect(tokens[1]).toEqual value: "(", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "punctuation.definition.arguments.begin.bracket.round.coffee"] 41 | expect(tokens[2]).toEqual value: "class", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "meta.class.coffee", "storage.type.class.coffee"] 42 | expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "meta.class.coffee"] 43 | expect(tokens[4]).toEqual value: "Foo", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "meta.class.coffee", "entity.name.type.class.coffee"] 44 | expect(tokens[5]).toEqual value: ")", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "punctuation.definition.arguments.end.bracket.round.coffee"] 45 | 46 | it "tokenizes named subclasses", -> 47 | {tokens} = 
grammar.tokenizeLine("class Foo extends Bar") 48 | 49 | expect(tokens[0]).toEqual value: "class", scopes: ["source.coffee", "meta.class.coffee", "storage.type.class.coffee"] 50 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] 51 | expect(tokens[2]).toEqual value: "Foo", scopes: ["source.coffee", "meta.class.coffee", "entity.name.type.class.coffee"] 52 | expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] 53 | expect(tokens[4]).toEqual value: "extends", scopes: ["source.coffee", "meta.class.coffee", "keyword.control.inheritance.coffee"] 54 | expect(tokens[5]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] 55 | expect(tokens[6]).toEqual value: "Bar", scopes: ["source.coffee", "meta.class.coffee", "entity.other.inherited-class.coffee"] 56 | 57 | it "tokenizes anonymous subclasses", -> 58 | {tokens} = grammar.tokenizeLine("class extends Foo") 59 | 60 | expect(tokens[0]).toEqual value: "class", scopes: ["source.coffee", "meta.class.coffee", "storage.type.class.coffee"] 61 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] 62 | expect(tokens[2]).toEqual value: "extends", scopes: ["source.coffee", "meta.class.coffee", "keyword.control.inheritance.coffee"] 63 | expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] 64 | expect(tokens[4]).toEqual value: "Foo", scopes: ["source.coffee", "meta.class.coffee", "entity.other.inherited-class.coffee"] 65 | 66 | it "tokenizes instantiated anonymous classes", -> 67 | {tokens} = grammar.tokenizeLine("new class") 68 | 69 | expect(tokens[0]).toEqual value: "new", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "keyword.operator.new.coffee"] 70 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.class.instance.constructor.coffee"] 71 | expect(tokens[2]).toEqual value: "class", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "storage.type.class.coffee"] 72 | 73 | it "tokenizes instantiated named classes", -> 74 | {tokens} = grammar.tokenizeLine("new class Foo") 75 | 76 | expect(tokens[0]).toEqual value: "new", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "keyword.operator.new.coffee"] 77 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.class.instance.constructor.coffee"] 78 | expect(tokens[2]).toEqual value: "class", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "storage.type.class.coffee"] 79 | expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.class.instance.constructor.coffee"] 80 | expect(tokens[4]).toEqual value: "Foo", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "entity.name.type.instance.coffee"] 81 | 82 | {tokens} = grammar.tokenizeLine("new Foo") 83 | 84 | expect(tokens[0]).toEqual value: "new", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "keyword.operator.new.coffee"] 85 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.class.instance.constructor.coffee"] 86 | expect(tokens[2]).toEqual value: "Foo", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "entity.name.type.instance.coffee"] 87 | 88 | it "tokenizes class names that start with `class` correctly", -> 89 | {tokens} = grammar.tokenizeLine("new classTest") 90 | 91 | expect(tokens[0]).toEqual value: "new", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "keyword.operator.new.coffee"] 92 | 
expect(tokens[2]).toEqual value: "classTest", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "entity.name.type.instance.coffee"] 93 | 94 | it "tokenizes comments", -> 95 | {tokens} = grammar.tokenizeLine("# I am a comment") 96 | 97 | expect(tokens[0]).toEqual value: "#", scopes: ["source.coffee", "comment.line.number-sign.coffee", "punctuation.definition.comment.coffee"] 98 | expect(tokens[1]).toEqual value: " I am a comment", scopes: ["source.coffee", "comment.line.number-sign.coffee"] 99 | 100 | {tokens} = grammar.tokenizeLine("\#{Comment}") 101 | 102 | expect(tokens[0]).toEqual value: "#", scopes: ["source.coffee", "comment.line.number-sign.coffee", "punctuation.definition.comment.coffee"] 103 | expect(tokens[1]).toEqual value: "{Comment}", scopes: ["source.coffee", "comment.line.number-sign.coffee"] 104 | 105 | it "tokenizes block comments", -> 106 | lines = grammar.tokenizeLines """ 107 | ### I am a block comment 108 | Very blocky 109 | Until here 110 | ### 111 | """ 112 | expect(lines[0][0]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] 113 | expect(lines[0][1]).toEqual value: ' I am a block comment', scopes: ['source.coffee', 'comment.block.coffee'] 114 | expect(lines[2][0]).toEqual value: 'Until here', scopes: ['source.coffee', 'comment.block.coffee'] 115 | expect(lines[3][0]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] 116 | 117 | {tokens} = grammar.tokenizeLine "identity = ###::<T>### (value ###: T ###) ###: T ### ->" 118 | expect(tokens[0]).toEqual value: 'identity', scopes: ['source.coffee', 'variable.assignment.coffee'] 119 | expect(tokens[4]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] 120 | expect(tokens[5]).toEqual value: '::<T>', scopes: ['source.coffee', 'comment.block.coffee'] 121 | expect(tokens[6]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] 122 | expect(tokens[9]).toEqual value: 'value ', scopes: ['source.coffee'] # TODO: These scopes are incorrect and should be fixed 123 | expect(tokens[10]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] 124 | expect(tokens[11]).toEqual value: ': T ', scopes: ['source.coffee', 'comment.block.coffee'] 125 | expect(tokens[12]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] 126 | expect(tokens[14]).toEqual value: ' ', scopes: ['source.coffee'] # TODO: These scopes are incorrect and should be fixed 127 | expect(tokens[15]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] 128 | expect(tokens[16]).toEqual value: ': T ', scopes: ['source.coffee', 'comment.block.coffee'] 129 | expect(tokens[17]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] 130 | expect(tokens[19]).toEqual value: '->', scopes: ['source.coffee', 'meta.function.inline.coffee', 'storage.type.function.coffee'] 131 | 132 | it "tokenizes annotations in block comments", -> 133 | lines = grammar.tokenizeLines """ 134 | ### 135 | @foo - food 136 | @bar - bart 137 | """ 138 | 139 | expect(lines[1][0]).toEqual value: ' ', scopes: ["source.coffee", "comment.block.coffee"] 140 | expect(lines[1][1]).toEqual value: '@foo', scopes: 
["source.coffee", "comment.block.coffee", "storage.type.annotation.coffee"] 141 | expect(lines[2][0]).toEqual value: '@bar', scopes: ["source.coffee", "comment.block.coffee", "storage.type.annotation.coffee"] 142 | 143 | describe "numbers", -> 144 | it "tokenizes hexadecimals", -> 145 | {tokens} = grammar.tokenizeLine('0x1D306') 146 | expect(tokens[0]).toEqual value: '0x1D306', scopes: ['source.coffee', 'constant.numeric.hex.coffee'] 147 | 148 | {tokens} = grammar.tokenizeLine('0X1D306') 149 | expect(tokens[0]).toEqual value: '0X1D306', scopes: ['source.coffee', 'constant.numeric.hex.coffee'] 150 | 151 | it "tokenizes binary literals", -> 152 | {tokens} = grammar.tokenizeLine('0b011101110111010001100110') 153 | expect(tokens[0]).toEqual value: '0b011101110111010001100110', scopes: ['source.coffee', 'constant.numeric.binary.coffee'] 154 | 155 | {tokens} = grammar.tokenizeLine('0B011101110111010001100110') 156 | expect(tokens[0]).toEqual value: '0B011101110111010001100110', scopes: ['source.coffee', 'constant.numeric.binary.coffee'] 157 | 158 | it "tokenizes octal literals", -> 159 | {tokens} = grammar.tokenizeLine('0o1411') 160 | expect(tokens[0]).toEqual value: '0o1411', scopes: ['source.coffee', 'constant.numeric.octal.coffee'] 161 | 162 | {tokens} = grammar.tokenizeLine('0O1411') 163 | expect(tokens[0]).toEqual value: '0O1411', scopes: ['source.coffee', 'constant.numeric.octal.coffee'] 164 | 165 | {tokens} = grammar.tokenizeLine('0010') 166 | expect(tokens[0]).toEqual value: '0010', scopes: ['source.coffee', 'constant.numeric.octal.coffee'] 167 | 168 | it "tokenizes decimals", -> 169 | {tokens} = grammar.tokenizeLine('1234') 170 | expect(tokens[0]).toEqual value: '1234', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] 171 | 172 | {tokens} = grammar.tokenizeLine('5e-10') 173 | expect(tokens[0]).toEqual value: '5e-10', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] 174 | 175 | {tokens} = grammar.tokenizeLine('5E+5') 176 | expect(tokens[0]).toEqual value: '5E+5', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] 177 | 178 | {tokens} = grammar.tokenizeLine('9.') 179 | expect(tokens[0]).toEqual value: '9', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] 180 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'constant.numeric.decimal.coffee', 'punctuation.separator.decimal.period.coffee'] 181 | 182 | {tokens} = grammar.tokenizeLine('.9') 183 | expect(tokens[0]).toEqual value: '.', scopes: ['source.coffee', 'constant.numeric.decimal.coffee', 'punctuation.separator.decimal.period.coffee'] 184 | expect(tokens[1]).toEqual value: '9', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] 185 | 186 | {tokens} = grammar.tokenizeLine('9.9') 187 | expect(tokens[0]).toEqual value: '9', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] 188 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'constant.numeric.decimal.coffee', 'punctuation.separator.decimal.period.coffee'] 189 | expect(tokens[2]).toEqual value: '9', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] 190 | 191 | {tokens} = grammar.tokenizeLine('.1e-23') 192 | expect(tokens[0]).toEqual value: '.', scopes: ['source.coffee', 'constant.numeric.decimal.coffee', 'punctuation.separator.decimal.period.coffee'] 193 | expect(tokens[1]).toEqual value: '1e-23', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] 194 | 195 | {tokens} = grammar.tokenizeLine('1.E3') 196 | expect(tokens[0]).toEqual value: '1', scopes: ['source.coffee', 
'constant.numeric.decimal.coffee'] 197 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'constant.numeric.decimal.coffee', 'punctuation.separator.decimal.period.coffee'] 198 | expect(tokens[2]).toEqual value: 'E3', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] 199 | 200 | it "does not tokenize numbers that are part of a variable", -> 201 | {tokens} = grammar.tokenizeLine('hi$1') 202 | expect(tokens[0]).toEqual value: 'hi$1', scopes: ['source.coffee'] 203 | 204 | {tokens} = grammar.tokenizeLine('hi_1') 205 | expect(tokens[0]).toEqual value: 'hi_1', scopes: ['source.coffee'] 206 | 207 | it "tokenizes variable assignments", -> 208 | {tokens} = grammar.tokenizeLine("something = b") 209 | expect(tokens[0]).toEqual value: "something", scopes: ["source.coffee", "variable.assignment.coffee"] 210 | expect(tokens[2]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] 211 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 212 | 213 | {tokens} = grammar.tokenizeLine("something : b") 214 | expect(tokens[0]).toEqual value: "something", scopes: ["source.coffee", "variable.assignment.coffee"] 215 | expect(tokens[2]).toEqual value: ":", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] 216 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 217 | 218 | {tokens} = grammar.tokenizeLine("a and= b") 219 | expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] 220 | expect(tokens[2]).toEqual value: "and=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] 221 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 222 | 223 | # Should NOT be tokenized as and= 224 | {tokens} = grammar.tokenizeLine("operand=true") 225 | expect(tokens[0]).toEqual value: "operand", scopes: ["source.coffee", "variable.assignment.coffee"] 226 | expect(tokens[1]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] 227 | 228 | {tokens} = grammar.tokenizeLine("a or= b") 229 | expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] 230 | expect(tokens[2]).toEqual value: "or=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] 231 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 232 | 233 | # Should NOT be tokenized as or= 234 | {tokens} = grammar.tokenizeLine("editor=false") 235 | expect(tokens[0]).toEqual value: "editor", scopes: ["source.coffee", "variable.assignment.coffee"] 236 | expect(tokens[1]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] 237 | 238 | {tokens} = grammar.tokenizeLine("a -= b") 239 | expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] 240 | expect(tokens[2]).toEqual value: "-=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] 241 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 242 | 243 | {tokens} = grammar.tokenizeLine("a += b") 244 | expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] 245 | expect(tokens[2]).toEqual value: "+=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] 246 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 247 | 248 | {tokens} = grammar.tokenizeLine("a /= b") 249 | expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] 250 | expect(tokens[2]).toEqual value: 
"/=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] 251 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 252 | 253 | {tokens} = grammar.tokenizeLine("a &= b") 254 | expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] 255 | expect(tokens[2]).toEqual value: "&=", scopes: ["source.coffee", "keyword.operator.assignment.compound.bitwise.coffee"] 256 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 257 | 258 | {tokens} = grammar.tokenizeLine("a %= b") 259 | expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] 260 | expect(tokens[2]).toEqual value: "%=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] 261 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 262 | 263 | {tokens} = grammar.tokenizeLine("a *= b") 264 | expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] 265 | expect(tokens[2]).toEqual value: "*=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] 266 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 267 | 268 | {tokens} = grammar.tokenizeLine("a ?= b") 269 | expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] 270 | expect(tokens[2]).toEqual value: "?=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] 271 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 272 | 273 | {tokens} = grammar.tokenizeLine("a == b") 274 | expect(tokens[0]).toEqual value: "a ", scopes: ["source.coffee"] 275 | expect(tokens[1]).toEqual value: "==", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] 276 | expect(tokens[2]).toEqual value: " b", scopes: ["source.coffee"] 277 | 278 | {tokens} = grammar.tokenizeLine("false == b") 279 | expect(tokens[0]).toEqual value: "false", scopes: ["source.coffee", "constant.language.boolean.false.coffee"] 280 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee"] 281 | expect(tokens[2]).toEqual value: "==", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] 282 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 283 | 284 | {tokens} = grammar.tokenizeLine("true == b") 285 | expect(tokens[0]).toEqual value: "true", scopes: ["source.coffee", "constant.language.boolean.true.coffee"] 286 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee"] 287 | expect(tokens[2]).toEqual value: "==", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] 288 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 289 | 290 | {tokens} = grammar.tokenizeLine("null == b") 291 | expect(tokens[0]).toEqual value: "null", scopes: ["source.coffee", "constant.language.null.coffee"] 292 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee"] 293 | expect(tokens[2]).toEqual value: "==", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] 294 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 295 | 296 | {tokens} = grammar.tokenizeLine("this == b") 297 | expect(tokens[0]).toEqual value: "this", scopes: ["source.coffee", "variable.language.this.coffee"] 298 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee"] 299 | expect(tokens[2]).toEqual value: "==", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] 300 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 301 | 302 | it "tokenizes compound operators 
properly", -> 303 | assignmentOperators = ["and=", "or=", "&&=", "||=", "/=", "*=", "%=", "+=", "-="] 304 | bitwiseOperators = ["<<=", ">>=", ">>>=", "&=", "|=", "^="] 305 | comparisonOperators = ["==", "!=", "<=", ">="] 306 | 307 | for assignmentOperator in assignmentOperators 308 | {tokens} = grammar.tokenizeLine(assignmentOperator) 309 | expect(tokens[0]).toEqual value: assignmentOperator, scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] 310 | 311 | for bitwiseOperator in bitwiseOperators 312 | {tokens} = grammar.tokenizeLine(bitwiseOperator) 313 | expect(tokens[0]).toEqual value: bitwiseOperator, scopes: ["source.coffee", "keyword.operator.assignment.compound.bitwise.coffee"] 314 | 315 | for comparisonOperator in comparisonOperators 316 | {tokens} = grammar.tokenizeLine(comparisonOperator) 317 | expect(tokens[0]).toEqual value: comparisonOperator, scopes: ["source.coffee", "keyword.operator.comparison.coffee"] 318 | 319 | it "tokenizes operators properly", -> 320 | logicalOperators = ["!", "&&", "||", "and", "or", "not"] 321 | bitwiseOperators = ["^", "~", "&", "|"] 322 | comparisonOperators = ["<", ">", "is", "isnt"] 323 | decrementOperators = ["--"] 324 | incrementOperators = ["++"] 325 | splatOperators = ["..."] 326 | existentialOperators = ["?"] 327 | operators = ["%", "*", "/", "-", "+"] 328 | keywords = ["delete", "instanceof", "new", "typeof"] 329 | 330 | for logicalOperator in logicalOperators 331 | {tokens} = grammar.tokenizeLine(logicalOperator) 332 | expect(tokens[0]).toEqual value: logicalOperator, scopes: ["source.coffee", "keyword.operator.logical.coffee"] 333 | 334 | for bitwiseOperator in bitwiseOperators 335 | {tokens} = grammar.tokenizeLine(bitwiseOperator) 336 | expect(tokens[0]).toEqual value: bitwiseOperator, scopes: ["source.coffee", "keyword.operator.bitwise.coffee"] 337 | 338 | for comparisonOperator in comparisonOperators 339 | {tokens} = grammar.tokenizeLine(comparisonOperator) 340 | expect(tokens[0]).toEqual value: comparisonOperator, scopes: ["source.coffee", "keyword.operator.comparison.coffee"] 341 | 342 | for decrementOperator in decrementOperators 343 | {tokens} = grammar.tokenizeLine(decrementOperator) 344 | expect(tokens[0]).toEqual value: decrementOperator, scopes: ["source.coffee", "keyword.operator.decrement.coffee"] 345 | 346 | for incrementOperator in incrementOperators 347 | {tokens} = grammar.tokenizeLine(incrementOperator) 348 | expect(tokens[0]).toEqual value: incrementOperator, scopes: ["source.coffee", "keyword.operator.increment.coffee"] 349 | 350 | for splatOperator in splatOperators 351 | {tokens} = grammar.tokenizeLine(splatOperator) 352 | expect(tokens[0]).toEqual value: splatOperator, scopes: ["source.coffee", "keyword.operator.splat.coffee"] 353 | 354 | for existentialOperator in existentialOperators 355 | {tokens} = grammar.tokenizeLine(existentialOperator) 356 | expect(tokens[0]).toEqual value: existentialOperator, scopes: ["source.coffee", "keyword.operator.existential.coffee"] 357 | 358 | for operator in operators 359 | {tokens} = grammar.tokenizeLine(operator) 360 | expect(tokens[0]).toEqual value: operator, scopes: ["source.coffee", "keyword.operator.coffee"] 361 | 362 | for keyword in keywords 363 | {tokens} = grammar.tokenizeLine(keyword) 364 | expect(tokens[0]).toEqual value: keyword, scopes: ["source.coffee", "keyword.operator.#{keyword}.coffee"] 365 | 366 | it "does not tokenize non-operators as operators", -> 367 | notOperators = ["(/=", "-->", "=>", "->"] 368 | 369 | for notOperator in notOperators 
370 | {tokens} = grammar.tokenizeLine(notOperator) 371 | expect(tokens[0]).not.toEqual value: notOperator, scopes: ["source.coffee", "keyword.operator.coffee"] 372 | 373 | describe "properties", -> 374 | it "tokenizes properties", -> 375 | {tokens} = grammar.tokenizeLine('obj.property') 376 | expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] 377 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] 378 | expect(tokens[2]).toEqual value: 'property', scopes: ['source.coffee', 'variable.other.property.coffee'] 379 | 380 | {tokens} = grammar.tokenizeLine('obj.property instanceof Object') 381 | expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] 382 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] 383 | expect(tokens[2]).toEqual value: 'property', scopes: ['source.coffee', 'variable.other.property.coffee'] 384 | 385 | {tokens} = grammar.tokenizeLine('obj.property.property') 386 | expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] 387 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] 388 | expect(tokens[2]).toEqual value: 'property', scopes: ['source.coffee', 'variable.other.object.property.coffee'] 389 | 390 | {tokens} = grammar.tokenizeLine('obj.Property') 391 | expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] 392 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] 393 | expect(tokens[2]).toEqual value: 'Property', scopes: ['source.coffee', 'variable.other.property.coffee'] 394 | 395 | {tokens} = grammar.tokenizeLine('obj.prop1?.prop2?') 396 | expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] 397 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] 398 | expect(tokens[2]).toEqual value: 'prop1', scopes: ['source.coffee', 'variable.other.object.property.coffee'] 399 | expect(tokens[3]).toEqual value: '?', scopes: ['source.coffee', 'keyword.operator.existential.coffee'] 400 | expect(tokens[4]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] 401 | expect(tokens[5]).toEqual value: 'prop2', scopes: ['source.coffee', 'variable.other.property.coffee'] 402 | expect(tokens[6]).toEqual value: '?', scopes: ['source.coffee', 'keyword.operator.existential.coffee'] 403 | 404 | {tokens} = grammar.tokenizeLine('obj.$abc$') 405 | expect(tokens[2]).toEqual value: '$abc$', scopes: ['source.coffee', 'variable.other.property.coffee'] 406 | 407 | {tokens} = grammar.tokenizeLine('obj.$$') 408 | expect(tokens[2]).toEqual value: '$$', scopes: ['source.coffee', 'variable.other.property.coffee'] 409 | 410 | {tokens} = grammar.tokenizeLine('a().b') 411 | expect(tokens[2]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] 412 | expect(tokens[3]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] 413 | expect(tokens[4]).toEqual value: 'b', scopes: ['source.coffee', 'variable.other.property.coffee'] 414 | 415 | {tokens} = grammar.tokenizeLine('a.123illegal') 416 | 
expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee'] 417 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] 418 | expect(tokens[2]).toEqual value: '123illegal', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] 419 | 420 | it "tokenizes constant properties", -> 421 | {tokens} = grammar.tokenizeLine('obj.MY_CONSTANT') 422 | expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] 423 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] 424 | expect(tokens[2]).toEqual value: 'MY_CONSTANT', scopes: ['source.coffee', 'constant.other.property.coffee'] 425 | 426 | {tokens} = grammar.tokenizeLine('obj.MY_CONSTANT.prop') 427 | expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] 428 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] 429 | expect(tokens[2]).toEqual value: 'MY_CONSTANT', scopes: ['source.coffee', 'constant.other.object.property.coffee'] 430 | 431 | {tokens} = grammar.tokenizeLine('a.C') 432 | expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] 433 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] 434 | expect(tokens[2]).toEqual value: 'C', scopes: ['source.coffee', 'constant.other.property.coffee'] 435 | 436 | it "tokenizes objects, methods, and properties using :: prototype syntax", -> 437 | {tokens} = grammar.tokenizeLine("Foo::") 438 | expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] 439 | expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] 440 | 441 | {tokens} = grammar.tokenizeLine("Foo::true") 442 | expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] 443 | expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] 444 | expect(tokens[2]).toEqual value: "true", scopes: ["source.coffee", "variable.other.property.coffee"] 445 | 446 | {tokens} = grammar.tokenizeLine("Foo::on") 447 | expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] 448 | expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] 449 | expect(tokens[2]).toEqual value: "on", scopes: ["source.coffee", "variable.other.property.coffee"] 450 | 451 | {tokens} = grammar.tokenizeLine("Foo::yes") 452 | expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] 453 | expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] 454 | expect(tokens[2]).toEqual value: "yes", scopes: ["source.coffee", "variable.other.property.coffee"] 455 | 456 | {tokens} = grammar.tokenizeLine("Foo::false") 457 | expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] 458 | expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] 459 | expect(tokens[2]).toEqual value: "false", scopes: ["source.coffee", "variable.other.property.coffee"] 460 | 461 | {tokens} = grammar.tokenizeLine("Foo::off") 462 | expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] 463 | 
expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] 464 | expect(tokens[2]).toEqual value: "off", scopes: ["source.coffee", "variable.other.property.coffee"] 465 | 466 | {tokens} = grammar.tokenizeLine("Foo::no") 467 | expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] 468 | expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] 469 | expect(tokens[2]).toEqual value: "no", scopes: ["source.coffee", "variable.other.property.coffee"] 470 | 471 | {tokens} = grammar.tokenizeLine("Foo::null") 472 | expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] 473 | expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] 474 | expect(tokens[2]).toEqual value: "null", scopes: ["source.coffee", "variable.other.property.coffee"] 475 | 476 | {tokens} = grammar.tokenizeLine("Foo::extends") 477 | expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] 478 | expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] 479 | expect(tokens[2]).toEqual value: "extends", scopes: ["source.coffee", "variable.other.property.coffee"] 480 | 481 | {tokens} = grammar.tokenizeLine("Foo :: something :: else") 482 | expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] 483 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee"] 484 | expect(tokens[2]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] 485 | expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee"] 486 | expect(tokens[4]).toEqual value: "something", scopes: ["source.coffee", "variable.other.object.property.coffee"] 487 | expect(tokens[5]).toEqual value: " ", scopes: ["source.coffee"] 488 | expect(tokens[6]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] 489 | expect(tokens[7]).toEqual value: " ", scopes: ["source.coffee"] 490 | expect(tokens[8]).toEqual value: "else", scopes: ["source.coffee", "variable.other.property.coffee"] 491 | 492 | {tokens} = grammar.tokenizeLine("Foo::toString()") 493 | expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] 494 | expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "meta.method-call.coffee", "keyword.operator.prototype.coffee"] 495 | expect(tokens[2]).toEqual value: "toString", scopes: ["source.coffee", "meta.method-call.coffee", "support.function.coffee"] 496 | 497 | describe "variables", -> 498 | it "tokenizes 'this'", -> 499 | {tokens} = grammar.tokenizeLine('this') 500 | expect(tokens[0]).toEqual value: 'this', scopes: ['source.coffee', 'variable.language.this.coffee'] 501 | 502 | {tokens} = grammar.tokenizeLine('this.obj.prototype = new El()') 503 | expect(tokens[0]).toEqual value: 'this', scopes: ['source.coffee', 'variable.language.this.coffee'] 504 | 505 | {tokens} = grammar.tokenizeLine('$this') 506 | expect(tokens[0]).toEqual value: '$this', scopes: ['source.coffee'] 507 | 508 | {tokens} = grammar.tokenizeLine('this$') 509 | expect(tokens[0]).toEqual value: 'this$', scopes: ['source.coffee'] 510 | 511 | it "tokenizes 'super'", -> 512 | {tokens} = grammar.tokenizeLine('super') 513 | expect(tokens[0]).toEqual value: 'super', scopes: ['source.coffee', 'variable.language.super.coffee'] 514 | 515 | it "tokenizes 
'arguments'", -> 516 | {tokens} = grammar.tokenizeLine('arguments') 517 | expect(tokens[0]).toEqual value: 'arguments', scopes: ['source.coffee', 'variable.language.arguments.coffee'] 518 | 519 | {tokens} = grammar.tokenizeLine('arguments[0]') 520 | expect(tokens[0]).toEqual value: 'arguments', scopes: ['source.coffee', 'variable.language.arguments.coffee'] 521 | 522 | {tokens} = grammar.tokenizeLine('arguments.length') 523 | expect(tokens[0]).toEqual value: 'arguments', scopes: ['source.coffee', 'variable.language.arguments.coffee'] 524 | 525 | it "tokenizes illegal identifiers", -> 526 | {tokens} = grammar.tokenizeLine('0illegal') 527 | expect(tokens[0]).toEqual value: '0illegal', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] 528 | 529 | {tokens} = grammar.tokenizeLine('123illegal') 530 | expect(tokens[0]).toEqual value: '123illegal', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] 531 | 532 | {tokens} = grammar.tokenizeLine('123$illegal') 533 | expect(tokens[0]).toEqual value: '123$illegal', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] 534 | 535 | describe "objects", -> 536 | it "tokenizes them", -> 537 | {tokens} = grammar.tokenizeLine('obj.prop') 538 | expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] 539 | 540 | {tokens} = grammar.tokenizeLine('$abc$.prop') 541 | expect(tokens[0]).toEqual value: '$abc$', scopes: ['source.coffee', 'variable.other.object.coffee'] 542 | 543 | {tokens} = grammar.tokenizeLine('$$.prop') 544 | expect(tokens[0]).toEqual value: '$$', scopes: ['source.coffee', 'variable.other.object.coffee'] 545 | 546 | {tokens} = grammar.tokenizeLine('obj?.prop') 547 | expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] 548 | expect(tokens[1]).toEqual value: '?', scopes: ['source.coffee', 'keyword.operator.existential.coffee'] 549 | 550 | it "tokenizes illegal objects", -> 551 | {tokens} = grammar.tokenizeLine('1.prop') 552 | expect(tokens[0]).toEqual value: '1', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] 553 | 554 | {tokens} = grammar.tokenizeLine('123.prop') 555 | expect(tokens[0]).toEqual value: '123', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] 556 | 557 | {tokens} = grammar.tokenizeLine('123a.prop') 558 | expect(tokens[0]).toEqual value: '123a', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] 559 | 560 | describe "arrays", -> 561 | it "tokenizes basic arrays", -> 562 | {tokens} = grammar.tokenizeLine('[a, "b", 3]') 563 | expect(tokens[0]).toEqual value: '[', scopes: ['source.coffee', 'punctuation.definition.array.begin.bracket.square.coffee'] 564 | expect(tokens[1]).toEqual value: 'a', scopes: ['source.coffee'] 565 | expect(tokens[2]).toEqual value: ',', scopes: ['source.coffee', 'punctuation.separator.delimiter.coffee'] 566 | expect(tokens[3]).toEqual value: ' ', scopes: ['source.coffee'] 567 | expect(tokens[9]).toEqual value: '3', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] 568 | expect(tokens[10]).toEqual value: ']', scopes: ['source.coffee', 'punctuation.definition.array.end.bracket.square.coffee'] 569 | 570 | it "tokenizes inclusive and exclusive slices", -> 571 | {tokens} = grammar.tokenizeLine('[a..3]') 572 | expect(tokens[0]).toEqual value: '[', scopes: ['source.coffee', 'punctuation.definition.array.begin.bracket.square.coffee'] 573 | expect(tokens[1]).toEqual value: 'a', scopes: ['source.coffee'] 574 | expect(tokens[2]).toEqual value: '..', scopes: 
['source.coffee', 'keyword.operator.slice.inclusive.coffee'] 575 | expect(tokens[3]).toEqual value: '3', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] 576 | expect(tokens[4]).toEqual value: ']', scopes: ['source.coffee', 'punctuation.definition.array.end.bracket.square.coffee'] 577 | 578 | {tokens} = grammar.tokenizeLine('[3...b]') 579 | expect(tokens[0]).toEqual value: '[', scopes: ['source.coffee', 'punctuation.definition.array.begin.bracket.square.coffee'] 580 | expect(tokens[1]).toEqual value: '3', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] 581 | expect(tokens[2]).toEqual value: '...', scopes: ['source.coffee', 'keyword.operator.slice.exclusive.coffee'] 582 | expect(tokens[3]).toEqual value: 'b', scopes: ['source.coffee'] 583 | expect(tokens[4]).toEqual value: ']', scopes: ['source.coffee', 'punctuation.definition.array.end.bracket.square.coffee'] 584 | 585 | it "verifies that regular expressions have explicit count modifiers", -> 586 | source = fs.readFileSync(path.resolve(__dirname, '..', 'grammars', 'coffeescript.cson'), 'utf8') 587 | expect(source.search /{,/).toEqual -1 588 | 589 | source = fs.readFileSync(path.resolve(__dirname, '..', 'grammars', 'coffeescript (literate).cson'), 'utf8') 590 | expect(source.search /{,/).toEqual -1 591 | 592 | it "tokenizes embedded JavaScript", -> 593 | waitsForPromise -> 594 | atom.packages.activatePackage("language-javascript") 595 | 596 | runs -> 597 | {tokens} = grammar.tokenizeLine("`;`") 598 | expect(tokens[0]).toEqual value: "`", scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.begin.coffee"] 599 | expect(tokens[1]).toEqual value: ";", scopes: ["source.coffee", "string.quoted.script.coffee", "source.js.embedded.coffee", "punctuation.terminator.statement.js"] 600 | expect(tokens[2]).toEqual value: "`", scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.end.coffee"] 601 | 602 | lines = grammar.tokenizeLines """ 603 | `var a = 1;` 604 | a = 2 605 | """ 606 | expect(lines[0][0]).toEqual value: '`', scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.begin.coffee"] 607 | expect(lines[0][1]).toEqual value: 'var', scopes: ["source.coffee", "string.quoted.script.coffee", "source.js.embedded.coffee", "storage.type.var.js"] 608 | expect(lines[0][6]).toEqual value: ';', scopes: ["source.coffee", "string.quoted.script.coffee", "source.js.embedded.coffee", "punctuation.terminator.statement.js"] 609 | expect(lines[0][7]).toEqual value: '`', scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.end.coffee"] 610 | expect(lines[1][0]).toEqual value: 'a', scopes: ["source.coffee", "variable.assignment.coffee"] 611 | 612 | {tokens} = grammar.tokenizeLine("`// comment` a = 2") 613 | expect(tokens[0]).toEqual value: '`', scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.begin.coffee"] 614 | expect(tokens[1]).toEqual value: '//', scopes: ["source.coffee", "string.quoted.script.coffee", "source.js.embedded.coffee", "comment.line.double-slash.js", "punctuation.definition.comment.js"] 615 | expect(tokens[2]).toEqual value: ' comment', scopes: ["source.coffee", "string.quoted.script.coffee", "source.js.embedded.coffee", "comment.line.double-slash.js"] 616 | expect(tokens[3]).toEqual value: '`', scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.end.coffee"] 617 | expect(tokens[5]).toEqual value: 'a', scopes: 
["source.coffee", "variable.assignment.coffee"] 618 | 619 | describe "function calls", -> 620 | it "tokenizes function calls", -> 621 | {tokens} = grammar.tokenizeLine('functionCall()') 622 | expect(tokens[0]).toEqual value: 'functionCall', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 623 | expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] 624 | expect(tokens[2]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] 625 | 626 | {tokens} = grammar.tokenizeLine('functionCall(arg1, "test", {a: 123})') 627 | expect(tokens[0]).toEqual value: 'functionCall', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 628 | expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] 629 | expect(tokens[2]).toEqual value: 'arg1', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee'] 630 | expect(tokens[3]).toEqual value: ',', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.separator.delimiter.coffee'] 631 | expect(tokens[5]).toEqual value: '"', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.begin.coffee'] 632 | expect(tokens[6]).toEqual value: 'test', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'string.quoted.double.coffee'] 633 | expect(tokens[7]).toEqual value: '"', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.end.coffee'] 634 | expect(tokens[8]).toEqual value: ',', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.separator.delimiter.coffee'] 635 | expect(tokens[10]).toEqual value: '{', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.brace.curly.coffee'] 636 | expect(tokens[11]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'variable.assignment.coffee'] 637 | expect(tokens[12]).toEqual value: ':', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'keyword.operator.assignment.coffee'] 638 | expect(tokens[14]).toEqual value: '123', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] 639 | expect(tokens[15]).toEqual value: '}', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.brace.curly.coffee'] 640 | expect(tokens[16]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] 641 | 642 | {tokens} = grammar.tokenizeLine('functionCall((123).toString())') 643 | expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] 644 | expect(tokens[2]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.brace.round.coffee'] 645 | expect(tokens[3]).toEqual value: 
'123', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] 646 | expect(tokens[4]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.brace.round.coffee'] 647 | expect(tokens[9]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] 648 | 649 | {tokens} = grammar.tokenizeLine('$abc$()') 650 | expect(tokens[0]).toEqual value: '$abc$', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 651 | 652 | {tokens} = grammar.tokenizeLine('$$()') 653 | expect(tokens[0]).toEqual value: '$$', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 654 | 655 | {tokens} = grammar.tokenizeLine('ABC()') 656 | expect(tokens[0]).toEqual value: 'ABC', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 657 | 658 | {tokens} = grammar.tokenizeLine('$ABC$()') 659 | expect(tokens[0]).toEqual value: '$ABC$', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 660 | 661 | {tokens} = grammar.tokenizeLine('@$()') 662 | expect(tokens[0]).toEqual value: '@', scopes: ['source.coffee', 'meta.function-call.coffee', 'variable.other.readwrite.instance.coffee'] 663 | expect(tokens[1]).toEqual value: '$', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 664 | 665 | {tokens} = grammar.tokenizeLine('functionCall arg1, "test", {a: 123}') 666 | expect(tokens[0]).toEqual value: 'functionCall', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 667 | expect(tokens[2]).toEqual value: 'arg1', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee'] 668 | expect(tokens[3]).toEqual value: ',', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.separator.delimiter.coffee'] 669 | expect(tokens[5]).toEqual value: '"', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.begin.coffee'] 670 | expect(tokens[6]).toEqual value: 'test', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'string.quoted.double.coffee'] 671 | expect(tokens[7]).toEqual value: '"', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.end.coffee'] 672 | expect(tokens[8]).toEqual value: ',', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.separator.delimiter.coffee'] 673 | expect(tokens[10]).toEqual value: '{', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.brace.curly.coffee'] 674 | expect(tokens[11]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'variable.assignment.coffee'] 675 | expect(tokens[12]).toEqual value: ':', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'keyword.operator.assignment.coffee'] 676 | expect(tokens[14]).toEqual value: '123', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] 677 | expect(tokens[15]).toEqual value: '}', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.brace.curly.coffee'] 
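# NOTE (editorial sketch, not part of the original spec): every assertion here
# checks the {value, scopes} pairs returned by grammar.tokenizeLine. A small
# helper could compress the pattern; the name `expectToken` is hypothetical,
# not an API of this package or of Jasmine:
#
#   expectToken = (token, value, scopes) ->
#     expect(token).toEqual {value, scopes}
#
#   {tokens} = grammar.tokenizeLine('functionCall()')
#   expectToken tokens[0], 'functionCall', ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee']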
678 | 679 | {tokens} = grammar.tokenizeLine("foo bar") 680 | expect(tokens[0]).toEqual value: "foo", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] 681 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.function-call.coffee"] 682 | expect(tokens[2]).toEqual value: "bar", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] 683 | 684 | {tokens} = grammar.tokenizeLine("foo not food") 685 | expect(tokens[0]).toEqual value: "foo", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] 686 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.function-call.coffee"] 687 | expect(tokens[2]).toEqual value: "not", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "keyword.operator.logical.coffee"] 688 | expect(tokens[3]).toEqual value: " food", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] 689 | 690 | {tokens} = grammar.tokenizeLine("eat food for food in foods") 691 | expect(tokens[0]).toEqual value: "eat", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] 692 | expect(tokens[2]).toEqual value: "food", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] 693 | expect(tokens[4]).toEqual value: "for", scopes: ["source.coffee", "keyword.control.coffee"] 694 | expect(tokens[5]).toEqual value: " food ", scopes: ["source.coffee"] 695 | expect(tokens[6]).toEqual value: "in", scopes: ["source.coffee", "keyword.control.coffee"] 696 | expect(tokens[7]).toEqual value: " foods", scopes: ["source.coffee"] 697 | 698 | {tokens} = grammar.tokenizeLine("(eat food for food in get foods)") 699 | expect(tokens[0]).toEqual value: "(", scopes: ["source.coffee", "meta.brace.round.coffee"] 700 | expect(tokens[1]).toEqual value: "eat", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] 701 | expect(tokens[3]).toEqual value: "food", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] 702 | expect(tokens[5]).toEqual value: "for", scopes: ["source.coffee", "keyword.control.coffee"] 703 | expect(tokens[6]).toEqual value: " food ", scopes: ["source.coffee"] 704 | expect(tokens[7]).toEqual value: "in", scopes: ["source.coffee", "keyword.control.coffee"] 705 | expect(tokens[9]).toEqual value: "get", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] 706 | expect(tokens[11]).toEqual value: "foods", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] 707 | expect(tokens[12]).toEqual value: ")", scopes: ["source.coffee", "meta.brace.round.coffee"] 708 | 709 | {tokens} = grammar.tokenizeLine("[eat food]") 710 | expect(tokens[0]).toEqual value: "[", scopes: ["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] 711 | expect(tokens[1]).toEqual value: "eat", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] 712 | expect(tokens[3]).toEqual value: "food", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] 713 | expect(tokens[4]).toEqual value: "]", scopes: ["source.coffee", "punctuation.definition.array.end.bracket.square.coffee"] 714 | 715 | {tokens} = grammar.tokenizeLine("foo @bar") 716 | expect(tokens[0]).toEqual value: "foo", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] 717 | expect(tokens[2]).toEqual value: "@bar", scopes: 
["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "variable.other.readwrite.instance.coffee"] 718 | 719 | {tokens} = grammar.tokenizeLine("@foo bar") 720 | expect(tokens[0]).toEqual value: "@", scopes: ["source.coffee", "meta.function-call.coffee", "variable.other.readwrite.instance.coffee"] 721 | expect(tokens[1]).toEqual value: "foo", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] 722 | expect(tokens[3]).toEqual value: "bar", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] 723 | 724 | {tokens} = grammar.tokenizeLine("foo baz, @bar") 725 | expect(tokens[0]).toEqual value: "foo", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] 726 | expect(tokens[2]).toEqual value: "baz", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] 727 | expect(tokens[3]).toEqual value: ",", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "punctuation.separator.delimiter.coffee"] 728 | expect(tokens[5]).toEqual value: "@bar", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "variable.other.readwrite.instance.coffee"] 729 | 730 | {tokens} = grammar.tokenizeLine("$ @$") 731 | expect(tokens[0]).toEqual value: "$", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] 732 | expect(tokens[2]).toEqual value: "@$", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "variable.other.readwrite.instance.coffee"] 733 | 734 | it "tokenizes function calls when they are arguments", -> 735 | {tokens} = grammar.tokenizeLine('a(b(c))') 736 | expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 737 | expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] 738 | expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 739 | expect(tokens[3]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] 740 | expect(tokens[4]).toEqual value: 'c', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee'] 741 | expect(tokens[5]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] 742 | expect(tokens[6]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] 743 | 744 | {tokens} = grammar.tokenizeLine('a b c') 745 | expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 746 | expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 747 | expect(tokens[4]).toEqual value: 'c', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 
'meta.function-call.coffee', 'meta.arguments.coffee'] 748 | 749 | it "tokenizes illegal function calls", -> 750 | {tokens} = grammar.tokenizeLine('0illegal()') 751 | expect(tokens[0]).toEqual value: '0illegal', scopes: ['source.coffee', 'meta.function-call.coffee', 'invalid.illegal.identifier.coffee'] 752 | expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] 753 | expect(tokens[2]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] 754 | 755 | it "tokenizes illegal arguments", -> 756 | {tokens} = grammar.tokenizeLine('a(1a)') 757 | expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 758 | expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] 759 | expect(tokens[2]).toEqual value: '1a', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'invalid.illegal.identifier.coffee'] 760 | expect(tokens[3]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] 761 | 762 | {tokens} = grammar.tokenizeLine('a(123a)') 763 | expect(tokens[2]).toEqual value: '123a', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'invalid.illegal.identifier.coffee'] 764 | 765 | {tokens} = grammar.tokenizeLine('a(1.prop)') 766 | expect(tokens[2]).toEqual value: '1', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'invalid.illegal.identifier.coffee'] 767 | expect(tokens[3]).toEqual value: '.', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.separator.property.period.coffee'] 768 | expect(tokens[4]).toEqual value: 'prop', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'variable.other.property.coffee'] 769 | 770 | {tokens} = grammar.tokenizeLine('a 1a') 771 | expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 772 | expect(tokens[2]).toEqual value: '1a', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'invalid.illegal.identifier.coffee'] 773 | 774 | it "tokenizes a function declaration as an argument", -> 775 | {tokens} = grammar.tokenizeLine('a((p) -> return p )') 776 | expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] 777 | expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] 778 | expect(tokens[2]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function.inline.coffee', 'meta.parameters.coffee', 'punctuation.definition.parameters.begin.bracket.round.coffee'] 779 | expect(tokens[3]).toEqual value: 'p', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function.inline.coffee', 'meta.parameters.coffee', 'variable.parameter.function.coffee'] 780 | expect(tokens[4]).toEqual value: ')', scopes: ['source.coffee', 
'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function.inline.coffee', 'meta.parameters.coffee', 'punctuation.definition.parameters.end.bracket.round.coffee'] 781 | expect(tokens[8]).toEqual value: 'return', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'keyword.control.coffee'] 782 | expect(tokens[9]).toEqual value: ' p ', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee'] 783 | expect(tokens[10]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] 784 | 785 | it "does not tokenize booleans as function calls", -> 786 | {tokens} = grammar.tokenizeLine("false unless true") 787 | expect(tokens[0]).toEqual value: "false", scopes: ["source.coffee", "constant.language.boolean.false.coffee"] 788 | expect(tokens[2]).toEqual value: "unless", scopes: ["source.coffee", "keyword.control.coffee"] 789 | expect(tokens[4]).toEqual value: "true", scopes: ["source.coffee", "constant.language.boolean.true.coffee"] 790 | 791 | {tokens} = grammar.tokenizeLine("true if false") 792 | expect(tokens[0]).toEqual value: "true", scopes: ["source.coffee", "constant.language.boolean.true.coffee"] 793 | expect(tokens[2]).toEqual value: "if", scopes: ["source.coffee", "keyword.control.coffee"] 794 | expect(tokens[4]).toEqual value: "false", scopes: ["source.coffee", "constant.language.boolean.false.coffee"] 795 | 796 | it "does not tokenize comparison operators as function calls", -> 797 | {tokens} = grammar.tokenizeLine("if a is b") 798 | expect(tokens[1]).toEqual value: " a ", scopes: ["source.coffee"] 799 | expect(tokens[2]).toEqual value: "is", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] 800 | expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] 801 | 802 | describe "functions", -> 803 | it "tokenizes regular functions", -> 804 | {tokens} = grammar.tokenizeLine("foo = -> 1") 805 | expect(tokens[0]).toEqual value: "foo", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee"] 806 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] 807 | expect(tokens[2]).toEqual value: "=", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] 808 | expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] 809 | expect(tokens[4]).toEqual value: "->", scopes: ["source.coffee", "meta.function.coffee", "storage.type.function.coffee"] 810 | expect(tokens[5]).toEqual value: " ", scopes: ["source.coffee"] 811 | expect(tokens[6]).toEqual value: "1", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] 812 | 813 | {tokens} = grammar.tokenizeLine("@foo = -> 1") 814 | expect(tokens[0]).toEqual value: "@", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee", "variable.other.readwrite.instance.coffee"] 815 | expect(tokens[1]).toEqual value: "foo", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee"] 816 | expect(tokens[3]).toEqual value: "=", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] 817 | 818 | {tokens} = grammar.tokenizeLine("$ = => 1") 819 | expect(tokens[0]).toEqual value: "$", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee"] 820 | expect(tokens[2]).toEqual value: "=", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] 821 | 
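# NOTE (editorial sketch, assumptions flagged): the arrow token itself is scoped
# as storage.type.function.coffee for both the thin arrow `->` (asserted above)
# and the fat arrow `=>` (asserted below); binding does not change that scope.
# The same checks extend to multi-line functions via grammar.tokenizeLines,
# which this spec uses elsewhere, e.g.:
#
#   lines = grammar.tokenizeLines """
#     add = (a, b) ->
#       a + b
#   """
#   # By inference from the `foo = -> 1` case above (not asserted in this
#   # spec), lines[0][0] should equal value: 'add' with scopes
#   # ['source.coffee', 'meta.function.coffee', 'entity.name.function.coffee']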
822 | {tokens} = grammar.tokenizeLine("foo: -> 1") 823 | expect(tokens[0]).toEqual value: "foo", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee"] 824 | expect(tokens[1]).toEqual value: ":", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] 825 | expect(tokens[2]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] 826 | expect(tokens[3]).toEqual value: "->", scopes: ["source.coffee", "meta.function.coffee", "storage.type.function.coffee"] 827 | expect(tokens[4]).toEqual value: " ", scopes: ["source.coffee"] 828 | expect(tokens[5]).toEqual value: "1", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] 829 | 830 | {tokens} = grammar.tokenizeLine("'quoted': (a) => true") 831 | expect(tokens[0]).toEqual value: "'", scopes: ["source.coffee", "meta.function.coffee", "string.quoted.single.coffee", "punctuation.definition.string.begin.coffee"] 832 | expect(tokens[1]).toEqual value: "quoted", scopes: ["source.coffee", "meta.function.coffee", "string.quoted.single.coffee", "entity.name.function.coffee"] 833 | expect(tokens[2]).toEqual value: "'", scopes: ["source.coffee", "meta.function.coffee", "string.quoted.single.coffee", "punctuation.definition.string.end.coffee"] 834 | expect(tokens[3]).toEqual value: ":", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] 835 | expect(tokens[4]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] 836 | expect(tokens[5]).toEqual value: "(", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] 837 | expect(tokens[6]).toEqual value: "a", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] 838 | expect(tokens[7]).toEqual value: ")", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.end.bracket.round.coffee"] 839 | expect(tokens[8]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] 840 | expect(tokens[9]).toEqual value: "=>", scopes: ["source.coffee", "meta.function.coffee", "storage.type.function.coffee"] 841 | expect(tokens[10]).toEqual value: " ", scopes: ["source.coffee"] 842 | expect(tokens[11]).toEqual value: "true", scopes: ["source.coffee", "constant.language.boolean.true.coffee"] 843 | 844 | {tokens} = grammar.tokenizeLine('"quoted": (a) -> true') 845 | expect(tokens[0]).toEqual value: '"', scopes: ["source.coffee", "meta.function.coffee", "string.quoted.double.coffee", "punctuation.definition.string.begin.coffee"] 846 | expect(tokens[1]).toEqual value: "quoted", scopes: ["source.coffee", "meta.function.coffee", "string.quoted.double.coffee", "entity.name.function.coffee"] 847 | expect(tokens[2]).toEqual value: '"', scopes: ["source.coffee", "meta.function.coffee", "string.quoted.double.coffee", "punctuation.definition.string.end.coffee"] 848 | expect(tokens[3]).toEqual value: ":", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] 849 | expect(tokens[4]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] 850 | expect(tokens[5]).toEqual value: "(", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] 851 | expect(tokens[6]).toEqual value: "a", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", 
"variable.parameter.function.coffee"] 852 | expect(tokens[7]).toEqual value: ")", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.end.bracket.round.coffee"] 853 | expect(tokens[8]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] 854 | expect(tokens[9]).toEqual value: "->", scopes: ["source.coffee", "meta.function.coffee", "storage.type.function.coffee"] 855 | expect(tokens[10]).toEqual value: " ", scopes: ["source.coffee"] 856 | expect(tokens[11]).toEqual value: "true", scopes: ["source.coffee", "constant.language.boolean.true.coffee"] 857 | 858 | {tokens} = grammar.tokenizeLine("hello: (a) -> 1") 859 | expect(tokens[0]).toEqual value: "hello", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee"] 860 | expect(tokens[1]).toEqual value: ":", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] 861 | expect(tokens[2]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] 862 | expect(tokens[3]).toEqual value: "(", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] 863 | expect(tokens[4]).toEqual value: "a", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] 864 | expect(tokens[5]).toEqual value: ")", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.end.bracket.round.coffee"] 865 | expect(tokens[6]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] 866 | expect(tokens[7]).toEqual value: "->", scopes: ["source.coffee", "meta.function.coffee", "storage.type.function.coffee"] 867 | expect(tokens[9]).toEqual value: "1", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] 868 | 869 | {tokens} = grammar.tokenizeLine("hello: (a, b, {c, d}, e = 'test', f = 3, g = -> 4) -> 1") 870 | expect(tokens[0]).toEqual value: "hello", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee"] 871 | expect(tokens[1]).toEqual value: ":", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] 872 | expect(tokens[3]).toEqual value: "(", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] 873 | expect(tokens[4]).toEqual value: "a", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] 874 | expect(tokens[5]).toEqual value: ",", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.separator.delimiter.coffee"] 875 | expect(tokens[6]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee"] 876 | expect(tokens[7]).toEqual value: "b", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] 877 | expect(tokens[8]).toEqual value: ",", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.separator.delimiter.coffee"] 878 | expect(tokens[10]).toEqual value: "{", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "meta.brace.curly.coffee"] 879 | expect(tokens[11]).toEqual value: "c", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee"] 880 | expect(tokens[12]).toEqual value: ",", scopes: ["source.coffee", 
"meta.function.coffee", "meta.parameters.coffee", "punctuation.separator.delimiter.coffee"] 881 | expect(tokens[13]).toEqual value: " d", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee"] 882 | expect(tokens[14]).toEqual value: "}", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "meta.brace.curly.coffee"] 883 | expect(tokens[17]).toEqual value: "e", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] 884 | expect(tokens[19]).toEqual value: "=", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "keyword.operator.assignment.coffee"] 885 | expect(tokens[21]).toEqual value: "'", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "string.quoted.single.coffee", "punctuation.definition.string.begin.coffee"] 886 | expect(tokens[24]).toEqual value: ",", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.separator.delimiter.coffee"] 887 | expect(tokens[26]).toEqual value: "f", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] 888 | expect(tokens[30]).toEqual value: "3", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "constant.numeric.decimal.coffee"] 889 | expect(tokens[33]).toEqual value: "g", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] 890 | expect(tokens[35]).toEqual value: "=", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "keyword.operator.assignment.coffee"] 891 | expect(tokens[37]).toEqual value: "->", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "meta.function.inline.coffee", "storage.type.function.coffee"] 892 | expect(tokens[40]).toEqual value: ")", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.end.bracket.round.coffee"] 893 | expect(tokens[42]).toEqual value: "->", scopes: ["source.coffee", "meta.function.coffee", "storage.type.function.coffee"] 894 | 895 | it "tokenizes inline functions", -> 896 | {tokens} = grammar.tokenizeLine("-> true") 897 | expect(tokens[0]).toEqual value: "->", scopes: ["source.coffee", "meta.function.inline.coffee", "storage.type.function.coffee"] 898 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee"] 899 | 900 | {tokens} = grammar.tokenizeLine(" -> true") 901 | expect(tokens[0]).toEqual value: " ", scopes: ["source.coffee"] 902 | expect(tokens[1]).toEqual value: "->", scopes: ["source.coffee", "meta.function.inline.coffee", "storage.type.function.coffee"] 903 | expect(tokens[2]).toEqual value: " ", scopes: ["source.coffee"] 904 | 905 | {tokens} = grammar.tokenizeLine("->true") 906 | expect(tokens[0]).toEqual value: "->", scopes: ["source.coffee", "meta.function.inline.coffee", "storage.type.function.coffee"] 907 | 908 | {tokens} = grammar.tokenizeLine("(arg) -> true") 909 | expect(tokens[0]).toEqual value: "(", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] 910 | expect(tokens[1]).toEqual value: "arg", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] 911 | expect(tokens[2]).toEqual value: ")", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", 
"punctuation.definition.parameters.end.bracket.round.coffee"] 912 | expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.function.inline.coffee"] 913 | expect(tokens[4]).toEqual value: "->", scopes: ["source.coffee", "meta.function.inline.coffee", "storage.type.function.coffee"] 914 | expect(tokens[5]).toEqual value: " ", scopes: ["source.coffee"] 915 | 916 | {tokens} = grammar.tokenizeLine("(arg1, arg2) -> true") 917 | expect(tokens[0]).toEqual value: "(", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] 918 | expect(tokens[1]).toEqual value: "arg1", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] 919 | expect(tokens[2]).toEqual value: ",", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.separator.delimiter.coffee"] 920 | expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee"] 921 | expect(tokens[4]).toEqual value: "arg2", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] 922 | expect(tokens[5]).toEqual value: ")", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.end.bracket.round.coffee"] 923 | expect(tokens[6]).toEqual value: " ", scopes: ["source.coffee", "meta.function.inline.coffee"] 924 | expect(tokens[7]).toEqual value: "->", scopes: ["source.coffee", "meta.function.inline.coffee", "storage.type.function.coffee"] 925 | expect(tokens[8]).toEqual value: " ", scopes: ["source.coffee"] 926 | 927 | {tokens} = grammar.tokenizeLine("( arg1, arg2 )-> true") 928 | expect(tokens[0]).toEqual value: "(", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] 929 | expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee"] 930 | expect(tokens[2]).toEqual value: "arg1", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] 931 | expect(tokens[3]).toEqual value: ",", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.separator.delimiter.coffee"] 932 | expect(tokens[4]).toEqual value: " ", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee"] 933 | expect(tokens[5]).toEqual value: "arg2", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] 934 | expect(tokens[6]).toEqual value: " ", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee"] 935 | expect(tokens[7]).toEqual value: ")", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.end.bracket.round.coffee"] 936 | expect(tokens[8]).toEqual value: "->", scopes: ["source.coffee", "meta.function.inline.coffee", "storage.type.function.coffee"] 937 | expect(tokens[9]).toEqual value: " ", scopes: ["source.coffee"] 938 | 939 | describe "method calls", -> 940 | it "tokenizes method calls", -> 941 | {tokens} = grammar.tokenizeLine('a.b(1+1)') 942 | expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] 943 | expect(tokens[1]).toEqual 
value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] 944 | expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] 945 | expect(tokens[3]).toEqual value: '(', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] 946 | expect(tokens[4]).toEqual value: '1', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] 947 | expect(tokens[5]).toEqual value: '+', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'keyword.operator.coffee'] 948 | expect(tokens[6]).toEqual value: '1', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] 949 | expect(tokens[7]).toEqual value: ')', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] 950 | 951 | {tokens} = grammar.tokenizeLine('a . b(1+1)') 952 | expect(tokens[2]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] 953 | expect(tokens[4]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] 954 | expect(tokens[5]).toEqual value: '(', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] 955 | 956 | {tokens} = grammar.tokenizeLine('a.$abc$()') 957 | expect(tokens[2]).toEqual value: '$abc$', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] 958 | 959 | {tokens} = grammar.tokenizeLine('a.$$()') 960 | expect(tokens[2]).toEqual value: '$$', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] 961 | 962 | {tokens} = grammar.tokenizeLine('a.b c') 963 | expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] 964 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] 965 | expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] 966 | expect(tokens[3]).toEqual value: ' ', scopes: ['source.coffee', 'meta.method-call.coffee'] 967 | expect(tokens[4]).toEqual value: 'c', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee'] 968 | 969 | {tokens} = grammar.tokenizeLine('(a.b c)') 970 | expect(tokens[0]).toEqual value: '(', scopes: ['source.coffee', 'meta.brace.round.coffee'] 971 | expect(tokens[1]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] 972 | expect(tokens[2]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] 973 | expect(tokens[3]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] 974 | expect(tokens[4]).toEqual value: ' ', scopes: ['source.coffee', 'meta.method-call.coffee'] 975 | expect(tokens[5]).toEqual value: 'c', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee'] 976 | expect(tokens[6]).toEqual value: ')', scopes: ['source.coffee', 'meta.brace.round.coffee'] 977 | 978 | {tokens} = grammar.tokenizeLine("[a.b c]") 979 | expect(tokens[0]).toEqual value: "[", scopes: 
["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] 980 | expect(tokens[1]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] 981 | expect(tokens[2]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] 982 | expect(tokens[3]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] 983 | expect(tokens[4]).toEqual value: ' ', scopes: ['source.coffee', 'meta.method-call.coffee'] 984 | expect(tokens[5]).toEqual value: 'c', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee'] 985 | expect(tokens[6]).toEqual value: "]", scopes: ["source.coffee", "punctuation.definition.array.end.bracket.square.coffee"] 986 | 987 | {tokens} = grammar.tokenizeLine('a.b not c') 988 | expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] 989 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] 990 | expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] 991 | expect(tokens[3]).toEqual value: ' ', scopes: ['source.coffee', 'meta.method-call.coffee'] 992 | expect(tokens[4]).toEqual value: 'not', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'keyword.operator.logical.coffee'] 993 | expect(tokens[5]).toEqual value: ' c', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee'] 994 | 995 | {tokens} = grammar.tokenizeLine('a.b 1+1') 996 | expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] 997 | expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] 998 | expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] 999 | expect(tokens[3]).toEqual value: ' ', scopes: ['source.coffee', 'meta.method-call.coffee'] 1000 | expect(tokens[4]).toEqual value: '1', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] 1001 | expect(tokens[5]).toEqual value: '+', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'keyword.operator.coffee'] 1002 | expect(tokens[6]).toEqual value: '1', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] 1003 | 1004 | {tokens} = grammar.tokenizeLine('a.b @') 1005 | expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] 1006 | expect(tokens[4]).toEqual value: '@', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'variable.other.readwrite.instance.coffee'] 1007 | 1008 | {tokens} = grammar.tokenizeLine('a.$abc$ "q"') 1009 | expect(tokens[2]).toEqual value: '$abc$', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] 1010 | 1011 | {tokens} = grammar.tokenizeLine('a.$$ 4') 1012 | expect(tokens[2]).toEqual value: '$$', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] 1013 | 1014 | {tokens} = grammar.tokenizeLine('a.b @$') 1015 | expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] 1016 | expect(tokens[1]).toEqual value: '.', scopes: 
['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] 1017 | expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] 1018 | expect(tokens[3]).toEqual value: ' ', scopes: ['source.coffee', 'meta.method-call.coffee'] 1019 | expect(tokens[4]).toEqual value: '@$', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'variable.other.readwrite.instance.coffee'] 1020 | 1021 | describe "destructuring assignments", -> 1022 | it "tokenizes object and array destructuring", -> 1023 | {tokens} = grammar.tokenizeLine("{something} = hi") 1024 | expect(tokens[0]).toEqual value: "{", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] 1025 | expect(tokens[1]).toEqual value: "something", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "variable.assignment.coffee"] 1026 | expect(tokens[2]).toEqual value: "}", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.end.bracket.curly.coffee"] 1027 | expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee"] 1028 | expect(tokens[4]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] 1029 | expect(tokens[5]).toEqual value: " hi", scopes: ["source.coffee"] 1030 | 1031 | {tokens} = grammar.tokenizeLine("[x, y] = browserWindow.getPosition()") 1032 | expect(tokens[0]).toEqual value: "[", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee", "punctuation.definition.destructuring.begin.bracket.square.coffee"] 1033 | expect(tokens[1]).toEqual value: "x", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee", "variable.assignment.coffee"] 1034 | expect(tokens[2]).toEqual value: ",", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee", "punctuation.separator.delimiter.coffee"] 1035 | expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee"] 1036 | expect(tokens[4]).toEqual value: "y", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee", "variable.assignment.coffee"] 1037 | expect(tokens[5]).toEqual value: "]", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee", "punctuation.definition.destructuring.end.bracket.square.coffee"] 1038 | expect(tokens[6]).toEqual value: " ", scopes: ["source.coffee"] 1039 | expect(tokens[7]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] 1040 | expect(tokens[8]).toEqual value: " ", scopes: ["source.coffee"] 1041 | 1042 | {tokens} = grammar.tokenizeLine("{'} ='}") # Make sure this *isn't* tokenized as a destructuring assignment 1043 | expect(tokens[0]).not.toEqual value: "{", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] 1044 | expect(tokens[0]).toEqual value: "{", scopes: ["source.coffee", "meta.brace.curly.coffee"] 1045 | 1046 | it "tokenizes nested destructuring assignments", -> 1047 | {tokens} = grammar.tokenizeLine("{poet: {name, address: [street, city]}} = futurists") 1048 | expect(tokens[0]).toEqual value: "{", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] 
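# (Editorial note: the assertions below sample only the structural tokens of
# "{poet: {name, address: [street, city]}} = futurists": the nested braces,
# brackets, and the trailing `=`. Identifiers, colons, and commas are skipped,
# which is why the token indices jump from 4 to 11 to 16.)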
1049 | expect(tokens[4]).toEqual value: "{", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] 1050 | expect(tokens[11]).toEqual value: "[", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.array.coffee", "punctuation.definition.destructuring.begin.bracket.square.coffee"] 1051 | expect(tokens[16]).toEqual value: "]", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.array.coffee", "punctuation.definition.destructuring.end.bracket.square.coffee"] 1052 | expect(tokens[17]).toEqual value: "}", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.end.bracket.curly.coffee"] 1053 | expect(tokens[18]).toEqual value: "}", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.end.bracket.curly.coffee"] 1054 | expect(tokens[19]).toEqual value: " ", scopes: ["source.coffee"] 1055 | expect(tokens[20]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] 1056 | 1057 | it "tokenizes multiple nested destructuring assignments", -> 1058 | {tokens} = grammar.tokenizeLine("{start: {row: startRow}, end: {row: endRow}} = range") 1059 | expect(tokens[0]).toEqual value: "{", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] 1060 | expect(tokens[4]).toEqual value: "{", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] 1061 | expect(tokens[9]).toEqual value: "}", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.end.bracket.curly.coffee"] 1062 | expect(tokens[15]).toEqual value: "{", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] 1063 | expect(tokens[20]).toEqual value: "}", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.end.bracket.curly.coffee"] 1064 | expect(tokens[21]).toEqual value: "}", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.end.bracket.curly.coffee"] 1065 | expect(tokens[22]).toEqual value: " ", scopes: ["source.coffee"] 1066 | expect(tokens[23]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] 1067 | 1068 | it "doesn't tokenize nested brackets as destructuring assignments", -> 1069 | {tokens} = grammar.tokenizeLine("[Point(0, 1), [Point(0, 0), Point(0, 1)]]") 1070 | expect(tokens[0]).not.toEqual value: "[", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee", 
"punctuation.definition.destructuring.begin.bracket.square.coffee"] 1071 | expect(tokens[0]).toEqual value: "[", scopes: ["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] 1072 | 1073 | it "tokenizes inline constant followed by unless statement correctly", -> 1074 | {tokens} = grammar.tokenizeLine("return 0 unless true") 1075 | expect(tokens[0]).toEqual value: "return", scopes: ["source.coffee", "keyword.control.coffee"] 1076 | expect(tokens[2]).toEqual value: "0", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] 1077 | expect(tokens[4]).toEqual value: "unless", scopes: ["source.coffee", "keyword.control.coffee"] 1078 | expect(tokens[6]).toEqual value: "true", scopes: ["source.coffee", "constant.language.boolean.true.coffee"] 1079 | 1080 | describe "for loops", -> 1081 | it "tokenizes for-in loops", -> 1082 | {tokens} = grammar.tokenizeLine("for food in foods") 1083 | expect(tokens[0]).toEqual value: "for", scopes: ["source.coffee", "keyword.control.coffee"] 1084 | expect(tokens[1]).toEqual value: " food ", scopes: ["source.coffee"] 1085 | expect(tokens[2]).toEqual value: "in", scopes: ["source.coffee", "keyword.control.coffee"] 1086 | expect(tokens[3]).toEqual value: " foods", scopes: ["source.coffee"] 1087 | 1088 | it "tokenizes for-of loops", -> 1089 | {tokens} = grammar.tokenizeLine("for food, type of foods") 1090 | expect(tokens[0]).toEqual value: "for", scopes: ["source.coffee", "keyword.control.coffee"] 1091 | expect(tokens[1]).toEqual value: " food", scopes: ["source.coffee"] 1092 | expect(tokens[2]).toEqual value: ",", scopes: ["source.coffee", "punctuation.separator.delimiter.coffee"] 1093 | expect(tokens[3]).toEqual value: " type ", scopes: ["source.coffee"] 1094 | expect(tokens[4]).toEqual value: "of", scopes: ["source.coffee", "keyword.control.coffee"] 1095 | expect(tokens[5]).toEqual value: " foods", scopes: ["source.coffee"] 1096 | 1097 | it "tokenizes loops using arrays", -> 1098 | {tokens} = grammar.tokenizeLine("for food in ['broccoli', 'spinach', 'chocolate']") 1099 | expect(tokens[0]).toEqual value: "for", scopes: ["source.coffee", "keyword.control.coffee"] 1100 | expect(tokens[1]).toEqual value: " food ", scopes: ["source.coffee"] 1101 | expect(tokens[2]).toEqual value: "in", scopes: ["source.coffee", "keyword.control.coffee"] 1102 | expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee"] 1103 | expect(tokens[4]).toEqual value: "[", scopes: ["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] 1104 | expect(tokens[18]).toEqual value: "]", scopes: ["source.coffee", "punctuation.definition.array.end.bracket.square.coffee"] 1105 | 1106 | it "tokenizes loops using the optional `when` keyword", -> 1107 | {tokens} = grammar.tokenizeLine("for food in foods when food isnt chocolate") 1108 | expect(tokens[0]).toEqual value: "for", scopes: ["source.coffee", "keyword.control.coffee"] 1109 | expect(tokens[3]).toEqual value: " foods ", scopes: ["source.coffee"] 1110 | expect(tokens[4]).toEqual value: "when", scopes: ["source.coffee", "keyword.control.coffee"] 1111 | expect(tokens[5]).toEqual value: " food ", scopes: ["source.coffee"] 1112 | expect(tokens[6]).toEqual value: "isnt", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] 1113 | expect(tokens[7]).toEqual value: " chocolate", scopes: ["source.coffee"] 1114 | 1115 | it "tokenizes loops using the optional `by` keyword", -> 1116 | {tokens} = grammar.tokenizeLine("for food in foods by -1") 1117 | expect(tokens[0]).toEqual value: "for", scopes: 
["source.coffee", "keyword.control.coffee"] 1118 | expect(tokens[3]).toEqual value: " foods ", scopes: ["source.coffee"] 1119 | expect(tokens[4]).toEqual value: "by", scopes: ["source.coffee", "keyword.control.coffee"] 1120 | expect(tokens[5]).toEqual value: " ", scopes: ["source.coffee"] 1121 | expect(tokens[6]).toEqual value: "-", scopes: ["source.coffee", "keyword.operator.coffee"] 1122 | expect(tokens[7]).toEqual value: "1", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] 1123 | 1124 | describe "regular expressions", -> 1125 | beforeEach -> 1126 | waitsForPromise -> 1127 | atom.packages.activatePackage("language-javascript") # Provides the regexp subgrammar 1128 | 1129 | it "tokenizes regular expressions", -> 1130 | {tokens} = grammar.tokenizeLine("/test/") 1131 | expect(tokens[0]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] 1132 | expect(tokens[1]).toEqual value: "test", scopes: ["source.coffee", "string.regexp.coffee"] 1133 | expect(tokens[2]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] 1134 | 1135 | {tokens} = grammar.tokenizeLine("/{'}/") 1136 | expect(tokens[0]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] 1137 | expect(tokens[2]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] 1138 | 1139 | {tokens} = grammar.tokenizeLine("foo + /test/") 1140 | expect(tokens[0]).toEqual value: "foo ", scopes: ["source.coffee"] 1141 | expect(tokens[1]).toEqual value: "+", scopes: ["source.coffee", "keyword.operator.coffee"] 1142 | expect(tokens[2]).toEqual value: " ", scopes: ["source.coffee"] 1143 | expect(tokens[3]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] 1144 | expect(tokens[4]).toEqual value: "test", scopes: ["source.coffee", "string.regexp.coffee"] 1145 | expect(tokens[5]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] 1146 | 1147 | it "tokenizes regular expressions containing spaces", -> 1148 | {tokens} = grammar.tokenizeLine("/ te st /") 1149 | expect(tokens[0]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] 1150 | expect(tokens[1]).toEqual value: " te st ", scopes: ["source.coffee", "string.regexp.coffee"] 1151 | expect(tokens[2]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] 1152 | 1153 | it "tokenizes regular expressions containing escaped forward slashes", -> 1154 | {tokens} = grammar.tokenizeLine("/test\\//") 1155 | expect(tokens[0]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] 1156 | expect(tokens[1]).toEqual value: "test", scopes: ["source.coffee", "string.regexp.coffee"] 1157 | expect(tokens[2]).toEqual value: "\\/", scopes: ["source.coffee", "string.regexp.coffee", "constant.character.escape.backslash.regexp"] 1158 | expect(tokens[3]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] 1159 | 1160 | {tokens} = grammar.tokenizeLine("/one\\/two!\\/three/") 1161 | expect(tokens[0]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", 
"punctuation.definition.string.begin.coffee"] 1162 | expect(tokens[1]).toEqual value: "one", scopes: ["source.coffee", "string.regexp.coffee"] 1163 | expect(tokens[2]).toEqual value: "\\/", scopes: ["source.coffee", "string.regexp.coffee", "constant.character.escape.backslash.regexp"] 1164 | expect(tokens[3]).toEqual value: "two!", scopes: ["source.coffee", "string.regexp.coffee"] 1165 | expect(tokens[4]).toEqual value: "\\/", scopes: ["source.coffee", "string.regexp.coffee", "constant.character.escape.backslash.regexp"] 1166 | expect(tokens[5]).toEqual value: "three", scopes: ["source.coffee", "string.regexp.coffee"] 1167 | expect(tokens[6]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] 1168 | 1169 | it "tokenizes regular expressions inside arrays", -> 1170 | {tokens} = grammar.tokenizeLine("[/test/]") 1171 | expect(tokens[0]).toEqual value: "[", scopes: ["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] 1172 | expect(tokens[1]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] 1173 | expect(tokens[2]).toEqual value: "test", scopes: ["source.coffee", "string.regexp.coffee"] 1174 | expect(tokens[3]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] 1175 | expect(tokens[4]).toEqual value: "]", scopes: ["source.coffee", "punctuation.definition.array.end.bracket.square.coffee"] 1176 | 1177 | {tokens} = grammar.tokenizeLine("[1, /test/]") 1178 | expect(tokens[0]).toEqual value: "[", scopes: ["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] 1179 | expect(tokens[1]).toEqual value: "1", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] 1180 | expect(tokens[2]).toEqual value: ",", scopes: ["source.coffee", "punctuation.separator.delimiter.coffee"] 1181 | expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee"] 1182 | expect(tokens[4]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] 1183 | expect(tokens[5]).toEqual value: "test", scopes: ["source.coffee", "string.regexp.coffee"] 1184 | expect(tokens[6]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] 1185 | expect(tokens[7]).toEqual value: "]", scopes: ["source.coffee", "punctuation.definition.array.end.bracket.square.coffee"] 1186 | 1187 | it "does not tokenize multiple division as regex", -> 1188 | # https://github.com/atom/language-coffee-script/issues/112 1189 | {tokens} = grammar.tokenizeLine("a / b + c / d") 1190 | expect(tokens[1]).toEqual value: "/", scopes: ["source.coffee", "keyword.operator.coffee"] 1191 | expect(tokens[2]).toEqual value: " b ", scopes: ["source.coffee"] 1192 | expect(tokens[5]).toEqual value: "/", scopes: ["source.coffee", "keyword.operator.coffee"] 1193 | 1194 | {tokens} = grammar.tokenizeLine("a / 2 / (3)") 1195 | expect(tokens[1]).toEqual value: "/", scopes: ["source.coffee", "keyword.operator.coffee"] 1196 | expect(tokens[3]).toEqual value: "2", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] 1197 | expect(tokens[5]).toEqual value: "/", scopes: ["source.coffee", "keyword.operator.coffee"] 1198 | 1199 | it "does not tokenize comments with URLs in them as regex", -> 1200 | # Disclaimer: This does not fix when comments contain only slashes, such as `a / something # comment /` 1201 | {tokens} = 
grammar.tokenizeLine("canvas.width/2 # https://github.com/atom/language-coffee-script/issues/112") 1202 | expect(tokens[3]).toEqual value: "/", scopes: ["source.coffee", "keyword.operator.coffee"] 1203 | expect(tokens[6]).toEqual value: "#", scopes: ["source.coffee", "comment.line.number-sign.coffee", "punctuation.definition.comment.coffee"] 1204 | expect(tokens[7]).toEqual value: " https://github.com/atom/language-coffee-script/issues/112", scopes: ["source.coffee", "comment.line.number-sign.coffee"] 1205 | 1206 | it "stops tokenizing regex at the first non-escaped forwards slash", -> 1207 | {tokens} = grammar.tokenizeLine("path.replace(/\\\\/g, '/')") 1208 | expect(tokens[4]).toEqual value: "/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] 1209 | expect(tokens[6]).toEqual value: "/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] 1210 | expect(tokens[11]).toEqual value: "/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.quoted.single.coffee"] 1211 | 1212 | {tokens} = grammar.tokenizeLine("path.replace(/\\\\\\//g, '/')") 1213 | expect(tokens[4]).toEqual value: "/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] 1214 | expect(tokens[6]).toEqual value: "\\/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.regexp.coffee", "constant.character.escape.backslash.regexp"] 1215 | expect(tokens[7]).toEqual value: "/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] 1216 | expect(tokens[12]).toEqual value: "/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.quoted.single.coffee"] 1217 | 1218 | it "tokenises multi-line regular expressions", -> 1219 | {tokens} = grammar.tokenizeLine('/// (XYZ) ///') 1220 | expect(tokens[0]).toEqual value: '///', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'punctuation.definition.string.begin.coffee'] 1221 | expect(tokens[2]).toEqual value: '(', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'meta.group.regexp', 'punctuation.definition.group.regexp'] 1222 | expect(tokens[3]).toEqual value: 'XYZ', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'meta.group.regexp'] 1223 | expect(tokens[4]).toEqual value: ')', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'meta.group.regexp', 'punctuation.definition.group.regexp'] 1224 | expect(tokens[6]).toEqual value: '///', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'punctuation.definition.string.end.coffee'] 1225 | 1226 | lines = grammar.tokenizeLines """ 1227 | /// 1228 | XYZ // 1229 | /~/ 1230 | /// 1231 | """ 1232 | expect(lines[0][0]).toEqual value: '///', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'punctuation.definition.string.begin.coffee'] 1233 | expect(lines[1][0]).toEqual value: 'XYZ //', scopes: ['source.coffee', 'string.regexp.multiline.coffee'] 1234 | expect(lines[2][0]).toEqual value: '/~/', scopes: ['source.coffee', 'string.regexp.multiline.coffee'] 1235 | expect(lines[3][0]).toEqual value: '///', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'punctuation.definition.string.end.coffee'] 1236 | 1237 | describe 
"here-docs", -> 1238 | it "tokenises single-quoted here-docs", -> 1239 | {tokens} = grammar.tokenizeLine "'''XYZ'''" 1240 | expect(tokens[0]).toEqual value: "'''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee', 'punctuation.definition.string.begin.coffee'] 1241 | expect(tokens[1]).toEqual value: 'XYZ', scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee'] 1242 | expect(tokens[2]).toEqual value: "'''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee', 'punctuation.definition.string.end.coffee'] 1243 | 1244 | lines = grammar.tokenizeLines """ 1245 | ''' 1246 | 'ABC' 1247 | XYZ '' 1248 | ''' 1249 | """ 1250 | expect(lines[0][0]).toEqual value: "'''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee', 'punctuation.definition.string.begin.coffee'] 1251 | expect(lines[1][0]).toEqual value: "'ABC'", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee'] 1252 | expect(lines[2][0]).toEqual value: "XYZ ''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee'] 1253 | expect(lines[3][0]).toEqual value: "'''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee', 'punctuation.definition.string.end.coffee'] 1254 | 1255 | it "tokenises double-quoted here-docs", -> 1256 | {tokens} = grammar.tokenizeLine "'''XYZ'''" 1257 | expect(tokens[0]).toEqual value: "'''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee', 'punctuation.definition.string.begin.coffee'] 1258 | expect(tokens[1]).toEqual value: 'XYZ', scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee'] 1259 | expect(tokens[2]).toEqual value: "'''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee', 'punctuation.definition.string.end.coffee'] 1260 | 1261 | lines = grammar.tokenizeLines ''' 1262 | """ 1263 | "ABC" 1264 | XYZ "" 1265 | """ 1266 | ''' 1267 | expect(lines[0][0]).toEqual value: '"""', scopes: ['source.coffee', 'string.quoted.double.heredoc.coffee', 'punctuation.definition.string.begin.coffee'] 1268 | expect(lines[1][0]).toEqual value: '"ABC"', scopes: ['source.coffee', 'string.quoted.double.heredoc.coffee'] 1269 | expect(lines[2][0]).toEqual value: 'XYZ ""', scopes: ['source.coffee', 'string.quoted.double.heredoc.coffee'] 1270 | expect(lines[3][0]).toEqual value: '"""', scopes: ['source.coffee', 'string.quoted.double.heredoc.coffee', 'punctuation.definition.string.end.coffee'] 1271 | 1272 | describe "escape sequences in strings", -> 1273 | it "tokenises leading backslashes in double-quoted strings", -> 1274 | {tokens} = grammar.tokenizeLine('"a\\\\b\\\\\\\\c"') 1275 | expect(tokens[0]).toEqual value: '"', scopes: ['source.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.begin.coffee'] 1276 | expect(tokens[1]).toEqual value: 'a', scopes: ['source.coffee', 'string.quoted.double.coffee'] 1277 | expect(tokens[2]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] 1278 | expect(tokens[3]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee'] 1279 | expect(tokens[4]).toEqual value: 'b', scopes: ['source.coffee', 'string.quoted.double.coffee'] 1280 | expect(tokens[5]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] 1281 | expect(tokens[6]).toEqual value: '\\', scopes: ['source.coffee', 
1271 |
1272 |   describe "escape sequences in strings", ->
1273 |     it "tokenises leading backslashes in double-quoted strings", ->
1274 |       {tokens} = grammar.tokenizeLine('"a\\\\b\\\\\\\\c"')
1275 |       expect(tokens[0]).toEqual value: '"', scopes: ['source.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.begin.coffee']
1276 |       expect(tokens[1]).toEqual value: 'a', scopes: ['source.coffee', 'string.quoted.double.coffee']
1277 |       expect(tokens[2]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1278 |       expect(tokens[3]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee']
1279 |       expect(tokens[4]).toEqual value: 'b', scopes: ['source.coffee', 'string.quoted.double.coffee']
1280 |       expect(tokens[5]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1281 |       expect(tokens[6]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee']
1282 |       expect(tokens[7]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1283 |       expect(tokens[8]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee']
1284 |       expect(tokens[9]).toEqual value: 'c', scopes: ['source.coffee', 'string.quoted.double.coffee']
1285 |       expect(tokens[10]).toEqual value: '"', scopes: ['source.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.end.coffee']
1286 |
1287 |       {tokens} = grammar.tokenizeLine('"\\a\\t\\a\\b"')
1288 |       expect(tokens[0]).toEqual value: '"', scopes: ['source.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.begin.coffee']
1289 |       expect(tokens[1]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1290 |       expect(tokens[2]).toEqual value: 'a', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee']
1291 |       expect(tokens[3]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1292 |       expect(tokens[4]).toEqual value: 't', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee']
1293 |       expect(tokens[5]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1294 |       expect(tokens[6]).toEqual value: 'a', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee']
1295 |       expect(tokens[7]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1296 |       expect(tokens[8]).toEqual value: 'b', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee']
1297 |       expect(tokens[9]).toEqual value: '"', scopes: ['source.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.end.coffee']
1298 |
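    # The grammar scopes every backslash-plus-character pair as an escape,
    # including pairs like \a above that CoffeeScript gives no special meaning
    # at runtime; the backslash itself additionally carries
    # punctuation.definition.escape.backslash.coffee.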
1299 |     it "tokenises leading backslashes in single-quoted strings", ->
1300 |       {tokens} = grammar.tokenizeLine("'a\\\\b\\\\\\\\c'")
1301 |       expect(tokens[0]).toEqual value: "'", scopes: ['source.coffee', 'string.quoted.single.coffee', 'punctuation.definition.string.begin.coffee']
1302 |       expect(tokens[1]).toEqual value: 'a', scopes: ['source.coffee', 'string.quoted.single.coffee']
1303 |       expect(tokens[2]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1304 |       expect(tokens[3]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee']
1305 |       expect(tokens[4]).toEqual value: 'b', scopes: ['source.coffee', 'string.quoted.single.coffee']
1306 |       expect(tokens[5]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1307 |       expect(tokens[6]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee']
1308 |       expect(tokens[7]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1309 |       expect(tokens[8]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee']
1310 |       expect(tokens[9]).toEqual value: 'c', scopes: ['source.coffee', 'string.quoted.single.coffee']
1311 |       expect(tokens[10]).toEqual value: "'", scopes: ['source.coffee', 'string.quoted.single.coffee', 'punctuation.definition.string.end.coffee']
1312 |
1313 |       {tokens} = grammar.tokenizeLine("'\\a\\t\\a\\b'")
1314 |       expect(tokens[0]).toEqual value: "'", scopes: ['source.coffee', 'string.quoted.single.coffee', 'punctuation.definition.string.begin.coffee']
1315 |       expect(tokens[1]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1316 |       expect(tokens[2]).toEqual value: 'a', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee']
1317 |       expect(tokens[3]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1318 |       expect(tokens[4]).toEqual value: 't', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee']
1319 |       expect(tokens[5]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1320 |       expect(tokens[6]).toEqual value: 'a', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee']
1321 |       expect(tokens[7]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee']
1322 |       expect(tokens[8]).toEqual value: 'b', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee']
1323 |       expect(tokens[9]).toEqual value: "'", scopes: ['source.coffee', 'string.quoted.single.coffee', 'punctuation.definition.string.end.coffee']
1324 |
1325 |   describe "jsx", ->
1326 |     it "tokenises HTML tags", ->
1327 |       {tokens} = grammar.tokenizeLine("<div></div>")
1328 |       expect(tokens[0]).toEqual value: '<', scopes: ['source.coffee', 'meta.tag.coffee', 'punctuation.definition.tag.coffee']
1329 |       expect(tokens[1]).toEqual value: 'div', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.name.tag.coffee']
1330 |       expect(tokens[2]).toEqual value: '>', scopes: ['source.coffee', 'meta.tag.coffee']
1331 |       expect(tokens[3]).toEqual value: '</', scopes: ['source.coffee', 'meta.tag.coffee', 'punctuation.definition.tag.coffee']
1332 |       expect(tokens[4]).toEqual value: 'div', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.name.tag.coffee']
1333 |       expect(tokens[5]).toEqual value: '>', scopes: ['source.coffee', 'meta.tag.coffee']
1334 |
1335 |       {tokens} = grammar.tokenizeLine("<div/>")
1336 |       expect(tokens[0]).toEqual value: '<', scopes: ['source.coffee', 'meta.tag.coffee', 'punctuation.definition.tag.coffee']
1337 |       expect(tokens[1]).toEqual value: 'div', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.name.tag.coffee']
1338 |       expect(tokens[2]).toEqual value: '/>', scopes: ['source.coffee', 'meta.tag.coffee']
1339 |
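    # Note: '>' and the self-closing '/>' carry only meta.tag.coffee, with no
    # punctuation.definition.tag sub-scope, unlike the opening '<' and '</'.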
1340 |     it "tokenises HTML tags with attributes", ->
1341 |       {tokens} = grammar.tokenizeLine("<div class='myclass' id=\"myid\">")
1342 |       expect(tokens[0]).toEqual value: '<', scopes: ['source.coffee', 'meta.tag.coffee', 'punctuation.definition.tag.coffee']
1343 |       expect(tokens[1]).toEqual value: 'div', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.name.tag.coffee']
1344 |       expect(tokens[2]).toEqual value: ' ', scopes: ['source.coffee', 'meta.tag.coffee']
1345 |       expect(tokens[3]).toEqual value: 'class', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.other.attribute-name.coffee']
1346 |       expect(tokens[4]).toEqual value: '=', scopes: ['source.coffee', 'meta.tag.coffee', 'keyword.operator.assignment.coffee']
1347 |       expect(tokens[5]).toEqual value: '\'', scopes: ['source.coffee', 'meta.tag.coffee', 'string.quoted.single.coffee', 'punctuation.definition.string.begin.coffee']
1348 |       expect(tokens[6]).toEqual value: 'myclass', scopes: ['source.coffee', 'meta.tag.coffee', 'string.quoted.single.coffee']
1349 |       expect(tokens[7]).toEqual value: '\'', scopes: ['source.coffee', 'meta.tag.coffee', 'string.quoted.single.coffee', 'punctuation.definition.string.end.coffee']
1350 |       expect(tokens[8]).toEqual value: ' ', scopes: ['source.coffee', 'meta.tag.coffee']
1351 |       expect(tokens[9]).toEqual value: 'id', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.other.attribute-name.coffee']
1352 |       expect(tokens[10]).toEqual value: '=', scopes: ['source.coffee', 'meta.tag.coffee', 'keyword.operator.assignment.coffee']
1353 |       expect(tokens[11]).toEqual value: '"', scopes: ['source.coffee', 'meta.tag.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.begin.coffee']
1354 |       expect(tokens[12]).toEqual value: 'myid', scopes: ['source.coffee', 'meta.tag.coffee', 'string.quoted.double.coffee']
1355 |       expect(tokens[13]).toEqual value: '"', scopes: ['source.coffee', 'meta.tag.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.end.coffee']
1356 |       expect(tokens[14]).toEqual value: '>', scopes: ['source.coffee', 'meta.tag.coffee']
1357 |
1358 |     it "tokenises HTML tags with attributes that have expressions", ->
1359 |       {tokens} = grammar.tokenizeLine("<div on-click={(e)->@handleClick(e)}>")
1360 |       expect(tokens[0]).toEqual value: '<', scopes: ['source.coffee', 'meta.tag.coffee', 'punctuation.definition.tag.coffee']
1361 |       expect(tokens[1]).toEqual value: 'div', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.name.tag.coffee']
1362 |       expect(tokens[2]).toEqual value: ' ', scopes: ['source.coffee', 'meta.tag.coffee']
1363 |       expect(tokens[3]).toEqual value: 'on-click', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.other.attribute-name.coffee']
1364 |       expect(tokens[4]).toEqual value: '=', scopes: ['source.coffee', 'meta.tag.coffee', 'keyword.operator.assignment.coffee']
1365 |       expect(tokens[5]).toEqual value: '{', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.brace.curly.coffee']
1366 |       expect(tokens[6]).toEqual value: '(', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function.inline.coffee', 'meta.parameters.coffee', 'punctuation.definition.parameters.begin.bracket.round.coffee']
1367 |       expect(tokens[7]).toEqual value: 'e', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function.inline.coffee', 'meta.parameters.coffee', 'variable.parameter.function.coffee']
1368 |       expect(tokens[8]).toEqual value: ')', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function.inline.coffee', 'meta.parameters.coffee', 'punctuation.definition.parameters.end.bracket.round.coffee']
1369 |       expect(tokens[9]).toEqual value: '->', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function.inline.coffee', 'storage.type.function.coffee']
1370 |       expect(tokens[10]).toEqual value: '@', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function-call.coffee', 'variable.other.readwrite.instance.coffee']
1371 |       expect(tokens[11]).toEqual value: 'handleClick', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee']
1372 |       expect(tokens[12]).toEqual value: '(', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee']
1373 |       expect(tokens[13]).toEqual value: 'e', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee']
1374 |       expect(tokens[14]).toEqual value: ')', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee']
1375 |       expect(tokens[15]).toEqual value: '}', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.brace.curly.coffee']
1376 |       expect(tokens[16]).toEqual value: '>', scopes: ['source.coffee', 'meta.tag.coffee']
1377 |
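  # Inside a JSX attribute expression, the braces delimit ordinary
  # CoffeeScript: the inline-function and method-call scopes above are the
  # usual ones, just nested under meta.tag.coffee.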
1378 |   describe "firstLineMatch", ->
1379 |     it "recognises interpreter directives", ->
1380 |       valid = """
1381 |         #!/usr/sbin/coffee foo
1382 |         #!/usr/bin/coffee foo=bar/
1383 |         #!/usr/sbin/coffee
1384 |         #!/usr/sbin/coffee foo bar baz
1385 |         #!/usr/bin/coffee perl
1386 |         #!/usr/bin/coffee bin/perl
1387 |         #!/usr/bin/coffee
1388 |         #!/bin/coffee
1389 |         #!/usr/bin/coffee --script=usr/bin
1390 |         #! /usr/bin/env A=003 B=149 C=150 D=xzd E=base64 F=tar G=gz H=head I=tail coffee
1391 |         #!\t/usr/bin/env --foo=bar coffee --quu=quux
1392 |         #! /usr/bin/coffee
1393 |         #!/usr/bin/env coffee
1394 |       """
1395 |       for line in valid.split /\n/
1396 |         expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull()
1397 |
1398 |       invalid = """
1399 |         \x20#!/usr/sbin/coffee
1400 |         \t#!/usr/sbin/coffee
1401 |         #!/usr/bin/env-coffee/node-env/
1402 |         #!/usr/bin/env-coffee
1403 |         #! /usr/bincoffee
1404 |         #!\t/usr/bin/env --coffee=bar
1405 |       """
1406 |       for line in invalid.split /\n/
1407 |         expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull()
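      # Note: the \x20 and \t cases above show that leading whitespace
      # invalidates a shebang, and hyphenated names such as "env-coffee" are
      # not accepted as the coffee interpreter.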
1408 |
1409 |     it "recognises Emacs modelines", ->
1410 |       valid = """
1411 |         #-*- coffee -*-
1412 |         #-*- mode: Coffee -*-
1413 |         /* -*-coffee-*- */
1414 |         // -*- Coffee -*-
1415 |         /* -*- mode:Coffee -*- */
1416 |         // -*- font:bar;mode:Coffee -*-
1417 |         // -*- font:bar;mode:Coffee;foo:bar; -*-
1418 |         // -*-font:mode;mode:COFFEE-*-
1419 |         // -*- foo:bar mode: coffee bar:baz -*-
1420 |         " -*-foo:bar;mode:cOFFEE;bar:foo-*- ";
1421 |         " -*-font-mode:foo;mode:coFFeE;foo-bar:quux-*-"
1422 |         "-*-font:x;foo:bar; mode : Coffee; bar:foo;foooooo:baaaaar;fo:ba;-*-";
1423 |         "-*- font:x;foo : bar ; mode : Coffee ; bar : foo ; foooooo:baaaaar;fo:ba-*-";
1424 |       """
1425 |       for line in valid.split /\n/
1426 |         expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull()
1427 |
1428 |       invalid = """
1429 |         /* --*coffee-*- */
1430 |         /* -*-- coffee -*-
1431 |         /* -*- -- Coffee -*-
1432 |         /* -*- Coffee -;- -*-
1433 |         // -*- freeCoffee -*-
1434 |         // -*- Coffee; -*-
1435 |         // -*- coffee-sugar -*-
1436 |         /* -*- model:coffee -*-
1437 |         /* -*- indent-mode:coffee -*-
1438 |         // -*- font:mode;Coffee -*-
1439 |         // -*- mode: -*- Coffee
1440 |         // -*- mode: jfc-give-me-coffee -*-
1441 |         // -*-font:mode;mode:coffee--*-
1442 |       """
1443 |       for line in invalid.split /\n/
1444 |         expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull()
1445 |
1446 |     it "recognises Vim modelines", ->
1447 |       valid = """
1448 |         vim: se filetype=coffee:
1449 |         # vim: se ft=coffee:
1450 |         # vim: set ft=COFFEE:
1451 |         # vim: set filetype=CoffEE:
1452 |         # vim: ft=CoffEE
1453 |         # vim: syntax=CoffEE
1454 |         # vim: se syntax=CoffEE:
1455 |         # ex: syntax=CoffEE
1456 |         # vim:ft=coffee
1457 |         # vim600: ft=coffee
1458 |         # vim>600: set ft=coffee:
1459 |         # vi:noai:sw=3 ts=6 ft=coffee
1460 |         # vi::::::::::noai:::::::::::: ft=COFFEE
1461 |         # vim:ts=4:sts=4:sw=4:noexpandtab:ft=cOfFeE
1462 |         # vi:: noai : : : : sw =3 ts =6 ft =coFFEE
1463 |         # vim: ts=4: pi sts=4: ft=cofFeE: noexpandtab: sw=4:
1464 |         # vim: ts=4 sts=4: ft=coffee noexpandtab:
1465 |         # vim:noexpandtab sts=4 ft=coffEE ts=4
1466 |         # vim:noexpandtab:ft=cOFFEe
1467 |         # vim:ts=4:sts=4 ft=cofFeE:noexpandtab:\x20
1468 |         # vim:noexpandtab titlestring=hi\|there\\\\ ft=cOFFEe ts=4
1469 |       """
1470 |       for line in valid.split /\n/
1471 |         expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull()
1472 |
1473 |       invalid = """
1474 |         ex: se filetype=coffee:
1475 |         _vi: se filetype=coffee:
1476 |         vi: se filetype=coffee
1477 |         # vim set ft=coffee
1478 |         # vim: soft=coffee
1479 |         # vim: clean-syntax=coffee:
1480 |         # vim set ft=coffee:
1481 |         # vim: setft=coffee:
1482 |         # vim: se ft=coffee backupdir=tmp
1483 |         # vim: set ft=coffee set cmdheight=1
1484 |         # vim:noexpandtab sts:4 ft:coffee ts:4
1485 |         # vim:noexpandtab titlestring=hi\\|there\\ ft=coffee ts=4
1486 |         # vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=coffee ts=4
1487 |       """
1488 |       for line in invalid.split /\n/
1489 |         expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull()
1490 |
--------------------------------------------------------------------------------