├── .travis.yml ├── LICENSE ├── README.md ├── grammars ├── eex.cson ├── elixir.cson ├── html (eex).cson └── xml (eex).cson ├── package.json ├── settings └── language-elixir.cson ├── snippets └── language-elixir.cson └── spec └── elixir-spec.coffee /.travis.yml: -------------------------------------------------------------------------------- 1 | language: objective-c 2 | 3 | notifications: 4 | email: 5 | on_success: never 6 | on_failure: change 7 | 8 | script: 'curl -s https://raw.githubusercontent.com/atom/ci/master/build-package.sh | sh' 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2012 Plataformatec. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # language-elixir package 2 | 3 | [![Build Status](https://travis-ci.org/elixir-lang/language-elixir.svg?branch=master)](https://travis-ci.org/elixir-lang/language-elixir) 4 | 5 | Elixir language support for Atom. 6 | 7 | Adds syntax highlighting and snippets to Elixir files in Atom. 8 | 9 | Copyright (c) 2014 [Plataformatec](http://plataformatec.com.br). 
10 | -------------------------------------------------------------------------------- /grammars/eex.cson: -------------------------------------------------------------------------------- 1 | 'fileTypes': [ 2 | 'eex' 3 | ] 4 | 'name': 'EEx' 5 | 'patterns': [ 6 | { 7 | 'begin': '<%+#' 8 | 'captures': 9 | '0': 10 | 'name': 'punctuation.definition.comment.eex' 11 | 'end': '%>' 12 | 'name': 'comment.block.eex' 13 | } 14 | { 15 | 'begin': '<%+(?!>)[-=]*' 16 | 'captures': 17 | '0': 18 | 'name': 'punctuation.section.embedded.elixir' 19 | 'end': '-?%>' 20 | 'name': 'source.elixir.embedded' 21 | 'patterns': [ 22 | { 23 | 'captures': 24 | '1': 25 | 'name': 'punctuation.definition.comment.elixir' 26 | 'match': '(#).*?(?=-?%>)' 27 | 'name': 'comment.line.number-sign.elixir' 28 | } 29 | { 30 | 'include': 'source.elixir' 31 | } 32 | ] 33 | } 34 | ] 35 | 'scopeName': 'text.elixir' 36 | -------------------------------------------------------------------------------- /grammars/elixir.cson: -------------------------------------------------------------------------------- 1 | 'comment': 'Atom Syntax Parser for Elixir Programming Language.' 
2 | 'fileTypes': [ 3 | 'ex' 4 | 'exs' 5 | ] 6 | 'firstLineMatch': '^#!/.*\\belixir' 7 | 'foldingStartMarker': '(after|else|catch|rescue|\\-\\>|\\{|\\[|do)\\s*$' 8 | 'foldingStopMarker': '^\\s*((\\}|\\]|after|else|catch|rescue)\\s*$|end\\b)' 9 | 'name': 'Elixir' 10 | 'patterns': [ 11 | { 12 | 'captures': 13 | '1': 14 | 'name': 'keyword.control.module.elixir' 15 | '2': 16 | 'name': 'entity.name.type.module.elixir' 17 | 'match': '^\\s*(defmodule)\\s+(([A-Z]\\w*\\s*(\\.)\\s*)*[A-Z]\\w*)' 18 | 'name': 'meta.module.elixir' 19 | } 20 | 21 | # Docs - Heredoc Interpolated 22 | { 23 | 'begin': '@(module|type)?doc (~s)?"""' 24 | 'comment': '@doc with interpolated heredocs' 25 | 'end': '\\s*"""' 26 | 'name': 'comment.documentation.heredoc.elixir' 27 | 'patterns': [ 28 | { 29 | 'include': '#interpolated_elixir' 30 | } 31 | { 32 | 'include': '#escaped_char' 33 | } 34 | ] 35 | } 36 | { 37 | 'begin': "@(module|type)?doc ~s'''" 38 | 'comment': '@doc with interpolated single quoted heredocs' 39 | 'end': "\\s*'''" 40 | 'name': 'comment.documentation.heredoc.elixir' 41 | 'patterns': [ 42 | { 43 | 'include': '#interpolated_elixir' 44 | } 45 | { 46 | 'include': '#escaped_char' 47 | } 48 | ] 49 | } 50 | 51 | # Docs - Heredoc Literal 52 | { 53 | 'begin': '@(module|type)?doc ~S"""' 54 | 'comment': '@doc with heredocs is treated as documentation' 55 | 'end': '\\s*"""' 56 | 'name': 'comment.documentation.heredoc.elixir' 57 | 'patterns': [ 58 | { 59 | 'include': '#escaped_char' 60 | } 61 | ] 62 | } 63 | { 64 | 'begin': "@(module|type)?doc ~S'''" 65 | 'comment': '@doc with heredocs is treated as documentation' 66 | 'end': "\\s*'''" 67 | 'name': 'comment.documentation.heredoc.elixir' 68 | 'patterns': [ 69 | { 70 | 'include': '#escaped_char' 71 | } 72 | ] 73 | } 74 | 75 | # Docs - False 76 | { 77 | 'comment': '@doc false is treated as documentation' 78 | 'match': '@(module|type)?doc false' 79 | 'name': 'comment.documentation.false' 80 | } 81 | 82 | # Docs - Non-heredoc strings 83 | { 84 | 
'begin': '@(module|type)?doc "' 85 | 'comment': '@doc with string is treated as documentation' 86 | 'end': '"' 87 | 'name': 'comment.documentation.string' 88 | 'patterns': [ 89 | { 90 | 'include': '#interpolated_elixir' 91 | } 92 | { 93 | 'include': '#escaped_char' 94 | } 95 | ] 96 | } 97 | 98 | # Special methods - Imports, Aliases, Requires 99 | { 100 | 'match': '(?[a-zA-Z_][\\w@]*(?>[?!])?)(:)(?!:)' 110 | 'name': 'constant.other.symbol.elixir' 111 | } 112 | { 113 | 'match': '(?_?\\h)*|\\d(?>_?\\d)*(\\.(?![^[:space:][:digit:]])(?>_?\\d)*)?([eE][-+]?\\d(?>_?\\d)*)?|0[bB][01]+)\\b' 156 | 'name': 'constant.numeric.elixir' 157 | } 158 | 159 | # Interpolated Regex Sigils 160 | { 161 | 'comment': 'Regex sigil with curlies' 162 | 'begin': '~r\\{' 163 | 'beginCaptures': 164 | '0': 165 | 'name': 'punctuation.section.regexp.begin.elixir' 166 | 'end': '\\}[eimnosux]*' 167 | 'endCaptures': 168 | '0': 169 | 'name': 'punctuation.section.regexp.end.elixir' 170 | 'name': 'string.regexp.interpolated.elixir' 171 | 'patterns': [ 172 | { 'include': '#regex_sub' } 173 | { 'include': '#nest_curly' } 174 | ] 175 | } 176 | { 177 | 'comment': 'Regex sigil with pipes' 178 | 'begin': '~r\\|' 179 | 'beginCaptures': 180 | '0': 181 | 'name': 'punctuation.section.regexp.begin.elixir' 182 | 'end': '\\|[eimnosux]*' 183 | 'endCaptures': 184 | '0': 185 | 'name': 'punctuation.section.regexp.end.elixir' 186 | 'name': 'string.regexp.interpolated.elixir' 187 | 'patterns': [ 188 | { 'include': '#regex_sub' } 189 | ] 190 | } 191 | { 192 | 'comment': 'Regex sigil with parens' 193 | 'begin': '~r\\(' 194 | 'beginCaptures': 195 | '0': 196 | 'name': 'punctuation.section.regexp.begin.elixir' 197 | 'end': '\\)[eimnosux]*' 198 | 'endCaptures': 199 | '0': 200 | 'name': 'punctuation.section.regexp.end.elixir' 201 | 'name': 'string.regexp.interpolated.elixir' 202 | 'patterns': [ 203 | { 'include': '#regex_sub' } 204 | ] 205 | } 206 | { 207 | 'comment': 'Regex sigil with slashes' 208 | 'begin': '~r\\/' 209 | 
'beginCaptures': 210 | '0': 211 | 'name': 'punctuation.section.regexp.begin.elixir' 212 | 'end': '\\/[eimnosux]*' 213 | 'endCaptures': 214 | '0': 215 | 'name': 'punctuation.section.regexp.end.elixir' 216 | 'name': 'string.regexp.interpolated.elixir' 217 | 'patterns': [ 218 | { 'include': '#regex_sub' } 219 | ] 220 | } 221 | { 222 | 'comment': 'Regex sigil with brackets' 223 | 'begin': '~r\\[' 224 | 'beginCaptures': 225 | '0': 226 | 'name': 'punctuation.section.regexp.begin.elixir' 227 | 'end': '\\][eimnosux]*' 228 | 'endCaptures': 229 | '0': 230 | 'name': 'punctuation.section.regexp.end.elixir' 231 | 'name': 'string.regexp.interpolated.elixir' 232 | 'patterns': [ 233 | { 'include': '#regex_sub' } 234 | { 'include': '#nest_brackets' } 235 | ] 236 | } 237 | { 238 | 'comment': 'Regex sigil with ltgt' 239 | 'begin': '~r\\<' 240 | 'beginCaptures': 241 | '0': 242 | 'name': 'punctuation.section.regexp.begin.elixir' 243 | 'end': '\\>[eimnosux]*' 244 | 'endCaptures': 245 | '0': 246 | 'name': 'punctuation.section.regexp.end.elixir' 247 | 'name': 'string.regexp.interpolated.elixir' 248 | 'patterns': [ 249 | { 'include': '#regex_sub' } 250 | { 'include': '#nest_ltgt' } 251 | ] 252 | } 253 | { 254 | 'comment': 'Regex sigil with single quoted heredocs' 255 | 'begin': "~r\\'\\'\\'" 256 | 'beginCaptures': 257 | '0': 258 | 'name': 'punctuation.section.regexp.begin.elixir' 259 | 'end': "\\'\\'\\'[eimnosux]*" 260 | 'endCaptures': 261 | '0': 262 | 'name': 'punctuation.section.regexp.end.elixir' 263 | 'name': 'string.regexp.interpolated.elixir' 264 | 'patterns': [ 265 | { 'include': '#regex_sub' } 266 | { 'include': '#nest_ltgt' } 267 | ] 268 | } 269 | { 270 | 'comment': 'Regex sigil with single quotes' 271 | 'begin': '~r\\"\\"\\"' 272 | 'beginCaptures': 273 | '0': 274 | 'name': 'punctuation.section.regexp.begin.elixir' 275 | 'end': '\\"\\"\\"[eimnosux]*' 276 | 'endCaptures': 277 | '0': 278 | 'name': 'punctuation.section.regexp.end.elixir' 279 | 'name': 
'string.regexp.interpolated.elixir' 280 | 'patterns': [ 281 | { 'include': '#regex_sub' } 282 | { 'include': '#nest_ltgt' } 283 | ] 284 | } 285 | { 286 | 'comment': 'Regex sigil with double quotes' 287 | 'begin': '~r\\"' 288 | 'beginCaptures': 289 | '0': 290 | 'name': 'punctuation.section.regexp.begin.elixir' 291 | 'end': '\\"[eimnosux]*' 292 | 'endCaptures': 293 | '0': 294 | 'name': 'punctuation.section.regexp.end.elixir' 295 | 'name': 'string.regexp.interpolated.elixir' 296 | 'patterns': [ 297 | { 'include': '#regex_sub' } 298 | { 'include': '#nest_ltgt' } 299 | ] 300 | } 301 | { 302 | 'comment': 'Regex sigil with single quotes' 303 | 'begin': "~r\\'" 304 | 'beginCaptures': 305 | '0': 306 | 'name': 'punctuation.section.regexp.begin.elixir' 307 | 'end': "\\'[eimnosux]*" 308 | 'endCaptures': 309 | '0': 310 | 'name': 'punctuation.section.regexp.end.elixir' 311 | 'name': 'string.regexp.interpolated.elixir' 312 | 'patterns': [ 313 | { 'include': '#regex_sub' } 314 | { 'include': '#nest_ltgt' } 315 | ] 316 | } 317 | 318 | # Literal Regex Sigils 319 | { 320 | 'comment': 'Literal regex sigil with curlies' 321 | 'begin': '~R\\{' 322 | 'beginCaptures': 323 | '0': 324 | 'name': 'punctuation.section.regexp.begin.elixir' 325 | 'end': '\\}[eimnosux]*' 326 | 'endCaptures': 327 | '0': 328 | 'name': 'punctuation.section.regexp.end.elixir' 329 | 'name': 'string.regexp.literal.elixir' 330 | 'patterns': [ 331 | { 'include': '#nest_curly' } 332 | ] 333 | } 334 | { 335 | 'comment': 'Literal regex sigil with pipes' 336 | 'begin': '~R\\|' 337 | 'beginCaptures': 338 | '0': 339 | 'name': 'punctuation.section.regexp.begin.elixir' 340 | 'end': '\\|[eimnosux]*' 341 | 'endCaptures': 342 | '0': 343 | 'name': 'punctuation.section.regexp.end.elixir' 344 | 'name': 'string.regexp.literal.elixir' 345 | } 346 | { 347 | 'comment': 'Literal regex sigil with parens' 348 | 'begin': '~R\\(' 349 | 'beginCaptures': 350 | '0': 351 | 'name': 'punctuation.section.regexp.begin.elixir' 352 | 'end': 
'\\)[eimnosux]*' 353 | 'endCaptures': 354 | '0': 355 | 'name': 'punctuation.section.regexp.end.elixir' 356 | 'name': 'string.regexp.literal.elixir' 357 | 'patterns': [ 358 | { 'include': '#nest_parens'} 359 | ] 360 | } 361 | { 362 | 'comment': 'Literal regex sigil with slashes' 363 | 'begin': '~R\\/' 364 | 'beginCaptures': 365 | '0': 366 | 'name': 'punctuation.section.regexp.begin.elixir' 367 | 'end': '\\/[eimnosux]*' 368 | 'endCaptures': 369 | '0': 370 | 'name': 'punctuation.section.regexp.end.elixir' 371 | 'name': 'string.regexp.literal.elixir' 372 | } 373 | { 374 | 'comment': 'Literal regex sigil with brackets' 375 | 'begin': '~R\\[' 376 | 'beginCaptures': 377 | '0': 378 | 'name': 'punctuation.section.regexp.begin.elixir' 379 | 'end': '\\][eimnosux]*' 380 | 'endCaptures': 381 | '0': 382 | 'name': 'punctuation.section.regexp.end.elixir' 383 | 'name': 'string.regexp.literal.elixir' 384 | 'patterns': [ 385 | { 'include': '#nest_brackets' } 386 | ] 387 | } 388 | { 389 | 'comment': 'Literal regex sigil with ltgt' 390 | 'begin': '~R\\<' 391 | 'beginCaptures': 392 | '0': 393 | 'name': 'punctuation.section.regexp.begin.elixir' 394 | 'end': '\\>[eimnosux]*' 395 | 'endCaptures': 396 | '0': 397 | 'name': 'punctuation.section.regexp.end.elixir' 398 | 'name': 'string.regexp.literal.elixir' 399 | 'patterns': [ 400 | { 'include': '#nest_ltgt'} 401 | ] 402 | } 403 | { 404 | 'comment': 'Literal regex sigil with single quoted heredoc' 405 | 'begin': "~R\\'\\'\\'" 406 | 'beginCaptures': 407 | '0': 408 | 'name': 'punctuation.section.regexp.begin.elixir' 409 | 'end': "\\'\\'\\'[eimnosux]*" 410 | 'endCaptures': 411 | '0': 412 | 'name': 'punctuation.section.regexp.end.elixir' 413 | 'name': 'string.regexp.literal.elixir' 414 | 'patterns': [ 415 | { 'include': '#nest_ltgt' } 416 | ] 417 | } 418 | { 419 | 'comment': 'Literal regex sigil with double quoted heredoc' 420 | 'begin': '~R\\"\\"\\"' 421 | 'beginCaptures': 422 | '0': 423 | 'name': 'punctuation.section.regexp.begin.elixir' 424 | 
'end': '\\"\\"\\"[eimnosux]*' 425 | 'endCaptures': 426 | '0': 427 | 'name': 'punctuation.section.regexp.end.elixir' 428 | 'name': 'string.regexp.literal.elixir' 429 | 'patterns': [ 430 | { 'include': '#nest_ltgt' } 431 | ] 432 | } 433 | { 434 | 'comment': 'Literal regex sigil with double quotes' 435 | 'begin': '~R\\"' 436 | 'beginCaptures': 437 | '0': 438 | 'name': 'punctuation.section.regexp.begin.elixir' 439 | 'end': '\\"[eimnosux]*' 440 | 'endCaptures': 441 | '0': 442 | 'name': 'punctuation.section.regexp.end.elixir' 443 | 'name': 'string.regexp.literal.elixir' 444 | 'patterns': [ 445 | { 'include': '#nest_ltgt' } 446 | ] 447 | } 448 | { 449 | 'comment': 'Literal regex sigil with single quotes' 450 | 'begin': "~R\\'" 451 | 'beginCaptures': 452 | '0': 453 | 'name': 'punctuation.section.regexp.begin.elixir' 454 | 'end': "\\'[eimnosux]*" 455 | 'endCaptures': 456 | '0': 457 | 'name': 'punctuation.section.regexp.end.elixir' 458 | 'name': 'string.regexp.literal.elixir' 459 | 'patterns': [ 460 | { 'include': '#nest_ltgt' } 461 | ] 462 | } 463 | 464 | # Interpolated Charater lists sigils 465 | { 466 | 'comment': 'Character list sigil with curlies' 467 | 'begin': '~c\\{' 468 | 'beginCaptures': 469 | '0': 470 | 'name': 'punctuation.definition.string.begin.elixir' 471 | 'end': '\\}[a-z]*' 472 | 'endCaptures': 473 | '0': 474 | 'name': 'punctuation.definition.string.end.elixir' 475 | 'name': 'support.function.variable.quoted.single.elixir' 476 | 'patterns': [ 477 | { 'include': '#interpolated_elixir' } 478 | { 'include': '#escaped_char' } 479 | ] 480 | } 481 | { 482 | 'comment': 'Character list sigil with pipes' 483 | 'begin': '~c\\|' 484 | 'beginCaptures': 485 | '0': 486 | 'name': 'punctuation.definition.string.begin.elixir' 487 | 'end': '\\|[a-z]*' 488 | 'endCaptures': 489 | '0': 490 | 'name': 'punctuation.definition.string.end.elixir' 491 | 'name': 'support.function.variable.quoted.single.elixir' 492 | 'patterns': [ 493 | { 'include': '#interpolated_elixir' } 494 | { 
'include': '#escaped_char' } 495 | ] 496 | } 497 | { 498 | 'comment': 'Character list sigil with parens' 499 | 'begin': '~c\\(' 500 | 'beginCaptures': 501 | '0': 502 | 'name': 'punctuation.definition.string.begin.elixir' 503 | 'end': '\\)[a-z]*' 504 | 'endCaptures': 505 | '0': 506 | 'name': 'punctuation.definition.string.end.elixir' 507 | 'name': 'support.function.variable.quoted.single.elixir' 508 | 'patterns': [ 509 | { 'include': '#interpolated_elixir' } 510 | { 'include': '#escaped_char' } 511 | ] 512 | } 513 | { 514 | 'comment': 'Character list sigil with curlies' 515 | 'begin': '~c\\<' 516 | 'beginCaptures': 517 | '0': 518 | 'name': 'punctuation.definition.string.begin.elixir' 519 | 'end': '\\>[a-z]*' 520 | 'endCaptures': 521 | '0': 522 | 'name': 'punctuation.definition.string.end.elixir' 523 | 'name': 'support.function.variable.quoted.single.elixir' 524 | 'patterns': [ 525 | { 'include': '#interpolated_elixir' } 526 | { 'include': '#escaped_char' } 527 | ] 528 | } 529 | { 530 | 'comment': 'Character list sigil with curlies' 531 | 'begin': '~c\\[' 532 | 'beginCaptures': 533 | '0': 534 | 'name': 'punctuation.definition.string.begin.elixir' 535 | 'end': '\\][a-z]*' 536 | 'endCaptures': 537 | '0': 538 | 'name': 'punctuation.definition.string.end.elixir' 539 | 'name': 'support.function.variable.quoted.single.elixir' 540 | 'patterns': [ 541 | { 'include': '#interpolated_elixir' } 542 | { 'include': '#escaped_char' } 543 | ] 544 | } 545 | { 546 | 'comment': 'Character list sigil with curlies' 547 | 'begin': '~c\\/' 548 | 'beginCaptures': 549 | '0': 550 | 'name': 'punctuation.definition.string.begin.elixir' 551 | 'end': '\\/[a-z]*' 552 | 'endCaptures': 553 | '0': 554 | 'name': 'punctuation.definition.string.end.elixir' 555 | 'name': 'support.function.variable.quoted.single.elixir' 556 | 'patterns': [ 557 | { 'include': '#interpolated_elixir' } 558 | { 'include': '#escaped_char' } 559 | ] 560 | } 561 | { 562 | 'comment': 'Character list sigil with single quoted 
heredoc' 563 | 'begin': "~c\\'\\'\\'" 564 | 'beginCaptures': 565 | '0': 566 | 'name': 'punctuation.definition.string.begin.elixir' 567 | 'end': "\\'\\'\\'[a-z]*" 568 | 'endCaptures': 569 | '0': 570 | 'name': 'punctuation.definition.string.end.elixir' 571 | 'name': 'support.function.variable.quoted.single.elixir' 572 | 'patterns': [ 573 | { 'include': '#interpolated_elixir' } 574 | { 'include': '#escaped_char' } 575 | ] 576 | } 577 | { 578 | 'comment': 'Character list sigil with double quoted heredoc' 579 | 'begin': '~c\\"\\"\\"' 580 | 'beginCaptures': 581 | '0': 582 | 'name': 'punctuation.definition.string.begin.elixir' 583 | 'end': '\\"\\"\\"[a-z]*' 584 | 'endCaptures': 585 | '0': 586 | 'name': 'punctuation.definition.string.end.elixir' 587 | 'name': 'support.function.variable.quoted.single.elixir' 588 | 'patterns': [ 589 | { 'include': '#interpolated_elixir' } 590 | { 'include': '#escaped_char' } 591 | ] 592 | } 593 | { 594 | 'comment': 'Character list sigil with curlies' 595 | 'begin': "~c\\'" 596 | 'beginCaptures': 597 | '0': 598 | 'name': 'punctuation.definition.string.begin.elixir' 599 | 'end': "\\'[a-z]*" 600 | 'endCaptures': 601 | '0': 602 | 'name': 'punctuation.definition.string.end.elixir' 603 | 'name': 'support.function.variable.quoted.single.elixir' 604 | 'patterns': [ 605 | { 'include': '#interpolated_elixir' } 606 | { 'include': '#escaped_char' } 607 | ] 608 | } 609 | { 610 | 'comment': 'Character list sigil with curlies' 611 | 'begin': '~c\\"' 612 | 'beginCaptures': 613 | '0': 614 | 'name': 'punctuation.definition.string.begin.elixir' 615 | 'end': '\\"[a-z]*' 616 | 'endCaptures': 617 | '0': 618 | 'name': 'punctuation.definition.string.end.elixir' 619 | 'name': 'support.function.variable.quoted.single.elixir' 620 | 'patterns': [ 621 | { 'include': '#interpolated_elixir' } 622 | { 'include': '#escaped_char' } 623 | ] 624 | } 625 | 626 | # Literal Charater lists sigils 627 | { 628 | 'comment': 'Literal Character list sigil with curlies' 629 | 'begin': 
'~C\\{' 630 | 'beginCaptures': 631 | '0': 632 | 'name': 'punctuation.definition.string.begin.elixir' 633 | 'end': '\\}[a-z]*' 634 | 'endCaptures': 635 | '0': 636 | 'name': 'punctuation.definition.string.end.elixir' 637 | 'name': 'support.function.variable.quoted.single.elixir' 638 | } 639 | { 640 | 'comment': 'Literal Character list sigil with pipes' 641 | 'begin': '~C\\|' 642 | 'beginCaptures': 643 | '0': 644 | 'name': 'punctuation.definition.string.begin.elixir' 645 | 'end': '\\|[a-z]*' 646 | 'endCaptures': 647 | '0': 648 | 'name': 'punctuation.definition.string.end.elixir' 649 | 'name': 'support.function.variable.quoted.single.elixir' 650 | } 651 | { 652 | 'comment': 'Literal Character list sigil with parens' 653 | 'begin': '~C\\(' 654 | 'beginCaptures': 655 | '0': 656 | 'name': 'punctuation.definition.string.begin.elixir' 657 | 'end': '\\)[a-z]*' 658 | 'endCaptures': 659 | '0': 660 | 'name': 'punctuation.definition.string.end.elixir' 661 | 'name': 'support.function.variable.quoted.single.elixir' 662 | } 663 | { 664 | 'comment': 'Literal Character list sigil with curlies' 665 | 'begin': '~C\\<' 666 | 'beginCaptures': 667 | '0': 668 | 'name': 'punctuation.definition.string.begin.elixir' 669 | 'end': '\\>[a-z]*' 670 | 'endCaptures': 671 | '0': 672 | 'name': 'punctuation.definition.string.end.elixir' 673 | 'name': 'support.function.variable.quoted.single.elixir' 674 | } 675 | { 676 | 'comment': 'Literal Character list sigil with curlies' 677 | 'begin': '~C\\[' 678 | 'beginCaptures': 679 | '0': 680 | 'name': 'punctuation.definition.string.begin.elixir' 681 | 'end': '\\][a-z]*' 682 | 'endCaptures': 683 | '0': 684 | 'name': 'punctuation.definition.string.end.elixir' 685 | 'name': 'support.function.variable.quoted.single.elixir' 686 | } 687 | { 688 | 'comment': 'Literal Character list sigil with curlies' 689 | 'begin': '~C\\/' 690 | 'beginCaptures': 691 | '0': 692 | 'name': 'punctuation.definition.string.begin.elixir' 693 | 'end': '\\/[a-z]*' 694 | 'endCaptures': 695 | 
'0': 696 | 'name': 'punctuation.definition.string.end.elixir' 697 | 'name': 'support.function.variable.quoted.single.elixir' 698 | } 699 | { 700 | 'comment': 'Literal Character list sigil with single quoted heredoc' 701 | 'begin': "~C\\'\\'\\'" 702 | 'beginCaptures': 703 | '0': 704 | 'name': 'punctuation.definition.string.begin.elixir' 705 | 'end': "\\'\\'\\'[a-z]*" 706 | 'endCaptures': 707 | '0': 708 | 'name': 'punctuation.definition.string.end.elixir' 709 | 'name': 'support.function.variable.quoted.single.elixir' 710 | } 711 | { 712 | 'comment': 'Literal Character list sigil with double quoted heredoc' 713 | 'begin': '~C\\"\\"\\"' 714 | 'beginCaptures': 715 | '0': 716 | 'name': 'punctuation.definition.string.begin.elixir' 717 | 'end': '\\"\\"\\"[a-z]*' 718 | 'endCaptures': 719 | '0': 720 | 'name': 'punctuation.definition.string.end.elixir' 721 | 'name': 'support.function.variable.quoted.single.elixir' 722 | } 723 | { 724 | 'comment': 'Literal Character list sigil with curlies' 725 | 'begin': "~C\\'" 726 | 'beginCaptures': 727 | '0': 728 | 'name': 'punctuation.definition.string.begin.elixir' 729 | 'end': "\\'[a-z]*" 730 | 'endCaptures': 731 | '0': 732 | 'name': 'punctuation.definition.string.end.elixir' 733 | 'name': 'support.function.variable.quoted.single.elixir' 734 | } 735 | { 736 | 'comment': 'Literal Character list sigil with curlies' 737 | 'begin': '~C\\"' 738 | 'beginCaptures': 739 | '0': 740 | 'name': 'punctuation.definition.string.begin.elixir' 741 | 'end': '\\"[a-z]*' 742 | 'endCaptures': 743 | '0': 744 | 'name': 'punctuation.definition.string.end.elixir' 745 | 'name': 'support.function.variable.quoted.single.elixir' 746 | } 747 | 748 | # Interpolated word list sigils 749 | { 750 | 'begin': '~w\\{' 751 | 'beginCaptures': 752 | '0': 753 | 'name': 'punctuation.section.list.begin.elixir' 754 | 'comment': 'sigil (allow for interpolation)' 755 | 'end': '\\}[acs]*' 756 | 'endCaptures': 757 | '0': 758 | 'name': 'punctuation.section.list.end.elixir' 759 | 
'name': 'string.quoted.double.interpolated.elixir' 760 | 'patterns': [ 761 | { 'include': '#interpolated_elixir' } 762 | { 'include': '#escaped_char' } 763 | { 'include': '#nest_curly' } 764 | ] 765 | } 766 | { 767 | 'begin': '~w\\[' 768 | 'beginCaptures': 769 | '0': 770 | 'name': 'punctuation.section.list.begin.elixir' 771 | 'comment': 'sigil (allow for interpolation)' 772 | 'end': '\\][acs]*' 773 | 'endCaptures': 774 | '0': 775 | 'name': 'punctuation.section.list.end.elixir' 776 | 'name': 'string.quoted.double.interpolated.elixir' 777 | 'patterns': [ 778 | { 'include': '#interpolated_elixir' } 779 | { 'include': '#escaped_char' } 780 | { 'include': '#nest_brackets' } 781 | ] 782 | } 783 | { 784 | 'begin': '~w\\<' 785 | 'beginCaptures': 786 | '0': 787 | 'name': 'punctuation.section.list.begin.elixir' 788 | 'comment': 'sigil (allow for interpolation)' 789 | 'end': '\\>[acs]*' 790 | 'endCaptures': 791 | '0': 792 | 'name': 'punctuation.section.list.end.elixir' 793 | 'name': 'string.quoted.double.interpolated.elixir' 794 | 'patterns': [ 795 | { 'include': '#interpolated_elixir' } 796 | { 'include': '#escaped_char' } 797 | { 'include': '#nest_ltgt' } 798 | ] 799 | } 800 | { 801 | 'begin': '~w\\(' 802 | 'beginCaptures': 803 | '0': 804 | 'name': 'punctuation.section.list.begin.elixir' 805 | 'comment': 'sigil (allow for interpolation)' 806 | 'end': '\\)[acs]*' 807 | 'endCaptures': 808 | '0': 809 | 'name': 'punctuation.section.list.end.elixir' 810 | 'name': 'string.quoted.double.interpolated.elixir' 811 | 'patterns': [ 812 | { 'include': '#interpolated_elixir' } 813 | { 'include': '#escaped_char' } 814 | { 'include': '#nest_parens' } 815 | ] 816 | } 817 | { 818 | 'begin': '~w\\/' 819 | 'beginCaptures': 820 | '0': 821 | 'name': 'punctuation.section.list.begin.elixir' 822 | 'comment': 'sigil (allow for interpolation)' 823 | 'end': '\\/[acs]*' 824 | 'endCaptures': 825 | '0': 826 | 'name': 'punctuation.section.list.end.elixir' 827 | 'name': 
'string.quoted.double.interpolated.elixir' 828 | 'patterns': [ 829 | { 'include': '#interpolated_elixir' } 830 | { 'include': '#escaped_char' } 831 | ] 832 | } 833 | { 834 | 'begin': '~w\\|' 835 | 'beginCaptures': 836 | '0': 837 | 'name': 'punctuation.section.list.begin.elixir' 838 | 'comment': 'sigil (allow for interpolation)' 839 | 'end': '\\|[acs]*' 840 | 'endCaptures': 841 | '0': 842 | 'name': 'punctuation.section.list.end.elixir' 843 | 'name': 'string.quoted.double.interpolated.elixir' 844 | 'patterns': [ 845 | { 'include': '#interpolated_elixir' } 846 | { 'include': '#escaped_char' } 847 | ] 848 | } 849 | { 850 | 'comment': 'Interpolated word list sigil with single quoted heredoc' 851 | 'begin': "~w\\'\\'\\'" 852 | 'beginCaptures': 853 | '0': 854 | 'name': 'punctuation.section.list.begin.elixir' 855 | 'end': "\\'\\'\\'[acs]*" 856 | 'endCaptures': 857 | '0': 858 | 'name': 'punctuation.section.list.end.elixir' 859 | 'name': 'string.quoted.double.interpolated.elixir' 860 | 'patterns': [ 861 | { 'include': '#interpolated_elixir' } 862 | { 'include': '#escaped_char' } 863 | ] 864 | } 865 | { 866 | 'comment': 'Interpolated word list sigil with double quoted heredoc' 867 | 'begin': '~w\\"\\"\\"' 868 | 'beginCaptures': 869 | '0': 870 | 'name': 'punctuation.section.list.begin.elixir' 871 | 'end': '\\"\\"\\"[acs]*' 872 | 'endCaptures': 873 | '0': 874 | 'name': 'punctuation.section.list.end.elixir' 875 | 'name': 'string.quoted.double.interpolated.elixir' 876 | 'patterns': [ 877 | { 'include': '#interpolated_elixir' } 878 | { 'include': '#escaped_char' } 879 | ] 880 | } 881 | { 882 | 'begin': "~w\\'" 883 | 'beginCaptures': 884 | '0': 885 | 'name': 'punctuation.section.list.begin.elixir' 886 | 'comment': 'sigil (allow for interpolation)' 887 | 'end': "\\'[acs]*" 888 | 'endCaptures': 889 | '0': 890 | 'name': 'punctuation.section.list.end.elixir' 891 | 'name': 'string.quoted.double.interpolated.elixir' 892 | 'patterns': [ 893 | { 'include': '#interpolated_elixir' } 894 | { 
'include': '#escaped_char' } 895 | ] 896 | } 897 | { 898 | 'begin': '~w\\"' 899 | 'beginCaptures': 900 | '0': 901 | 'name': 'punctuation.section.list.begin.elixir' 902 | 'comment': 'sigil (allow for interpolation)' 903 | 'end': '\\"[acs]*' 904 | 'endCaptures': 905 | '0': 906 | 'name': 'punctuation.section.list.end.elixir' 907 | 'name': 'string.quoted.double.interpolated.elixir' 908 | 'patterns': [ 909 | { 'include': '#interpolated_elixir' } 910 | { 'include': '#escaped_char' } 911 | ] 912 | } 913 | 914 | # Literal word list sigils 915 | { 916 | 'begin': '~W\\{' 917 | 'beginCaptures': 918 | '0': 919 | 'name': 'punctuation.section.list.begin.elixir' 920 | 'comment': 'sigil (without interpolation)' 921 | 'end': '\\}[acs]*' 922 | 'endCaptures': 923 | '0': 924 | 'name': 'punctuation.section.list.end.elixir' 925 | 'name': 'string.quoted.double.literal.elixir' 926 | } 927 | { 928 | 'begin': '~W\\[' 929 | 'beginCaptures': 930 | '0': 931 | 'name': 'punctuation.section.list.begin.elixir' 932 | 'comment': 'sigil (without interpolation)' 933 | 'end': '\\][acs]*' 934 | 'endCaptures': 935 | '0': 936 | 'name': 'punctuation.section.list.end.elixir' 937 | 'name': 'string.quoted.double.literal.elixir' 938 | 'patterns': [ 939 | { 'include': '#nest_brackets' } 940 | ] 941 | } 942 | { 943 | 'begin': '~W\\<' 944 | 'beginCaptures': 945 | '0': 946 | 'name': 'punctuation.section.list.begin.elixir' 947 | 'comment': 'sigil (without interpolation)' 948 | 'end': '\\>[acs]*' 949 | 'endCaptures': 950 | '0': 951 | 'name': 'punctuation.section.list.end.elixir' 952 | 'name': 'string.quoted.double.literal.elixir' 953 | 'patterns': [ 954 | { 'include': '#nest_ltgt' } 955 | ] 956 | } 957 | { 958 | 'begin': '~W\\(' 959 | 'beginCaptures': 960 | '0': 961 | 'name': 'punctuation.section.list.begin.elixir' 962 | 'comment': 'sigil (without interpolation)' 963 | 'end': '\\)[acs]*' 964 | 'endCaptures': 965 | '0': 966 | 'name': 'punctuation.section.list.end.elixir' 967 | 'name': 
'string.quoted.double.literal.elixir' 968 | 'patterns': [ 969 | { 'include': '#nest_parens' } 970 | ] 971 | } 972 | { 973 | 'begin': '~W\\/' 974 | 'beginCaptures': 975 | '0': 976 | 'name': 'punctuation.section.list.begin.elixir' 977 | 'comment': 'sigil (without interpolation)' 978 | 'end': '\\/[acs]*' 979 | 'endCaptures': 980 | '0': 981 | 'name': 'punctuation.section.list.end.elixir' 982 | 'name': 'string.quoted.double.literal.elixir' 983 | } 984 | { 985 | 'begin': '~W\\|' 986 | 'beginCaptures': 987 | '0': 988 | 'name': 'punctuation.section.list.begin.elixir' 989 | 'comment': 'sigil (without interpolation)' 990 | 'end': '\\|[acs]*' 991 | 'endCaptures': 992 | '0': 993 | 'name': 'punctuation.section.list.end.elixir' 994 | 'name': 'string.quoted.double.literal.elixir' 995 | } 996 | { 997 | 'comment': 'Literal word list sigil with single quoted heredoc' 998 | 'begin': "~W\\'\\'\\'" 999 | 'beginCaptures': 1000 | '0': 1001 | 'name': 'punctuation.section.list.begin.elixir' 1002 | 'end': "\\'\\'\\'[acs]*" 1003 | 'endCaptures': 1004 | '0': 1005 | 'name': 'punctuation.section.list.end.elixir' 1006 | 'name': 'string.quoted.double.literal.elixir' 1007 | } 1008 | { 1009 | 'comment': 'Literal word list sigil with double quoted heredoc' 1010 | 'begin': '~W\\"\\"\\"' 1011 | 'beginCaptures': 1012 | '0': 1013 | 'name': 'punctuation.section.list.begin.elixir' 1014 | 'end': '\\"\\"\\"[acs]*' 1015 | 'endCaptures': 1016 | '0': 1017 | 'name': 'punctuation.section.list.end.elixir' 1018 | 'name': 'string.quoted.double.literal.elixir' 1019 | } 1020 | { 1021 | 'begin': "~W\\'" 1022 | 'beginCaptures': 1023 | '0': 1024 | 'name': 'punctuation.section.list.begin.elixir' 1025 | 'comment': 'sigil (without interpolation)' 1026 | 'end': "\\'[acs]*" 1027 | 'endCaptures': 1028 | '0': 1029 | 'name': 'punctuation.section.list.end.elixir' 1030 | 'name': 'string.quoted.double.literal.elixir' 1031 | } 1032 | { 1033 | 'begin': '~W\\"' 1034 | 'beginCaptures': 1035 | '0': 1036 | 'name': 
'punctuation.section.list.begin.elixir' 1037 | 'comment': 'sigil (without interpolation)' 1038 | 'end': '\\"[acs]*' 1039 | 'endCaptures': 1040 | '0': 1041 | 'name': 'punctuation.section.list.end.elixir' 1042 | 'name': 'string.quoted.double.literal.elixir' 1043 | } 1044 | 1045 | # Interpolated String sigils 1046 | { 1047 | 'begin': '~[a-z](?>""")' 1048 | 'beginCaptures': 1049 | '0': 1050 | 'name': 'punctuation.definition.string.begin.elixir' 1051 | 'comment': 'Double-quoted heredocs sigils' 1052 | 'end': '^\\s*"""' 1053 | 'endCaptures': 1054 | '0': 1055 | 'name': 'punctuation.definition.string.end.elixir' 1056 | 'name': 'string.quoted.double.heredoc.elixir' 1057 | 'patterns': [ 1058 | { 1059 | 'include': '#interpolated_elixir' 1060 | } 1061 | { 1062 | 'include': '#escaped_char' 1063 | } 1064 | ] 1065 | } 1066 | { 1067 | 'begin': "~[a-z](?>''')" 1068 | 'beginCaptures': 1069 | '0': 1070 | 'name': 'punctuation.definition.string.begin.elixir' 1071 | 'comment': 'Double-quoted heredocs sigils' 1072 | 'end': "^\\s*'''" 1073 | 'endCaptures': 1074 | '0': 1075 | 'name': 'punctuation.definition.string.end.elixir' 1076 | 'name': 'string.quoted.double.heredoc.elixir' 1077 | 'patterns': [ 1078 | { 1079 | 'include': '#interpolated_elixir' 1080 | } 1081 | { 1082 | 'include': '#escaped_char' 1083 | } 1084 | ] 1085 | } 1086 | { 1087 | 'begin': '~[a-z]\\{' 1088 | 'beginCaptures': 1089 | '0': 1090 | 'name': 'punctuation.definition.string.begin.elixir' 1091 | 'comment': 'sigil (allow for interpolation)' 1092 | 'end': '\\}[a-z]*' 1093 | 'endCaptures': 1094 | '0': 1095 | 'name': 'punctuation.definition.string.end.elixir' 1096 | 'name': 'string.quoted.double.interpolated.elixir' 1097 | 'patterns': [ 1098 | { 1099 | 'include': '#interpolated_elixir' 1100 | } 1101 | { 1102 | 'include': '#escaped_char' 1103 | } 1104 | { 1105 | 'include': '#nest_curly' 1106 | } 1107 | ] 1108 | } 1109 | { 1110 | 'begin': '~[a-z]\\[' 1111 | 'beginCaptures': 1112 | '0': 1113 | 'name': 
'punctuation.definition.string.begin.elixir' 1114 | 'comment': 'sigil (allow for interpolation)' 1115 | 'end': '\\][a-z]*' 1116 | 'endCaptures': 1117 | '0': 1118 | 'name': 'punctuation.definition.string.end.elixir' 1119 | 'name': 'string.quoted.double.interpolated.elixir' 1120 | 'patterns': [ 1121 | { 'include': '#interpolated_elixir' } 1122 | { 'include': '#escaped_char' } 1123 | { 'include': '#nest_brackets' } 1124 | ] 1125 | } 1126 | { 1127 | 'begin': '~[a-z]\\<' 1128 | 'beginCaptures': 1129 | '0': 1130 | 'name': 'punctuation.definition.string.begin.elixir' 1131 | 'comment': 'sigil (allow for interpolation)' 1132 | 'end': '\\>[a-z]*' 1133 | 'endCaptures': 1134 | '0': 1135 | 'name': 'punctuation.definition.string.end.elixir' 1136 | 'name': 'string.quoted.double.interpolated.elixir' 1137 | 'patterns': [ 1138 | { 'include': '#interpolated_elixir' } 1139 | { 'include': '#escaped_char' } 1140 | { 'include': '#nest_ltgt' } 1141 | ] 1142 | } 1143 | { 1144 | 'begin': '~[a-z]\\(' 1145 | 'beginCaptures': 1146 | '0': 1147 | 'name': 'punctuation.definition.string.begin.elixir' 1148 | 'comment': 'sigil (allow for interpolation)' 1149 | 'end': '\\)[a-z]*' 1150 | 'endCaptures': 1151 | '0': 1152 | 'name': 'punctuation.definition.string.end.elixir' 1153 | 'name': 'string.quoted.double.interpolated.elixir' 1154 | 'patterns': [ 1155 | { 'include': '#interpolated_elixir' } 1156 | { 'include': '#escaped_char' } 1157 | { 'include': '#nest_parens' } 1158 | ] 1159 | } 1160 | { 1161 | 'begin': '~[a-z]\\/' 1162 | 'beginCaptures': 1163 | '0': 1164 | 'name': 'punctuation.definition.string.begin.elixir' 1165 | 'comment': 'sigil (allow for interpolation)' 1166 | 'end': '\\/[a-z]*' 1167 | 'endCaptures': 1168 | '0': 1169 | 'name': 'punctuation.definition.string.end.elixir' 1170 | 'name': 'string.quoted.double.interpolated.elixir' 1171 | 'patterns': [ 1172 | { 'include': '#interpolated_elixir' } 1173 | { 'include': '#escaped_char' } 1174 | ] 1175 | } 1176 | { 1177 | 'begin': "~[a-z]\\'" 1178 | 
'beginCaptures': 1179 | '0': 1180 | 'name': 'punctuation.definition.string.begin.elixir' 1181 | 'comment': 'sigil (allow for interpolation)' 1182 | 'end': "\\'[a-z]*" 1183 | 'endCaptures': 1184 | '0': 1185 | 'name': 'punctuation.definition.string.end.elixir' 1186 | 'name': 'string.quoted.double.interpolated.elixir' 1187 | 'patterns': [ 1188 | { 'include': '#interpolated_elixir' } 1189 | { 'include': '#escaped_char' } 1190 | ] 1191 | } 1192 | { 1193 | 'begin': '~[a-z]\\"' 1194 | 'beginCaptures': 1195 | '0': 1196 | 'name': 'punctuation.definition.string.begin.elixir' 1197 | 'comment': 'sigil (allow for interpolation)' 1198 | 'end': '\\"[a-z]*' 1199 | 'endCaptures': 1200 | '0': 1201 | 'name': 'punctuation.definition.string.end.elixir' 1202 | 'name': 'string.quoted.double.interpolated.elixir' 1203 | 'patterns': [ 1204 | { 'include': '#interpolated_elixir' } 1205 | { 'include': '#escaped_char' } 1206 | ] 1207 | } 1208 | { 1209 | 'begin': '~[a-z]\\|' 1210 | 'beginCaptures': 1211 | '0': 1212 | 'name': 'punctuation.definition.string.begin.elixir' 1213 | 'comment': 'sigil (allow for interpolation)' 1214 | 'end': '\\|[a-z]*' 1215 | 'endCaptures': 1216 | '0': 1217 | 'name': 'punctuation.definition.string.end.elixir' 1218 | 'name': 'string.quoted.double.interpolated.elixir' 1219 | 'patterns': [ 1220 | { 'include': '#interpolated_elixir' } 1221 | { 'include': '#escaped_char' } 1222 | ] 1223 | } 1224 | 1225 | # Literal String sigils 1226 | { 1227 | 'begin': '~[A-Z](?>""")' 1228 | 'beginCaptures': 1229 | '0': 1230 | 'name': 'punctuation.definition.string.begin.elixir' 1231 | 'comment': 'Double-quoted heredocs sigils' 1232 | 'end': '^\\s*"""' 1233 | 'endCaptures': 1234 | '0': 1235 | 'name': 'punctuation.definition.string.end.elixir' 1236 | 'name': 'string.quoted.other.literal.elixir' 1237 | } 1238 | { 1239 | 'begin': "~[A-Z](?>''')" 1240 | 'beginCaptures': 1241 | '0': 1242 | 'name': 'punctuation.definition.string.begin.elixir' 1243 | 'comment': 'Single-quoted heredocs sigils' 1244 
| 'end': "^\\s*'''" 1245 | 'endCaptures': 1246 | '0': 1247 | 'name': 'punctuation.definition.string.end.elixir' 1248 | 'name': 'string.quoted.other.literal.elixir' 1249 | } 1250 | { 1251 | 'begin': '~[A-Z]\\{' 1252 | 'beginCaptures': 1253 | '0': 1254 | 'name': 'punctuation.definition.string.begin.elixir' 1255 | 'comment': 'sigil (without interpolation)' 1256 | 'end': '\\}[a-z]*' 1257 | 'endCaptures': 1258 | '0': 1259 | 'name': 'punctuation.definition.string.end.elixir' 1260 | 'name': 'string.quoted.double.literal.elixir' 1261 | } 1262 | { 1263 | 'begin': '~[A-Z]\\[' 1264 | 'beginCaptures': 1265 | '0': 1266 | 'name': 'punctuation.definition.string.begin.elixir' 1267 | 'comment': 'sigil (without interpolation)' 1268 | 'end': '\\][a-z]*' 1269 | 'endCaptures': 1270 | '0': 1271 | 'name': 'punctuation.definition.string.end.elixir' 1272 | 'name': 'string.quoted.double.literal.elixir' 1273 | 'patterns': [ 1274 | { 'include': '#nest_brackets' } 1275 | ] 1276 | } 1277 | { 1278 | 'begin': '~[A-Z]\\<' 1279 | 'beginCaptures': 1280 | '0': 1281 | 'name': 'punctuation.definition.string.begin.elixir' 1282 | 'comment': 'sigil (without interpolation)' 1283 | 'end': '\\>[a-z]*' 1284 | 'endCaptures': 1285 | '0': 1286 | 'name': 'punctuation.definition.string.end.elixir' 1287 | 'name': 'string.quoted.double.literal.elixir' 1288 | 'patterns': [ 1289 | { 'include': '#nest_ltgt' } 1290 | ] 1291 | } 1292 | { 1293 | 'begin': '~[A-Z]\\(' 1294 | 'beginCaptures': 1295 | '0': 1296 | 'name': 'punctuation.definition.string.begin.elixir' 1297 | 'comment': 'sigil (without interpolation)' 1298 | 'end': '\\)[a-z]*' 1299 | 'endCaptures': 1300 | '0': 1301 | 'name': 'punctuation.definition.string.end.elixir' 1302 | 'name': 'string.quoted.double.literal.elixir' 1303 | 'patterns': [ 1304 | { 'include': '#nest_parens' } 1305 | ] 1306 | } 1307 | { 1308 | 'begin': '~[A-Z]\\/' 1309 | 'beginCaptures': 1310 | '0': 1311 | 'name': 'punctuation.definition.string.begin.elixir' 1312 | 'comment': 'sigil (without 
interpolation)' 1313 | 'end': '\\/[a-z]*' 1314 | 'endCaptures': 1315 | '0': 1316 | 'name': 'punctuation.definition.string.end.elixir' 1317 | 'name': 'string.quoted.double.literal.elixir' 1318 | } 1319 | { 1320 | 'begin': "~[A-Z]\\'" 1321 | 'beginCaptures': 1322 | '0': 1323 | 'name': 'punctuation.definition.string.begin.elixir' 1324 | 'comment': 'sigil (without interpolation)' 1325 | 'end': "\\'[a-z]*" 1326 | 'endCaptures': 1327 | '0': 1328 | 'name': 'punctuation.definition.string.end.elixir' 1329 | 'name': 'string.quoted.double.literal.elixir' 1330 | } 1331 | { 1332 | 'begin': '~[A-Z]\\"' 1333 | 'beginCaptures': 1334 | '0': 1335 | 'name': 'punctuation.definition.string.begin.elixir' 1336 | 'comment': 'sigil (without interpolation)' 1337 | 'end': '\\"[a-z]*' 1338 | 'endCaptures': 1339 | '0': 1340 | 'name': 'punctuation.definition.string.end.elixir' 1341 | 'name': 'string.quoted.double.literal.elixir' 1342 | } 1343 | { 1344 | 'begin': '~[A-Z]\\|' 1345 | 'beginCaptures': 1346 | '0': 1347 | 'name': 'punctuation.definition.string.begin.elixir' 1348 | 'comment': 'sigil (without interpolation)' 1349 | 'end': '\\|[a-z]*' 1350 | 'endCaptures': 1351 | '0': 1352 | 'name': 'punctuation.definition.string.end.elixir' 1353 | 'name': 'string.quoted.double.literal.elixir' 1354 | } 1355 | 1356 | # Punctuation 1357 | { 1358 | 'begin': ':\'' 1359 | 'captures': 1360 | '0': 1361 | 'name': 'punctuation.definition.constant.elixir' 1362 | 'end': '\'' 1363 | 'name': 'constant.other.symbol.single-quoted.elixir' 1364 | 'patterns': [ 1365 | { 1366 | 'include': '#interpolated_elixir' 1367 | } 1368 | { 1369 | 'include': '#escaped_char' 1370 | } 1371 | ] 1372 | } 1373 | { 1374 | 'comment': """ 1375 | symbols with single-quoted string, used as keys in Keyword lists. 
1376 | """ 1377 | 'match': "(')((?:[^'\\\\]*(?:\\\\.[^'\\\\]*)*))(':)(?!:)" 1378 | 'name': 'constant.other.symbol.single-quoted.elixir' 1379 | 'captures': { 1380 | '1': 1381 | 'name': 'punctuation.definition.constant.elixir' 1382 | '2': 1383 | 'patterns': [ 1384 | { 1385 | 'include': '#interpolated_elixir' 1386 | } 1387 | { 1388 | 'include': '#escaped_char' 1389 | } 1390 | ] 1391 | '3': 1392 | 'name': 'punctuation.definition.constant.elixir' 1393 | } 1394 | } 1395 | { 1396 | 'begin': ':"' 1397 | 'captures': 1398 | '0': 1399 | 'name': 'punctuation.definition.constant.elixir' 1400 | 'end': '"' 1401 | 'name': 'constant.other.symbol.double-quoted.elixir' 1402 | 'patterns': [ 1403 | { 1404 | 'include': '#interpolated_elixir' 1405 | } 1406 | { 1407 | 'include': '#escaped_char' 1408 | } 1409 | ] 1410 | } 1411 | { 1412 | 'begin': '(?>\'\'\')' 1413 | 'beginCaptures': 1414 | '0': 1415 | 'name': 'punctuation.definition.string.begin.elixir' 1416 | 'comment': 'Single-quoted heredocs' 1417 | 'end': '^\\s*\'\'\'' 1418 | 'endCaptures': 1419 | '0': 1420 | 'name': 'punctuation.definition.string.end.elixir' 1421 | 'name': 'support.function.variable.quoted.single.heredoc.elixir' 1422 | 'patterns': [ 1423 | { 1424 | 'include': '#interpolated_elixir' 1425 | } 1426 | { 1427 | 'include': '#escaped_char' 1428 | } 1429 | ] 1430 | } 1431 | { 1432 | 'begin': '\'' 1433 | 'beginCaptures': 1434 | '0': 1435 | 'name': 'punctuation.definition.string.begin.elixir' 1436 | 'comment': 'single quoted string (allows for interpolation)' 1437 | 'end': '\'' 1438 | 'endCaptures': 1439 | '0': 1440 | 'name': 'punctuation.definition.string.end.elixir' 1441 | 'name': 'support.function.variable.quoted.single.elixir' 1442 | 'patterns': [ 1443 | { 1444 | 'include': '#interpolated_elixir' 1445 | } 1446 | { 1447 | 'include': '#escaped_char' 1448 | } 1449 | ] 1450 | } 1451 | { 1452 | 'begin': '(?>""")' 1453 | 'beginCaptures': 1454 | '0': 1455 | 'name': 'punctuation.definition.string.begin.elixir' 1456 | 'comment': 
'Double-quoted heredocs' 1457 | 'end': '^\\s*"""' 1458 | 'endCaptures': 1459 | '0': 1460 | 'name': 'punctuation.definition.string.end.elixir' 1461 | 'name': 'string.quoted.double.heredoc.elixir' 1462 | 'patterns': [ 1463 | { 1464 | 'include': '#interpolated_elixir' 1465 | } 1466 | { 1467 | 'include': '#escaped_char' 1468 | } 1469 | ] 1470 | } 1471 | { 1472 | 'comment': 'symbols defined by double-quoted string, used as keys in Keyword lists.' 1473 | 'match': '(")((?:[^"\\\\]*(?:\\\\.[^"\\\\]*)*))(":)(?!:)' 1474 | 'name': 'constant.other.symbol.double-quoted.elixir' 1475 | 'captures': { 1476 | '1': 1477 | 'name': 'punctuation.definition.constant.elixir' 1478 | '2': 1479 | 'patterns': [ 1480 | { 1481 | 'include': '#interpolated_elixir' 1482 | } 1483 | { 1484 | 'include': '#escaped_char' 1485 | } 1486 | ] 1487 | '3': 1488 | 'name': 'punctuation.definition.constant.elixir' 1489 | } 1490 | } 1491 | { 1492 | 'begin': '"' 1493 | 'beginCaptures': 1494 | '0': 1495 | 'name': 'punctuation.definition.string.begin.elixir' 1496 | 'comment': 'double quoted string (allows for interpolation)' 1497 | 'end': '"' 1498 | 'endCaptures': 1499 | '0': 1500 | 'name': 'punctuation.definition.string.end.elixir' 1501 | 'name': 'string.quoted.double.elixir' 1502 | 'patterns': [ 1503 | { 1504 | 'include': '#interpolated_elixir' 1505 | } 1506 | { 1507 | 'include': '#escaped_char' 1508 | } 1509 | ] 1510 | } 1511 | 1512 | # Comments 1513 | { 1514 | 'begin': '#' 1515 | 'beginCaptures': 1516 | '0': 1517 | 'name': 'punctuation.definition.comment.elixir' 1518 | 'end': '\\n' 1519 | 'name': 'comment.line.number-sign.elixir' 1520 | } 1521 | { 1522 | 'match': '\\b_([\\w]+[?!]?)' 1523 | 'name': 'unused.comment.elixir' 1524 | } 1525 | { 1526 | 'match': '\\b_\\b' 1527 | 'name': 'wildcard.comment.elixir' 1528 | } 1529 | { 1530 | 'comment': """ 1531 | matches questionmark-letters. 
1532 | 1533 | examples (1st alternation = hex): 1534 | ?\\x1 ?\\x61 1535 | 1536 | examples (2nd alternation = octal): 1537 | ?\\0 ?\\07 ?\\017 1538 | 1539 | examples (3rd alternation = escaped): 1540 | ?\\n ?\\b 1541 | 1542 | examples (4th alternation = meta-ctrl): 1543 | ?\\C-a ?\\M-a ?\\C-\\M-\\C-\\M-a 1544 | 1545 | examples (4th alternation = normal): 1546 | ?a ?A ?0 1547 | ?* ?" ?( 1548 | ?. ?# 1549 | 1550 | 1551 | the negative lookbehind prevents against matching 1552 | p(42.tainted?) 1553 | """ 1554 | 'match': '(?>>|~~~)' 1560 | 'name': 'keyword.operator.bitwise.elixir' 1561 | } 1562 | { 1563 | 'comment': 'matches: | ++ -- ** \\ <- <> << >> :: .. |> => -> <|> <~> <~ <<~ ~> ~>>' 1564 | 'match': '\\+\\+|\\-\\-|\\*\\*|\\\\\\\\|\\<\\-|<\\<\\~|\\<\\>|\\<\\<|\\>\\>|\\:\\:|\\.\\.|\\|>|=>|<\\|\\>|<~>|->|~>>|~>|<~|(?=?|=~' 1569 | 'name': 'keyword.operator.comparison.elixir' 1570 | } 1571 | { 1572 | 'match': '(?<=[ \\t])!+|\\bnot\\b|&&|\\band\\b|\\|\\||\\bor\\b|\\bxor\\b' 1573 | 'name': 'keyword.operator.logical.elixir' 1574 | } 1575 | { 1576 | 'match': '(\\*|\\+|\\-|/)' 1577 | 'name': 'keyword.operator.arithmetic.elixir' 1578 | } 1579 | { 1580 | 'match': '=' 1581 | 'name': 'keyword.operator.assignment.elixir' 1582 | } 1583 | # TODO: Do we need this? 1584 | { 1585 | 'match': '\\;' 1586 | 'name': 'punctuation.separator.statement.elixir' 1587 | } 1588 | { 1589 | 'match': ',' 1590 | 'name': 'punctuation.separator.object.elixir' 1591 | } 1592 | { 1593 | 'match': '\\.' 
1594 | 'name': 'punctuation.separator.method.elixir' 1595 | } 1596 | { 1597 | 'match': '\\{|\\}' 1598 | 'name': 'punctuation.section.scope.elixir' 1599 | } 1600 | { 1601 | 'match': '\\[\\]|\\[|\\]' 1602 | 'name': 'punctuation.section.array.elixir' 1603 | } 1604 | { 1605 | 'match': '\\(|\\)' 1606 | 'name': 'punctuation.section.function.elixir' 1607 | } 1608 | { 1609 | 'captures': 1610 | '1': 1611 | 'name': 'punctuation.definition.variable.elixir' 1612 | 'match': '(@)[a-zA-Z_]\\w*' 1613 | 'name': 'variable.other.readwrite.module.elixir' 1614 | } 1615 | { 1616 | 'captures': 1617 | '1': 1618 | 'name': 'punctuation.definition.variable.elixir' 1619 | 'match': '(&)\\d*' 1620 | 'name': 'variable.other.anonymous.elixir' 1621 | } 1622 | { 1623 | 'captures': 1624 | '1': 1625 | 'name': 'punctuation.definition.constant.elixir' 1626 | 'comment': 'symbols' 1627 | 'match': '(?[a-zA-Z_][\\w@]*(?>[?!]|=(?![>=]))?|\\<\\>|===?|!==?|<<>>|<<<|>>>|~~~|::|<\\-|\\|>|=>|~|~=|=|/|\\\\\\\\|\\*\\*?|\\.\\.?\\.?|>=?|<=?|&&?&?|\\+\\+?|\\-\\-?|\\|\\|?\\|?|\\!|@|\\%?\\{\\}|%|\\[\\]|\\^(\\^\\^)?)' 1628 | 'name': 'constant.other.symbol.elixir' 1629 | } 1630 | { 1631 | 'match': ':' 1632 | 'name': 'punctuation.separator.other.elixir' 1633 | } 1634 | ] 1635 | 'repository': 1636 | 'escaped_char': 1637 | 'match': '\\\\(?:[0-7]{1,3}|x[\\da-fA-F]{1,2}|.)' 1638 | 'name': 'constant.character.escape.elixir' 1639 | 'function_parameter': 1640 | 'match': '[_$a-z][$\\w]*[?!]?' 
1641 | 'name': 'parameter.variable.function.elixir' 1642 | 'interpolated_elixir': 1643 | 'patterns': [ 1644 | { 1645 | 'captures': 1646 | '0': 1647 | 'name': 'punctuation.section.embedded.elixir' 1648 | '1': 1649 | 'name': 'source.elixir.embedded.source.empty' 1650 | 'match': '#\\{(\\})' 1651 | 'name': 'source.elixir.embedded.source' 1652 | } 1653 | { 1654 | 'begin': '#\\{' 1655 | 'captures': 1656 | '0': 1657 | 'name': 'punctuation.section.embedded.elixir' 1658 | 'end': '\\}' 1659 | 'name': 'source.elixir.embedded.source' 1660 | 'patterns': [ 1661 | { 1662 | 'include': '#nest_curly_and_self' 1663 | } 1664 | { 1665 | 'include': '$self' 1666 | } 1667 | ] 1668 | } 1669 | ] 1670 | 'nest_brackets': 1671 | 'begin': '\\[' 1672 | 'captures': 1673 | '0': 1674 | 'name': 'punctuation.section.scope.elixir' 1675 | 'end': '\\]' 1676 | 'patterns': [ 1677 | { 1678 | 'include': '#nest_brackets' 1679 | } 1680 | ] 1681 | 'nest_curly': 1682 | 'begin': '\\{' 1683 | 'captures': 1684 | '0': 1685 | 'name': 'punctuation.section.scope.elixir' 1686 | 'end': '\\}' 1687 | 'patterns': [ 1688 | { 1689 | 'include': '#nest_curly' 1690 | } 1691 | ] 1692 | 'nest_curly_and_self': 1693 | 'patterns': [ 1694 | { 1695 | 'begin': '\\{' 1696 | 'captures': 1697 | '0': 1698 | 'name': 'punctuation.section.scope.elixir' 1699 | 'end': '\\}' 1700 | 'patterns': [ 1701 | { 1702 | 'include': '#nest_curly_and_self' 1703 | } 1704 | ] 1705 | } 1706 | { 1707 | 'include': '$self' 1708 | } 1709 | ] 1710 | 'nest_ltgt': 1711 | 'begin': '\\<' 1712 | 'captures': 1713 | '0': 1714 | 'name': 'punctuation.section.scope.elixir' 1715 | 'end': '\\>' 1716 | 'patterns': [ 1717 | { 1718 | 'include': '#nest_ltgt' 1719 | } 1720 | ] 1721 | 'nest_parens': 1722 | 'begin': '\\(' 1723 | 'captures': 1724 | '0': 1725 | 'name': 'punctuation.section.scope.elixir' 1726 | 'end': '\\)' 1727 | 'patterns': [ 1728 | { 1729 | 'include': '#nest_parens' 1730 | } 1731 | ] 1732 | 'regex_sub': 1733 | 'name': 'string.interpolated.regexp.elixir' 1734 | 
'patterns': [ 1735 | { 1736 | 'include': '#interpolated_elixir' 1737 | } 1738 | { 1739 | 'include': '#escaped_char' 1740 | } 1741 | { 1742 | 'name': 'string.regexp.arbitrary-repitition.elixir' 1743 | 'match': '(\\{)\\d+(,\\d+)?(\\})' 1744 | 'captures': 1745 | '1': 1746 | 'name': 'punctuation.definition.arbitrary-repitition.elixir' 1747 | '3': 1748 | 'name': 'punctuation.definition.arbitrary-repitition.elixir' 1749 | } 1750 | { 1751 | 'name': 'string.regexp.character-class.elixir' 1752 | 'begin': '\\[(?:\\^?\\])?' 1753 | 'end': '\\]' 1754 | 'captures': 1755 | '0': 1756 | 'name': 'punctuation.definition.character-class.elixir' 1757 | 'patterns': [ 1758 | { 1759 | 'include': '#escaped_char' 1760 | } 1761 | ] 1762 | } 1763 | { 1764 | 'begin': '\\(' 1765 | 'captures': 1766 | '0': 1767 | 'name': 'punctuation.definition.group.elixir' 1768 | 'end': '\\)' 1769 | 'name': 'string.regexp.group.elixir' 1770 | 'patterns': [ 1771 | { 1772 | 'include': '#regex_sub' 1773 | } 1774 | ] 1775 | } 1776 | { 1777 | 'begin': '(?<=^|\\s)(#)\\s(?=[[a-zA-Z0-9,. \\t?!-][^\\x{00}-\\x{7F}]]*$)' 1778 | 'beginCaptures': 1779 | '1': 1780 | 'name': 'punctuation.definition.comment.elixir' 1781 | 'comment': 'We are restrictive in what we allow to go after the comment character to avoid false positives, since the availability of comments depend on regexp flags.' 1782 | 'end': '$\\n?' 
1783 | 'endCaptures': 1784 | '0': 1785 | 'name': 'punctuation.definition.comment.elixir' 1786 | 'name': 'comment.line.number-sign.elixir' 1787 | } 1788 | ] 1789 | 'scopeName': 'source.elixir' 1790 | -------------------------------------------------------------------------------- /grammars/html (eex).cson: -------------------------------------------------------------------------------- 1 | 'fileTypes': [ 2 | 'html.eex', 3 | 'html.leex', 4 | 'html.heex' 5 | ] 6 | 'foldingStartMarker': '(?x)\n\t\t(<(?i:head|body|table|thead|tbody|tfoot|tr|div|select|fieldset|style|script|ul|ol|form|dl)\\b.*?>\n\t\t|)\n\t\t|\\{\\s*($|\\?>\\s*$|//|/\\*(.*\\*/\\s*$|(?!.*?\\*/)))\n\t\t)' 7 | 'foldingStopMarker': '(?x)\n\t\t(\n\t\t|^\\s*-->\n\t\t|(^|\\s)\\}\n\t\t)' 8 | 'name': 'HTML (EEx)' 9 | 'patterns': [ 10 | { 11 | 'include': 'text.elixir' 12 | } 13 | { 14 | 'include': 'text.html.basic' 15 | } 16 | ] 17 | 'scopeName': 'text.html.elixir' 18 | -------------------------------------------------------------------------------- /grammars/xml (eex).cson: -------------------------------------------------------------------------------- 1 | 'fileTypes': [ 2 | 'xml.eex' 3 | ] 4 | 'name': 'XML (EEx)' 5 | 'patterns': [ 6 | { 7 | 'include': 'text.elixir' 8 | }, 9 | { 10 | 'include': 'text.xml' 11 | } 12 | ] 13 | 'scopeName': 'text.xml.elixir' 14 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Elixir language support for Atom.", 3 | "engines": { 4 | "atom": ">= 0.189.0" 5 | }, 6 | "license": "Apache 2.0", 7 | "name": "language-elixir", 8 | "repository": "https://github.com/elixir-lang/language-elixir", 9 | "version": "0.25.2" 10 | } 11 | -------------------------------------------------------------------------------- /settings/language-elixir.cson: -------------------------------------------------------------------------------- 1 | 
'.source.elixir': 2 | 'editor': 3 | 'commentStart': '# ' 4 | 'increaseIndentPattern': '(after|else|catch|rescue|fn|^.*(do|<\\-|\\->|\\{|\\[|\\=))\\s*$' 5 | 'decreaseIndentPattern': '^\\s*((\\}|\\])\\s*$|(after|else|catch|rescue|end)\\b)' 6 | -------------------------------------------------------------------------------- /snippets/language-elixir.cson: -------------------------------------------------------------------------------- 1 | '.source.elixir': 2 | 'callback': 3 | 'prefix': 'callback' 4 | 'body': '@callback ${1:function_name}($2) :: ${3:type}' 5 | 'case': 6 | 'prefix': 'case' 7 | 'body': 'case $1 do\n\t$0\nend' 8 | 'cond': 9 | 'prefix': 'cond' 10 | 'body': 'cond do\n\t$0\nend' 11 | 'def': 12 | 'prefix': 'def' 13 | 'body': 'def ${1:function_name} do\n\t$0\nend' 14 | 'defp': 15 | 'prefix': 'defp' 16 | 'body': 'defp ${1:function_name} do\n\t$0\nend' 17 | 'defguard': 18 | 'prefix': 'defguard' 19 | 'body': 'defguard ${1:guard_name}($2) when $3' 20 | 'defguardp': 21 | 'prefix': 'defguardp' 22 | 'body': 'defguardp ${1:guard_name}($2) when $3' 23 | 'defmacro': 24 | 'prefix': 'defmacro' 25 | 'body': 'defmacro ${1:macro_name} do\n\t$0\nend' 26 | 'defmacrop': 27 | 'prefix': 'defmacrop' 28 | 'body': 'defmacrop ${1:macro_name} do\n\t$0\nend' 29 | 'defmodule': 30 | 'prefix': 'defmodule' 31 | 'body': 'defmodule $1 do\n\t$0\nend' 32 | 'defstruct': 33 | 'prefix': 'defstruct' 34 | 'body': 'defstruct [$1]' 35 | 'doc': 36 | 'prefix': 'doc' 37 | 'body': '@doc """\n$0\n"""' 38 | 'do': 39 | 'prefix': 'do' 40 | 'body': 'do\n\t$0\nend' 41 | 'for': 42 | 'prefix': 'for' 43 | 'body': 'for $1 <- $2 do\n\t$3\nend' 44 | 'if': 45 | 'prefix': 'if' 46 | 'body': 'if $1 do\n\t$0\nend' 47 | 'if/else': 48 | 'prefix': 'ife' 49 | 'body': 'if $1 do\n\t$2\nelse\n\t$3\nend' 50 | 'inline def': 51 | 'prefix': 'idef' 52 | 'body': 'def $1, do: $2' 53 | 'inline defp': 54 | 'prefix': 'idefp' 55 | 'body': 'defp $1, do: $2' 56 | 'moduledoc': 57 | 'prefix': 'moduledoc' 58 | 'body': '@moduledoc """\n$0\n"""' 
59 | 'moduletag': 60 | 'prefix': 'moduletag' 61 | 'body': '@moduletag ${1::${2:your_tag}}' 62 | 'tag': 63 | 'prefix': 'tag' 64 | 'body': '@tag ${1::${2:your_tag}}' 65 | 'defprotocol': 66 | 'prefix': 'defpro' 67 | 'body': 'defprotocol $1 do\n\t$0\nend' 68 | 'defimpl': 69 | 'prefix': 'defi' 70 | 'body': 'defimpl ${1:protocol}, for: ${2:type} do\n\t$0\nend' 71 | 'defoverridable': 72 | 'prefix': 'defover' 73 | 'body': 'defoverridable [${1:function_name}: ${2:arity}]' 74 | 'IEx.pry': 75 | 'prefix': 'pry' 76 | 'body': 'require IEx\nIEx.pry($0)' 77 | 'IO.inspect': 78 | 'prefix': 'ii' 79 | 'body': 'IO.inspect($0)' 80 | 'IO.puts': 81 | 'prefix': 'ip' 82 | 'body': 'IO.puts($0)' 83 | 'Logger.debug': 84 | 'prefix': 'deb' 85 | 'body': 'Logger.debug "$0"' 86 | 'Logger.debug inspect': 87 | 'prefix': 'debi' 88 | 'body': 'Logger.debug "\#{inspect $0}"' 89 | 'spec': 90 | 'prefix': 'spec' 91 | 'body': '@spec ${1:function_name}($2) :: ${3:type}' 92 | 93 | 'describe': 94 | 'prefix': 'describe' 95 | 'body': """ 96 | describe "${1:tests}" do 97 | $2 98 | end 99 | """ 100 | 101 | 'setup': 102 | 'prefix': 'setup' 103 | 'body': """ 104 | setup ${1:%{$2\\}} do 105 | $3 106 | end 107 | """ 108 | 109 | 'inline setup': 110 | 'prefix': 'isetup' 111 | 'body': 'setup [$1]' 112 | 113 | 'test': 114 | 'prefix': 'test' 115 | 'body': 'test "$1" do\n\t$0\nend' 116 | 'type': 117 | 'prefix': 'type' 118 | 'body': '@type ${1:type_name} :: ${2:type}' 119 | 'typedoc': 120 | 'prefix': 'typedoc' 121 | 'body': '@typedoc """\n$0\n"""' 122 | 'fn': 123 | 'prefix': 'fn' 124 | 'body': 'fn($1) -> ${2:...} end' 125 | 'with': 126 | 'prefix': 'with' 127 | 'body': """ 128 | with $1 <- $2, 129 | $3 <- $4 do 130 | $5 131 | else 132 | _ -> nil 133 | end 134 | """ 135 | 'todo': 136 | 'prefix': 'todo' 137 | 'body': '# TODO: $0' 138 | 'fixme': 139 | 'prefix': 'fix' 140 | 'body': '# FIXME: $0' 141 | 'xxx': 142 | 'prefix': 'xxx' 143 | 'body': '# XXX: $0' 144 | 'idea': 145 | 'prefix': 'idea' 146 | 'body': '# IDEA: $0' 147 | 
'hack': 148 | 'prefix': 'hack' 149 | 'body': '# HACK: $0' 150 | 'note': 151 | 'prefix': 'note' 152 | 'body': '# NOTE: $0' 153 | 'review': 154 | 'prefix': 'review' 155 | 'body': '# REVIEW: $0' 156 | 'bug': 157 | 'prefix': 'bug' 158 | 'body': '# BUG: $0' 159 | 'question': 160 | 'prefix': 'question' 161 | 'body': '# QUESTION: $0' 162 | '.text.elixir': 163 | '<% inline %>': 164 | 'prefix': '%' 165 | 'body': '<% $0 %>' 166 | '<%= replace %>': 167 | 'prefix': '%=' 168 | 'body': '<%= $0 %>' 169 | '<%% quotation %>': 170 | 'prefix': '%%' 171 | 'body': '<%% $0 %>' 172 | '<%# comment %>': 173 | 'prefix': '%#' 174 | 'body': '<%# $0 %>' 175 | 'if': 176 | 'prefix': 'if' 177 | 'body': '<%= if ${1:true} do %>\n\t$0\n<% end %>' 178 | 'if/else': 179 | 'prefix': 'ife' 180 | 'body': '<%= if ${1:true} do %>\n\t$2\n<% else %>\n\t$0\n<% end %>' 181 | 'for': 182 | 'prefix': 'for' 183 | 'body': '<%= for ${1:el} <- ${2:list} do %>\n\t$0\n<% end %>' 184 | 'todo': 185 | 'prefix': 'todo' 186 | 'body': '<%# TODO: $0 %>' 187 | 'fixme': 188 | 'prefix': 'fix' 189 | 'body': '<%# FIXME: $0 %>' 190 | 'xxx': 191 | 'prefix': 'xxx' 192 | 'body': '<%# XXX: $0 %>' 193 | 'idea': 194 | 'prefix': 'idea' 195 | 'body': '<%# IDEA: $0 %>' 196 | 'hack': 197 | 'prefix': 'hack' 198 | 'body': '<%# HACK: $0 %>' 199 | 'note': 200 | 'prefix': 'note' 201 | 'body': '<%# NOTE: $0 %>' 202 | 'review': 203 | 'prefix': 'review' 204 | 'body': '<%# REVIEW: $0 %>' 205 | 'bug': 206 | 'prefix': 'bug' 207 | 'body': '<%# BUG: $0 %>' 208 | 'question': 209 | 'prefix': 'question' 210 | 'body': '<%# QUESTION: $0 %>' 211 | -------------------------------------------------------------------------------- /spec/elixir-spec.coffee: -------------------------------------------------------------------------------- 1 | describe "Elixir grammar", -> 2 | grammar = null 3 | 4 | beforeEach -> 5 | waitsForPromise -> 6 | atom.packages.activatePackage("language-elixir") 7 | 8 | runs -> 9 | grammar = atom.grammars.grammarForScopeName("source.elixir") 
10 | 11 | it "parses the grammar", -> 12 | expect(grammar).toBeTruthy() 13 | expect(grammar.scopeName).toBe "source.elixir" 14 | 15 | it "tokenizes underscore variables as comments", -> 16 | {tokens} = grammar.tokenizeLine('_some_variable?') 17 | expect(tokens[0]).toEqual value: '_some_variable?', scopes: ['source.elixir', 'unused.comment.elixir'] 18 | 19 | {tokens} = grammar.tokenizeLine('some_variable') 20 | expect(tokens[0]).toEqual value: 'some_variable', scopes: ['source.elixir'] 21 | 22 | it "tokenizes underscore as wildcard variable", -> 23 | {tokens} = grammar.tokenizeLine('this _ other_thing') 24 | expect(tokens[0]).not.toEqual value: 'this ', scopes: ['source.elixir', 'wildcard.comment.elixir'] 25 | expect(tokens[1]).toEqual value: '_', scopes: ['source.elixir', 'wildcard.comment.elixir'] 26 | expect(tokens[2]).not.toEqual value: ' other_thing', scopes: ['source.elixir', 'wildcard.comment.elixir'] 27 | 28 | {tokens} = grammar.tokenizeLine('some_variable') 29 | expect(tokens[0]).toEqual value: 'some_variable', scopes: ['source.elixir'] 30 | 31 | it "tokenizes bitwise operators", -> 32 | {tokens} = grammar.tokenizeLine('left &&& right') 33 | expect(tokens[1]).toEqual value: '&&&', scopes: ['source.elixir', 'keyword.operator.bitwise.elixir'] 34 | 35 | {tokens} = grammar.tokenizeLine('left >>> right') 36 | expect(tokens[1]).toEqual value: '>>>', scopes: ['source.elixir', 'keyword.operator.bitwise.elixir'] 37 | 38 | {tokens} = grammar.tokenizeLine('left <<< right') 39 | expect(tokens[1]).toEqual value: '<<<', scopes: ['source.elixir', 'keyword.operator.bitwise.elixir'] 40 | 41 | {tokens} = grammar.tokenizeLine('left ^^^ right') 42 | expect(tokens[1]).toEqual value: '^^^', scopes: ['source.elixir', 'keyword.operator.bitwise.elixir'] 43 | 44 | {tokens} = grammar.tokenizeLine('left ||| right') 45 | expect(tokens[1]).toEqual value: '|||', scopes: ['source.elixir', 'keyword.operator.bitwise.elixir'] 46 | 47 | {tokens} = grammar.tokenizeLine('~~~exp') 48 | 
expect(tokens[0]).toEqual value: '~~~', scopes: ['source.elixir', 'keyword.operator.bitwise.elixir'] 49 | 50 | it "tokenizes comparison operators", -> 51 | {tokens} = grammar.tokenizeLine('left === right') 52 | expect(tokens[1]).toEqual value: '===', scopes: ['source.elixir', 'keyword.operator.comparison.elixir'] 53 | 54 | {tokens} = grammar.tokenizeLine('left == right') 55 | expect(tokens[1]).toEqual value: '==', scopes: ['source.elixir', 'keyword.operator.comparison.elixir'] 56 | 57 | {tokens} = grammar.tokenizeLine('left != right') 58 | expect(tokens[1]).toEqual value: '!=', scopes: ['source.elixir', 'keyword.operator.comparison.elixir'] 59 | 60 | {tokens} = grammar.tokenizeLine('left !== right') 61 | expect(tokens[1]).toEqual value: '!==', scopes: ['source.elixir', 'keyword.operator.comparison.elixir'] 62 | 63 | {tokens} = grammar.tokenizeLine('left <= right') 64 | expect(tokens[1]).toEqual value: '<=', scopes: ['source.elixir', 'keyword.operator.comparison.elixir'] 65 | 66 | {tokens} = grammar.tokenizeLine('left >= right') 67 | expect(tokens[1]).toEqual value: '>=', scopes: ['source.elixir', 'keyword.operator.comparison.elixir'] 68 | 69 | {tokens} = grammar.tokenizeLine('left =~ right') 70 | expect(tokens[1]).toEqual value: '=~', scopes: ['source.elixir', 'keyword.operator.comparison.elixir'] 71 | 72 | it "tokenizes logical operators", -> 73 | {tokens} = grammar.tokenizeLine('left || right') 74 | expect(tokens[1]).toEqual value: '||', scopes: ['source.elixir', 'keyword.operator.logical.elixir'] 75 | 76 | {tokens} = grammar.tokenizeLine('left && right') 77 | expect(tokens[1]).toEqual value: '&&', scopes: ['source.elixir', 'keyword.operator.logical.elixir'] 78 | 79 | it "tokenizes other operators", -> 80 | {tokens} = grammar.tokenizeLine('left |> right') 81 | expect(tokens[1]).toEqual value: '|>', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 82 | 83 | {tokens} = grammar.tokenizeLine('left | right') 84 | expect(tokens[1]).toEqual value: '|', scopes: 
['source.elixir', 'keyword.operator.other.elixir'] 85 | 86 | {tokens} = grammar.tokenizeLine('left ++ right') 87 | expect(tokens[1]).toEqual value: '++', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 88 | 89 | {tokens} = grammar.tokenizeLine('left -- right') 90 | expect(tokens[1]).toEqual value: '--', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 91 | 92 | {tokens} = grammar.tokenizeLine('left \\\\ right') 93 | expect(tokens[1]).toEqual value: '\\\\', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 94 | 95 | {tokens} = grammar.tokenizeLine('left <- right') 96 | expect(tokens[1]).toEqual value: '<-', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 97 | 98 | {tokens} = grammar.tokenizeLine('left <> right') 99 | expect(tokens[1]).toEqual value: '<>', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 100 | 101 | {tokens} = grammar.tokenizeLine('left :: right') 102 | expect(tokens[1]).toEqual value: '::', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 103 | 104 | {tokens} = grammar.tokenizeLine('left .. 
right') 105 | expect(tokens[1]).toEqual value: '..', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 106 | 107 | {tokens} = grammar.tokenizeLine('left => right') 108 | expect(tokens[1]).toEqual value: '=>', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 109 | 110 | {tokens} = grammar.tokenizeLine('left -> right') 111 | expect(tokens[1]).toEqual value: '->', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 112 | 113 | {tokens} = grammar.tokenizeLine('left <<~ right') 114 | expect(tokens[1]).toEqual value: '<<~', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 115 | 116 | {tokens} = grammar.tokenizeLine('left <~ right') 117 | expect(tokens[1]).toEqual value: '<~', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 118 | 119 | {tokens} = grammar.tokenizeLine('left ~>> right') 120 | expect(tokens[1]).toEqual value: '~>>', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 121 | 122 | {tokens} = grammar.tokenizeLine('left ~> right') 123 | expect(tokens[1]).toEqual value: '~>', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 124 | 125 | {tokens} = grammar.tokenizeLine('left <~> right') 126 | expect(tokens[1]).toEqual value: '<~>', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 127 | 128 | {tokens} = grammar.tokenizeLine('left <|> right') 129 | expect(tokens[1]).toEqual value: '<|>', scopes: ['source.elixir', 'keyword.operator.other.elixir'] 130 | 131 | it "tokenizes arrays", -> 132 | {tokens} = grammar.tokenizeLine('[]') 133 | expect(tokens[0]).toEqual value: '[]', scopes: ['source.elixir', 'punctuation.section.array.elixir'] 134 | 135 | {tokens} = grammar.tokenizeLine('[1,2,3]') 136 | expect(tokens[0]).toEqual value: '[', scopes: ['source.elixir', 'punctuation.section.array.elixir'] 137 | expect(tokens[6]).toEqual value: ']', scopes: ['source.elixir', 'punctuation.section.array.elixir'] 138 | 139 | it "tokenizes symbols", -> 140 | {tokens} = grammar.tokenizeLine(':erlang.system_info') 141 
| expect(tokens[0]).toEqual value: ':', scopes: ['source.elixir', 'constant.other.symbol.elixir', 'punctuation.definition.constant.elixir'] 142 | expect(tokens[1]).toEqual value: 'erlang', scopes: ['source.elixir', 'constant.other.symbol.elixir'] 143 | 144 | {tokens} = grammar.tokenizeLine('size: 0') 145 | expect(tokens[0]).toEqual value: 'size', scopes: ['source.elixir', 'constant.other.symbol.elixir'] 146 | expect(tokens[1]).toEqual value: ':', scopes: ['source.elixir', 'constant.other.symbol.elixir', 'punctuation.definition.constant.elixir'] 147 | 148 | {tokens} = grammar.tokenizeLine('size: :erlang.system_info') 149 | expect(tokens[0]).toEqual value: 'size', scopes: ['source.elixir', 'constant.other.symbol.elixir'] 150 | expect(tokens[1]).toEqual value: ':', scopes: ['source.elixir', 'constant.other.symbol.elixir', 'punctuation.definition.constant.elixir'] 151 | expect(tokens[3]).toEqual value: ':', scopes: ['source.elixir', 'constant.other.symbol.elixir', 'punctuation.definition.constant.elixir'] 152 | expect(tokens[4]).toEqual value: 'erlang', scopes: ['source.elixir', 'constant.other.symbol.elixir'] 153 | 154 | {tokens} = grammar.tokenizeLine('case: case') 155 | expect(tokens[0]).toEqual value: 'case', scopes: ['source.elixir', 'constant.other.symbol.elixir'] 156 | expect(tokens[1]).toEqual value: ':', scopes: ['source.elixir', 'constant.other.symbol.elixir', 'punctuation.definition.constant.elixir'] 157 | 158 | {tokens} = grammar.tokenizeLine(':"symbol"') 159 | expect(tokens[0]).toEqual value: ':"', scopes: ['source.elixir', 'constant.other.symbol.double-quoted.elixir', 'punctuation.definition.constant.elixir'] 160 | expect(tokens[1]).toEqual value: 'symbol', scopes: ['source.elixir', 'constant.other.symbol.double-quoted.elixir'] 161 | expect(tokens[2]).toEqual value: '"', scopes: ['source.elixir', 'constant.other.symbol.double-quoted.elixir', 'punctuation.definition.constant.elixir'] 162 | 163 | {tokens} = grammar.tokenizeLine('"symbol as key":') 164 | 
expect(tokens[0]).toEqual value: '"', scopes: ['source.elixir', 'constant.other.symbol.double-quoted.elixir', 'punctuation.definition.constant.elixir'] 165 | expect(tokens[1]).toEqual value: 'symbol as key', scopes: ['source.elixir', 'constant.other.symbol.double-quoted.elixir'] 166 | expect(tokens[2]).toEqual value: '":', scopes: ['source.elixir', 'constant.other.symbol.double-quoted.elixir', 'punctuation.definition.constant.elixir'] 167 | 168 | {tokens} = grammar.tokenizeLine('"symbol as key with unescaped " inside":') 169 | expect(tokens[0]).toEqual value: '"', scopes: ['source.elixir', 'string.quoted.double.elixir', 'punctuation.definition.string.begin.elixir'] 170 | expect(tokens[1]).toEqual value: 'symbol as key with unescaped ', scopes: ['source.elixir', 'string.quoted.double.elixir'] 171 | expect(tokens[2]).toEqual value: '"', scopes: ['source.elixir', 'string.quoted.double.elixir', 'punctuation.definition.string.end.elixir'] 172 | expect(tokens[3]).toEqual value: ' inside', scopes: ['source.elixir'] 173 | expect(tokens[4]).toEqual value: '"', scopes: ['source.elixir', 'string.quoted.double.elixir', 'punctuation.definition.string.begin.elixir'] 174 | expect(tokens[5]).toEqual value: ':', scopes: ['source.elixir', 'string.quoted.double.elixir'] 175 | 176 | {tokens} = grammar.tokenizeLine('"symbol as key with escaped \\" inside":') 177 | expect(tokens[1]).toEqual value: 'symbol as key with escaped ', scopes: ['source.elixir', 'constant.other.symbol.double-quoted.elixir'] 178 | expect(tokens[2]).toEqual value: '\\"', scopes: ['source.elixir', 'constant.other.symbol.double-quoted.elixir', 'constant.character.escape.elixir'] 179 | expect(tokens[3]).toEqual value: ' inside', scopes: ['source.elixir', 'constant.other.symbol.double-quoted.elixir'] 180 | expect(tokens[4]).toEqual value: '":', scopes: ['source.elixir', 'constant.other.symbol.double-quoted.elixir', 'punctuation.definition.constant.elixir'] 181 | 182 | {tokens} = grammar.tokenizeLine("'charlist as 
key':") 183 | expect(tokens[0]).toEqual value: "'", scopes: ['source.elixir', 'constant.other.symbol.single-quoted.elixir', 'punctuation.definition.constant.elixir'] 184 | expect(tokens[1]).toEqual value: 'charlist as key', scopes: ['source.elixir', 'constant.other.symbol.single-quoted.elixir'] 185 | expect(tokens[2]).toEqual value: "':", scopes: ['source.elixir', 'constant.other.symbol.single-quoted.elixir', 'punctuation.definition.constant.elixir'] 186 | 187 | {tokens} = grammar.tokenizeLine("'charlist as key with escaped \\' inside':") 188 | expect(tokens[0]).toEqual value: "'", scopes: ['source.elixir', 'constant.other.symbol.single-quoted.elixir', 'punctuation.definition.constant.elixir'] 189 | expect(tokens[1]).toEqual value: 'charlist as key with escaped ', scopes: ['source.elixir', 'constant.other.symbol.single-quoted.elixir'] 190 | expect(tokens[2]).toEqual value: "\\'", scopes: ['source.elixir', 'constant.other.symbol.single-quoted.elixir', 'constant.character.escape.elixir'] 191 | expect(tokens[3]).toEqual value: " inside", scopes: ['source.elixir', 'constant.other.symbol.single-quoted.elixir'] 192 | expect(tokens[4]).toEqual value: "':", scopes: ['source.elixir', 'constant.other.symbol.single-quoted.elixir', 'punctuation.definition.constant.elixir'] 193 | 194 | it "tokenizes comments", -> 195 | {tokens} = grammar.tokenizeLine("# TODO: stuff") 196 | expect(tokens[0]).toEqual value: '#', scopes: ['source.elixir', 'comment.line.number-sign.elixir', 'punctuation.definition.comment.elixir'] 197 | expect(tokens[1]).toEqual value: ' TODO: stuff', scopes: ['source.elixir', 'comment.line.number-sign.elixir'] 198 | 199 | it "tokenizes do's", -> 200 | {tokens} = grammar.tokenizeLine("do") 201 | expect(tokens[0]).toEqual value: 'do', scopes: ['source.elixir', 'keyword.control.elixir'] 202 | 203 | it "tokenizes interpolated regex sigils", -> 204 | {tokens} = grammar.tokenizeLine('~r/test #{foo}/') 205 | expect(tokens[0]).toEqual value: '~r/', scopes: 
['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.begin.elixir'] 206 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'string.regexp.interpolated.elixir'] 207 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 208 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source'] 209 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 210 | expect(tokens[5]).toEqual value: '/', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.end.elixir'] 211 | 212 | {tokens} = grammar.tokenizeLine('~r|test #{foo}|') 213 | expect(tokens[0]).toEqual value: '~r|', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.begin.elixir'] 214 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'string.regexp.interpolated.elixir'] 215 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 216 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source'] 217 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 218 | expect(tokens[5]).toEqual value: '|', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.end.elixir'] 219 | 220 | {tokens} = grammar.tokenizeLine('~r"test #{foo}"') 221 | expect(tokens[0]).toEqual value: '~r"', scopes: ['source.elixir', 
'string.regexp.interpolated.elixir', 'punctuation.section.regexp.begin.elixir'] 222 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'string.regexp.interpolated.elixir'] 223 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 224 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source'] 225 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 226 | expect(tokens[5]).toEqual value: '"', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.end.elixir'] 227 | 228 | {tokens} = grammar.tokenizeLine('~r\'test #{foo}\'') 229 | expect(tokens[0]).toEqual value: "~r'", scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.begin.elixir'] 230 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'string.regexp.interpolated.elixir'] 231 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 232 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source'] 233 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 234 | expect(tokens[5]).toEqual value: "'", scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.end.elixir'] 235 | 236 | {tokens} = grammar.tokenizeLine('~r(test #{foo})') 237 | expect(tokens[0]).toEqual value: '~r(', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 
'punctuation.section.regexp.begin.elixir'] 238 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'string.regexp.interpolated.elixir'] 239 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 240 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source'] 241 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 242 | expect(tokens[5]).toEqual value: ')', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.end.elixir'] 243 | 244 | {tokens} = grammar.tokenizeLine('~r[test #{foo}]') 245 | expect(tokens[0]).toEqual value: '~r[', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.begin.elixir'] 246 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'string.regexp.interpolated.elixir'] 247 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 248 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source'] 249 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 250 | expect(tokens[5]).toEqual value: ']', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.end.elixir'] 251 | 252 | {tokens} = grammar.tokenizeLine('~r{test #{foo}}') 253 | expect(tokens[0]).toEqual value: '~r{', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.begin.elixir'] 254 | 
expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'string.regexp.interpolated.elixir'] 255 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 256 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source'] 257 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 258 | expect(tokens[5]).toEqual value: '}', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.end.elixir'] 259 | 260 | {tokens} = grammar.tokenizeLine('~r<test #{foo}>') 261 | expect(tokens[0]).toEqual value: '~r<', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.begin.elixir'] 262 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'string.regexp.interpolated.elixir'] 263 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 264 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source'] 265 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 266 | expect(tokens[5]).toEqual value: '>', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.end.elixir'] 267 | 268 | {tokens} = grammar.tokenizeLine('~r"""test #{foo}"""') 269 | expect(tokens[0]).toEqual value: '~r"""', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.begin.elixir'] 270 | expect(tokens[1]).toEqual value: 'test ', scopes: 
['source.elixir', 'string.regexp.interpolated.elixir'] 271 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 272 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source'] 273 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 274 | expect(tokens[5]).toEqual value: '"""', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.end.elixir'] 275 | 276 | {tokens} = grammar.tokenizeLine('~r\'\'\'test #{foo}\'\'\'') 277 | expect(tokens[0]).toEqual value: '~r\'\'\'', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.begin.elixir'] 278 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'string.regexp.interpolated.elixir'] 279 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 280 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source'] 281 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 282 | expect(tokens[5]).toEqual value: '\'\'\'', scopes: ['source.elixir', 'string.regexp.interpolated.elixir', 'punctuation.section.regexp.end.elixir'] 283 | 284 | it "tokenizes literal regex sigils", -> 285 | {tokens} = grammar.tokenizeLine('~R/test #{foo}/') 286 | expect(tokens[0]).toEqual value: '~R/', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.begin.elixir'] 287 | expect(tokens[1]).toEqual value: 
'test #{foo}', scopes: ['source.elixir', 'string.regexp.literal.elixir'] 288 | expect(tokens[2]).toEqual value: '/', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.end.elixir'] 289 | 290 | {tokens} = grammar.tokenizeLine('~R|test #{foo}|') 291 | expect(tokens[0]).toEqual value: '~R|', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.begin.elixir'] 292 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'string.regexp.literal.elixir'] 293 | expect(tokens[2]).toEqual value: '|', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.end.elixir'] 294 | 295 | {tokens} = grammar.tokenizeLine('~R{test #{foo}}') 296 | expect(tokens[0]).toEqual value: '~R{', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.begin.elixir'] 297 | expect(tokens[1]).toEqual value: 'test #', scopes: ['source.elixir', 'string.regexp.literal.elixir'] 298 | expect(tokens[2]).toEqual value : '{', scopes : ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.scope.elixir'] 299 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'string.regexp.literal.elixir'] 300 | expect(tokens[4]).toEqual value : '}', scopes : ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.scope.elixir'] 301 | expect(tokens[5]).toEqual value: '}', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.end.elixir'] 302 | 303 | {tokens} = grammar.tokenizeLine('~R[test #{foo}]') 304 | expect(tokens[0]).toEqual value: '~R[', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.begin.elixir'] 305 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'string.regexp.literal.elixir'] 306 | expect(tokens[2]).toEqual value: ']', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.end.elixir'] 307 | 308 
| {tokens} = grammar.tokenizeLine('~R(test #{foo})') 309 | expect(tokens[0]).toEqual value: '~R(', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.begin.elixir'] 310 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'string.regexp.literal.elixir'] 311 | expect(tokens[2]).toEqual value: ')', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.end.elixir'] 312 | 313 | {tokens} = grammar.tokenizeLine('~R<test #{foo}>') 314 | expect(tokens[0]).toEqual value: '~R<', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.begin.elixir'] 315 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'string.regexp.literal.elixir'] 316 | expect(tokens[2]).toEqual value: '>', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.end.elixir'] 317 | 318 | {tokens} = grammar.tokenizeLine('~R"test #{foo}"') 319 | expect(tokens[0]).toEqual value: '~R"', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.begin.elixir'] 320 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'string.regexp.literal.elixir'] 321 | expect(tokens[2]).toEqual value: '"', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.end.elixir'] 322 | 323 | {tokens} = grammar.tokenizeLine('~R\'test #{foo}\'') 324 | expect(tokens[0]).toEqual value: '~R\'', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.begin.elixir'] 325 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'string.regexp.literal.elixir'] 326 | expect(tokens[2]).toEqual value: '\'', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.end.elixir'] 327 | 328 | {tokens} = grammar.tokenizeLine('~R"""test #{foo}"""') 329 | expect(tokens[0]).toEqual value: '~R"""', scopes: ['source.elixir', 
'string.regexp.literal.elixir', 'punctuation.section.regexp.begin.elixir'] 330 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'string.regexp.literal.elixir'] 331 | expect(tokens[2]).toEqual value: '"""', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.end.elixir'] 332 | 333 | {tokens} = grammar.tokenizeLine('~R\'\'\'test #{foo}\'\'\'') 334 | expect(tokens[0]).toEqual value: '~R\'\'\'', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.begin.elixir'] 335 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'string.regexp.literal.elixir'] 336 | expect(tokens[2]).toEqual value: '\'\'\'', scopes: ['source.elixir', 'string.regexp.literal.elixir', 'punctuation.section.regexp.end.elixir'] 337 | 338 | it "tokenizes interpolated character lists", -> 339 | {tokens} = grammar.tokenizeLine('~c(test #{foo})') 340 | expect(tokens[0]).toEqual value: '~c(', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 341 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 342 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 343 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source'] 344 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 345 | expect(tokens[5]).toEqual value: ')', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 346 | 347 | {tokens} = grammar.tokenizeLine('~c/test #{foo}/') 348 | 
expect(tokens[0]).toEqual value: '~c/', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 349 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 350 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 351 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source'] 352 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 353 | expect(tokens[5]).toEqual value: '/', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 354 | 355 | {tokens} = grammar.tokenizeLine('~c[test #{foo}]') 356 | expect(tokens[0]).toEqual value: '~c[', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 357 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 358 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 359 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source'] 360 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 361 | expect(tokens[5]).toEqual value: ']', scopes: ['source.elixir', 
'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 362 | 363 | {tokens} = grammar.tokenizeLine('~c{test #{foo}}') 364 | expect(tokens[0]).toEqual value: '~c{', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 365 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 366 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 367 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source'] 368 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 369 | expect(tokens[5]).toEqual value: '}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 370 | 371 | {tokens} = grammar.tokenizeLine('~c<test #{foo}>') 372 | expect(tokens[0]).toEqual value: '~c<', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 373 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 374 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 375 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source'] 376 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 
'punctuation.section.embedded.elixir'] 377 | expect(tokens[5]).toEqual value: '>', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 378 | 379 | {tokens} = grammar.tokenizeLine('~c"test #{foo}"') 380 | expect(tokens[0]).toEqual value: '~c"', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 381 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 382 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 383 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source'] 384 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 385 | expect(tokens[5]).toEqual value: '"', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 386 | 387 | {tokens} = grammar.tokenizeLine('~c\'test #{foo}\'') 388 | expect(tokens[0]).toEqual value: '~c\'', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 389 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 390 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 391 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source'] 392 | expect(tokens[4]).toEqual 
value: '}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 393 | expect(tokens[5]).toEqual value: '\'', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 394 | 395 | {tokens} = grammar.tokenizeLine('~c|test #{foo}|') 396 | expect(tokens[0]).toEqual value: '~c|', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 397 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 398 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 399 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source'] 400 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 401 | expect(tokens[5]).toEqual value: '|', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 402 | 403 | {tokens} = grammar.tokenizeLine('~c"""test #{foo}"""') 404 | expect(tokens[0]).toEqual value: '~c"""', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 405 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 406 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 407 | expect(tokens[3]).toEqual value: 'foo', scopes: 
['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source'] 408 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 409 | expect(tokens[5]).toEqual value: '"""', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 410 | 411 | {tokens} = grammar.tokenizeLine('~c\'\'\'test #{foo}\'\'\'') 412 | expect(tokens[0]).toEqual value: '~c\'\'\'', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 413 | expect(tokens[1]).toEqual value: 'test ', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 414 | expect(tokens[2]).toEqual value: '#{', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 415 | expect(tokens[3]).toEqual value: 'foo', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source'] 416 | expect(tokens[4]).toEqual value: '}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 417 | expect(tokens[5]).toEqual value: '\'\'\'', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 418 | 419 | it "tokenizes Literal character lists", -> 420 | {tokens} = grammar.tokenizeLine('~C(test #{foo})') 421 | expect(tokens[0]).toEqual value: '~C(', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 422 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 423 | expect(tokens[2]).toEqual value: ')', scopes: 
['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 424 | 425 | {tokens} = grammar.tokenizeLine('~C[test #{foo}]') 426 | expect(tokens[0]).toEqual value: '~C[', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 427 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 428 | expect(tokens[2]).toEqual value: ']', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 429 | 430 | {tokens} = grammar.tokenizeLine('~C{test #{foo}') 431 | expect(tokens[0]).toEqual value: '~C{', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 432 | expect(tokens[1]).toEqual value: 'test #{foo', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 433 | expect(tokens[2]).toEqual value: '}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 434 | 435 | {tokens} = grammar.tokenizeLine('~C/test #{foo}/') 436 | expect(tokens[0]).toEqual value: '~C/', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 437 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 438 | expect(tokens[2]).toEqual value: '/', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 439 | 440 | {tokens} = grammar.tokenizeLine('~C\'test #{foo}\'') 441 | expect(tokens[0]).toEqual value: '~C\'', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 442 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 
'support.function.variable.quoted.single.elixir'] 443 | expect(tokens[2]).toEqual value: '\'', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 444 | 445 | {tokens} = grammar.tokenizeLine('~C<test #{foo}>') 446 | expect(tokens[0]).toEqual value: '~C<', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 447 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 448 | expect(tokens[2]).toEqual value: '>', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 449 | 450 | {tokens} = grammar.tokenizeLine('~C|test #{foo}|') 451 | expect(tokens[0]).toEqual value: '~C|', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 452 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 453 | expect(tokens[2]).toEqual value: '|', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 454 | 455 | {tokens} = grammar.tokenizeLine('~C"test #{foo}"') 456 | expect(tokens[0]).toEqual value: '~C"', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 457 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 458 | expect(tokens[2]).toEqual value: '"', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 459 | 460 | {tokens} = grammar.tokenizeLine('~C"""test #{foo}"""') 461 | expect(tokens[0]).toEqual value: '~C"""', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 
'punctuation.definition.string.begin.elixir'] 462 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 463 | expect(tokens[2]).toEqual value: '"""', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 464 | 465 | {tokens} = grammar.tokenizeLine('~C\'\'\'test #{foo}\'\'\'') 466 | expect(tokens[0]).toEqual value: '~C\'\'\'', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.begin.elixir'] 467 | expect(tokens[1]).toEqual value: 'test #{foo}', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir'] 468 | expect(tokens[2]).toEqual value: '\'\'\'', scopes: ['source.elixir', 'support.function.variable.quoted.single.elixir', 'punctuation.definition.string.end.elixir'] 469 | 470 | it "tokenizes ExUnit macros", -> 471 | {tokens} = grammar.tokenizeLine("describe 'some description' do") 472 | expect(tokens[0]).toEqual value: 'describe', scopes: ['source.elixir', 'keyword.control.elixir'] 473 | 474 | {tokens} = grammar.tokenizeLine("test 'some assertion' do") 475 | expect(tokens[0]).toEqual value: 'test', scopes: ['source.elixir', 'keyword.control.elixir'] 476 | 477 | describe "word lists", -> 478 | it "tokenizes interpolated word lists", -> 479 | {tokens} = grammar.tokenizeLine('~w"#{foo} bar"') 480 | expect(tokens[0]).toEqual value: '~w"', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.begin.elixir'] 481 | expect(tokens[1]).toEqual value: '#{', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 482 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source'] 483 | expect(tokens[3]).toEqual value: '}', scopes: ['source.elixir', 
'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 484 | expect(tokens[4]).toEqual value: ' bar', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir'] 485 | expect(tokens[5]).toEqual value: '"', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 486 | 487 | {tokens} = grammar.tokenizeLine('~w[#{foo} bar]') 488 | expect(tokens[0]).toEqual value: '~w[', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.begin.elixir'] 489 | expect(tokens[1]).toEqual value: '#{', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 490 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source'] 491 | expect(tokens[3]).toEqual value: '}', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 492 | expect(tokens[4]).toEqual value: ' bar', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir'] 493 | expect(tokens[5]).toEqual value: ']', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 494 | 495 | {tokens} = grammar.tokenizeLine('~w{#{foo} bar}') 496 | expect(tokens[0]).toEqual value: '~w{', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.begin.elixir'] 497 | expect(tokens[1]).toEqual value: '#{', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 498 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source'] 499 | expect(tokens[3]).toEqual value: '}', 
scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 500 | expect(tokens[4]).toEqual value: ' bar', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir'] 501 | expect(tokens[5]).toEqual value: '}', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 502 | 503 | {tokens} = grammar.tokenizeLine('~w\'#{foo} bar\'') 504 | expect(tokens[0]).toEqual value: '~w\'', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.begin.elixir'] 505 | expect(tokens[1]).toEqual value: '#{', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 506 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source'] 507 | expect(tokens[3]).toEqual value: '}', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 508 | expect(tokens[4]).toEqual value: ' bar', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir'] 509 | expect(tokens[5]).toEqual value: '\'', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 510 | 511 | {tokens} = grammar.tokenizeLine('~w|#{foo} bar|') 512 | expect(tokens[0]).toEqual value: '~w|', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.begin.elixir'] 513 | expect(tokens[1]).toEqual value: '#{', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 514 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source'] 515 | 
expect(tokens[3]).toEqual value: '}', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 516 | expect(tokens[4]).toEqual value: ' bar', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir'] 517 | expect(tokens[5]).toEqual value: '|', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 518 | 519 | {tokens} = grammar.tokenizeLine('~w(#{foo} bar)') 520 | expect(tokens[0]).toEqual value: '~w(', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.begin.elixir'] 521 | expect(tokens[1]).toEqual value: '#{', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 522 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source'] 523 | expect(tokens[3]).toEqual value: '}', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 524 | expect(tokens[4]).toEqual value: ' bar', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir'] 525 | expect(tokens[5]).toEqual value: ')', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 526 | 527 | {tokens} = grammar.tokenizeLine('~w<#{foo} bar>') 528 | expect(tokens[0]).toEqual value: '~w<', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.begin.elixir'] 529 | expect(tokens[1]).toEqual value: '#{', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 530 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 
'source.elixir.embedded.source'] 531 | expect(tokens[3]).toEqual value: '}', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 532 | expect(tokens[4]).toEqual value: ' bar', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir'] 533 | expect(tokens[5]).toEqual value: '>', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 534 | 535 | {tokens} = grammar.tokenizeLine('~w/#{foo} bar/') 536 | expect(tokens[0]).toEqual value: '~w/', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.begin.elixir'] 537 | expect(tokens[1]).toEqual value: '#{', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 538 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source'] 539 | expect(tokens[3]).toEqual value: '}', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 540 | expect(tokens[4]).toEqual value: ' bar', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir'] 541 | expect(tokens[5]).toEqual value: '/', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 542 | 543 | {tokens} = grammar.tokenizeLine('~w"""#{foo} bar"""') 544 | expect(tokens[0]).toEqual value: '~w"""', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.begin.elixir'] 545 | expect(tokens[1]).toEqual value: '#{', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 546 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 
'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source'] 547 | expect(tokens[3]).toEqual value: '}', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 548 | expect(tokens[4]).toEqual value: ' bar', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir'] 549 | expect(tokens[5]).toEqual value: '"""', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 550 | 551 | {tokens} = grammar.tokenizeLine('~w\'\'\'#{foo} bar\'\'\'') 552 | expect(tokens[0]).toEqual value: '~w\'\'\'', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.begin.elixir'] 553 | expect(tokens[1]).toEqual value: '#{', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 554 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source'] 555 | expect(tokens[3]).toEqual value: '}', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'source.elixir.embedded.source', 'punctuation.section.embedded.elixir'] 556 | expect(tokens[4]).toEqual value: ' bar', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir'] 557 | expect(tokens[5]).toEqual value: '\'\'\'', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 558 | 559 | it "tokenizes literal word lists", -> 560 | {tokens} = grammar.tokenizeLine('~W"#{foo} bar"') 561 | expect(tokens[0]).toEqual value: '~W"', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.begin.elixir'] 562 | expect(tokens[1]).toEqual value: '#{foo} bar', scopes: ['source.elixir', 'string.quoted.double.literal.elixir'] 563 | expect(tokens[2]).toEqual value: '"', scopes: 
['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.end.elixir'] 564 | 565 | {tokens} = grammar.tokenizeLine('~W\'#{foo} bar\'') 566 | expect(tokens[0]).toEqual value: '~W\'', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.begin.elixir'] 567 | expect(tokens[1]).toEqual value: '#{foo} bar', scopes: ['source.elixir', 'string.quoted.double.literal.elixir'] 568 | expect(tokens[2]).toEqual value: '\'', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.end.elixir'] 569 | 570 | {tokens} = grammar.tokenizeLine('~W[#{foo} bar]') 571 | expect(tokens[0]).toEqual value: '~W[', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.begin.elixir'] 572 | expect(tokens[1]).toEqual value: '#{foo} bar', scopes: ['source.elixir', 'string.quoted.double.literal.elixir'] 573 | expect(tokens[2]).toEqual value: ']', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.end.elixir'] 574 | 575 | {tokens} = grammar.tokenizeLine('~W|#{foo} bar|') 576 | expect(tokens[0]).toEqual value: '~W|', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.begin.elixir'] 577 | expect(tokens[1]).toEqual value: '#{foo} bar', scopes: ['source.elixir', 'string.quoted.double.literal.elixir'] 578 | expect(tokens[2]).toEqual value: '|', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.end.elixir'] 579 | 580 | {tokens} = grammar.tokenizeLine('~W{#{foo} bar}') 581 | expect(tokens[0]).toEqual value: '~W{', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.begin.elixir'] 582 | expect(tokens[1]).toEqual value: '#{foo', scopes: ['source.elixir', 'string.quoted.double.literal.elixir'] 583 | expect(tokens[2]).toEqual value: '}', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 
'punctuation.section.list.end.elixir'] 584 | 585 | {tokens} = grammar.tokenizeLine('~W/#{foo} bar/') 586 | expect(tokens[0]).toEqual value: '~W/', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.begin.elixir'] 587 | expect(tokens[1]).toEqual value: '#{foo} bar', scopes: ['source.elixir', 'string.quoted.double.literal.elixir'] 588 | expect(tokens[2]).toEqual value: '/', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.end.elixir'] 589 | 590 | {tokens} = grammar.tokenizeLine('~W<#{foo} bar>') 591 | expect(tokens[0]).toEqual value: '~W<', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.begin.elixir'] 592 | expect(tokens[1]).toEqual value: '#{foo} bar', scopes: ['source.elixir', 'string.quoted.double.literal.elixir'] 593 | expect(tokens[2]).toEqual value: '>', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.end.elixir'] 594 | 595 | {tokens} = grammar.tokenizeLine('~W(#{foo} bar)') 596 | expect(tokens[0]).toEqual value: '~W(', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.begin.elixir'] 597 | expect(tokens[1]).toEqual value: '#{foo} bar', scopes: ['source.elixir', 'string.quoted.double.literal.elixir'] 598 | expect(tokens[2]).toEqual value: ')', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.end.elixir'] 599 | 600 | {tokens} = grammar.tokenizeLine('~W"""#{foo} bar"""') 601 | expect(tokens[0]).toEqual value: '~W"""', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.begin.elixir'] 602 | expect(tokens[1]).toEqual value: '#{foo} bar', scopes: ['source.elixir', 'string.quoted.double.literal.elixir'] 603 | expect(tokens[2]).toEqual value: '"""', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.end.elixir'] 604 | 605 | {tokens} = 
grammar.tokenizeLine('~W\'\'\'#{foo} bar\'\'\'') 606 | expect(tokens[0]).toEqual value: '~W\'\'\'', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.begin.elixir'] 607 | expect(tokens[1]).toEqual value: '#{foo} bar', scopes: ['source.elixir', 'string.quoted.double.literal.elixir'] 608 | expect(tokens[2]).toEqual value: '\'\'\'', scopes: ['source.elixir', 'string.quoted.double.literal.elixir', 'punctuation.section.list.end.elixir'] 609 | 610 | it "only tokenizes the proper modifiers", -> 611 | {tokens} = grammar.tokenizeLine('~w[foo]a') 612 | expect(tokens[2]).toEqual value: ']a', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 613 | 614 | {tokens} = grammar.tokenizeLine('~w[foo]c') 615 | expect(tokens[2]).toEqual value: ']c', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 616 | 617 | {tokens} = grammar.tokenizeLine('~w[foo]s') 618 | expect(tokens[2]).toEqual value: ']s', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 619 | 620 | {tokens} = grammar.tokenizeLine('~w[foo]z') 621 | expect(tokens[2]).toEqual value: ']', scopes: ['source.elixir', 'string.quoted.double.interpolated.elixir', 'punctuation.section.list.end.elixir'] 622 | 623 | it "does not tokenize sigils with improper delimiters", -> 624 | {tokens} = grammar.tokenizeLine('~a.test.') 625 | expect(tokens[0]).toEqual value: '~a', scopes: ['source.elixir'] 626 | expect(tokens[1]).toEqual value: '.', scopes: ['source.elixir', 'punctuation.separator.method.elixir'] 627 | expect(tokens[2]).toEqual value: 'test', scopes: ['source.elixir'] 628 | expect(tokens[3]).toEqual value: '.', scopes: ['source.elixir', 'punctuation.separator.method.elixir'] 629 | 630 | describe "doc attributes", -> 631 | it "highlights string heredocs as comments", -> 632 | {tokens} = grammar.tokenizeLine('@doc 
"""\nTest\n"""') 633 | expect(tokens[0]).toEqual value: '@doc """', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 634 | expect(tokens[1]).toEqual value: '\nTest', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 635 | expect(tokens[2]).toEqual value: '\n"""', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 636 | 637 | {tokens} = grammar.tokenizeLine('@doc ~s"""\nTest\n"""') 638 | expect(tokens[0]).toEqual value: '@doc ~s"""', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 639 | expect(tokens[1]).toEqual value: '\nTest', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 640 | expect(tokens[2]).toEqual value: '\n"""', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 641 | 642 | {tokens} = grammar.tokenizeLine('@doc ~S"""\nTest\n"""') 643 | expect(tokens[0]).toEqual value: '@doc ~S"""', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 644 | expect(tokens[1]).toEqual value: '\nTest', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 645 | expect(tokens[2]).toEqual value: '\n"""', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 646 | 647 | {tokens} = grammar.tokenizeLine("@doc ~S'''\nTest\n'''") 648 | expect(tokens[0]).toEqual value: "@doc ~S'''", scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 649 | expect(tokens[1]).toEqual value: '\nTest', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 650 | expect(tokens[2]).toEqual value: "\n'''", scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 651 | 652 | it "does not highlight other sigil heredocs as comments", -> 653 | {tokens} = grammar.tokenizeLine("@doc '''\nTest\n'''") 654 | expect(tokens[0]).not.toEqual value: "@doc '''", scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 655 | 656 | {tokens} = grammar.tokenizeLine('@doc ~r"""\nTest\n"""') 657 | expect(tokens[0]).not.toEqual value: '@doc ~r"""', 
scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 658 | 659 | {tokens} = grammar.tokenizeLine('@doc ~R"""\nTest\n"""') 660 | expect(tokens[0]).not.toEqual value: '@doc ~R"""', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 661 | 662 | {tokens} = grammar.tokenizeLine('@doc ~c"""\nTest\n"""') 663 | expect(tokens[0]).not.toEqual value: '@doc ~c"""', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 664 | 665 | {tokens} = grammar.tokenizeLine('@doc ~C"""\nTest\n"""') 666 | expect(tokens[0]).not.toEqual value: '@doc ~C"""', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 667 | 668 | {tokens} = grammar.tokenizeLine('@doc ~w"""\nTest\n"""') 669 | expect(tokens[0]).not.toEqual value: '@doc ~w"""', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 670 | 671 | {tokens} = grammar.tokenizeLine('@doc ~W"""\nTest\n"""') 672 | expect(tokens[0]).not.toEqual value: '@doc ~W"""', scopes: ['source.elixir', 'comment.documentation.heredoc.elixir'] 673 | 674 | describe "functions", -> 675 | it "tokenizes single line functions without parameters", -> 676 | {tokens} = grammar.tokenizeLine('def foo, do: :ok') 677 | expect(tokens[0]).toEqual value: 'def', scopes: ['source.elixir', 'keyword.control.elixir'] 678 | expect(tokens[1]).toEqual value: ' ', scopes: ['source.elixir'] 679 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 'entity.name.function.elixir'] 680 | expect(tokens[3]).toEqual value: ',', scopes: ['source.elixir', 'punctuation.separator.object.elixir'] 681 | expect(tokens[4]).toEqual value: ' ', scopes: ['source.elixir'] 682 | expect(tokens[5]).toEqual value: 'do', scopes: ['source.elixir', 'constant.other.symbol.elixir'] 683 | expect(tokens[6]).toEqual value: ':', scopes: ['source.elixir', 'constant.other.symbol.elixir', 'punctuation.definition.constant.elixir'] 684 | expect(tokens[7]).toEqual value: ' ', scopes: ['source.elixir'] 685 | expect(tokens[8]).toEqual value: ':', 
scopes: ['source.elixir', 'constant.other.symbol.elixir', 'punctuation.definition.constant.elixir'] 686 | expect(tokens[9]).toEqual value: 'ok', scopes: ['source.elixir', 'constant.other.symbol.elixir'] 687 | 688 | it "tokenizes single line functions with parameters", -> 689 | {tokens} = grammar.tokenizeLine('def foo(bar, [baz: value], _opts), do: :ok') 690 | expect(tokens[0]).toEqual value: 'def', scopes: ['source.elixir', 'keyword.control.elixir'] 691 | expect(tokens[1]).toEqual value: ' ', scopes: ['source.elixir'] 692 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 'entity.name.function.elixir'] 693 | expect(tokens[3]).toEqual value: '(', scopes: ['source.elixir'] 694 | expect(tokens[4]).toEqual value: 'bar', scopes: ['source.elixir', 'parameter.variable.function.elixir'] 695 | expect(tokens[5]).toEqual value: ',', scopes: ['source.elixir', 'punctuation.separator.object.elixir'] 696 | expect(tokens[6]).toEqual value: ' ', scopes: ['source.elixir'] 697 | expect(tokens[7]).toEqual value: '[', scopes: ['source.elixir', 'punctuation.section.array.elixir'] 698 | expect(tokens[8]).toEqual value: 'baz', scopes: ['source.elixir', 'constant.other.symbol.elixir'] 699 | expect(tokens[9]).toEqual value: ':', scopes: ['source.elixir', 'constant.other.symbol.elixir', 'punctuation.definition.constant.elixir'] 700 | expect(tokens[10]).toEqual value: ' ', scopes: ['source.elixir'] 701 | expect(tokens[11]).toEqual value: 'value', scopes: ['source.elixir', 'parameter.variable.function.elixir'] 702 | expect(tokens[12]).toEqual value: ']', scopes: ['source.elixir', 'punctuation.section.array.elixir'] 703 | expect(tokens[13]).toEqual value: ',', scopes: ['source.elixir', 'punctuation.separator.object.elixir'] 704 | expect(tokens[14]).toEqual value: ' ', scopes: ['source.elixir'] 705 | expect(tokens[15]).toEqual value: '_opts', scopes: ['source.elixir', 'unused.comment.elixir'] 706 | expect(tokens[16]).toEqual value: ')', scopes: ['source.elixir'] 707 | 
expect(tokens[17]).toEqual value: ',', scopes: ['source.elixir', 'punctuation.separator.object.elixir'] 708 | expect(tokens[18]).toEqual value: ' ', scopes: ['source.elixir'] 709 | expect(tokens[19]).toEqual value: 'do', scopes: ['source.elixir', 'constant.other.symbol.elixir'] 710 | expect(tokens[20]).toEqual value: ':', scopes: ['source.elixir', 'constant.other.symbol.elixir', 'punctuation.definition.constant.elixir'] 711 | expect(tokens[21]).toEqual value: ' ', scopes: ['source.elixir'] 712 | expect(tokens[22]).toEqual value: ':', scopes: ['source.elixir', 'constant.other.symbol.elixir', 'punctuation.definition.constant.elixir'] 713 | expect(tokens[23]).toEqual value: 'ok', scopes: ['source.elixir', 'constant.other.symbol.elixir'] 714 | 715 | it "tokenizes multiline functions without parameters", -> 716 | {tokens} = grammar.tokenizeLine('def foo() do\\n :ok\\nend') 717 | expect(tokens[0]).toEqual value: 'def', scopes: ['source.elixir', 'keyword.control.elixir'] 718 | expect(tokens[1]).toEqual value: ' ', scopes: ['source.elixir'] 719 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 'entity.name.function.elixir'] 720 | expect(tokens[3]).toEqual value: '(', scopes: ['source.elixir'] 721 | expect(tokens[4]).toEqual value: ')', scopes: ['source.elixir'] 722 | expect(tokens[5]).toEqual value: ' ', scopes: ['source.elixir'] 723 | expect(tokens[6]).toEqual value: 'do', scopes: ['source.elixir', 'keyword.control.elixir'] 724 | expect(tokens[7]).toEqual value: '\\n ', scopes: ['source.elixir'] 725 | expect(tokens[8]).toEqual value: ':', scopes: [ 'source.elixir', 'constant.other.symbol.elixir', 'punctuation.definition.constant.elixir'] 726 | expect(tokens[9]).toEqual value: 'ok', scopes: ['source.elixir', 'constant.other.symbol.elixir'] 727 | expect(tokens[10]).toEqual value: '\\nend', scopes: ['source.elixir'] 728 | 729 | it "tokenizes multiline functions with parameters", -> 730 | {tokens} = grammar.tokenizeLine('def foo(bar, [baz: value], _opts) 
do\\n :ok\\nend') 731 | expect(tokens[0]).toEqual value: 'def', scopes: ['source.elixir', 'keyword.control.elixir'] 732 | expect(tokens[1]).toEqual value: ' ', scopes: ['source.elixir'] 733 | expect(tokens[2]).toEqual value: 'foo', scopes: ['source.elixir', 'entity.name.function.elixir'] 734 | expect(tokens[3]).toEqual value: '(', scopes: ['source.elixir'] 735 | expect(tokens[4]).toEqual value: 'bar', scopes: ['source.elixir', 'parameter.variable.function.elixir'] 736 | expect(tokens[5]).toEqual value: ',', scopes: ['source.elixir', 'punctuation.separator.object.elixir'] 737 | expect(tokens[6]).toEqual value: ' ', scopes: ['source.elixir'] 738 | expect(tokens[7]).toEqual value: '[', scopes: ['source.elixir', 'punctuation.section.array.elixir'] 739 | expect(tokens[8]).toEqual value: 'baz', scopes: ['source.elixir', 'constant.other.symbol.elixir'] 740 | expect(tokens[9]).toEqual value: ':', scopes: [ 'source.elixir', 'constant.other.symbol.elixir', 'punctuation.definition.constant.elixir' ] 741 | expect(tokens[10]).toEqual value: ' ', scopes: ['source.elixir'] 742 | expect(tokens[11]).toEqual value: 'value', scopes: ['source.elixir', 'parameter.variable.function.elixir'] 743 | expect(tokens[12]).toEqual value: ']', scopes: ['source.elixir', 'punctuation.section.array.elixir'] 744 | expect(tokens[13]).toEqual value: ',', scopes: ['source.elixir', 'punctuation.separator.object.elixir'] 745 | expect(tokens[14]).toEqual value: ' ', scopes: ['source.elixir'] 746 | expect(tokens[15]).toEqual value: '_opts', scopes: ['source.elixir', 'unused.comment.elixir'] 747 | expect(tokens[16]).toEqual value: ')', scopes: ['source.elixir'] 748 | expect(tokens[17]).toEqual value: ' ', scopes: ['source.elixir'] 749 | expect(tokens[18]).toEqual value: 'do', scopes: ['source.elixir', 'keyword.control.elixir'] 750 | expect(tokens[19]).toEqual value: '\\n ', scopes: ['source.elixir'] 751 | expect(tokens[20]).toEqual value: ':', scopes: [ 'source.elixir', 'constant.other.symbol.elixir', 
'punctuation.definition.constant.elixir' ] 752 | expect(tokens[21]).toEqual value: 'ok', scopes: ['source.elixir', 'constant.other.symbol.elixir'] 753 | expect(tokens[22]).toEqual value: '\\nend', scopes: ['source.elixir'] 754 | --------------------------------------------------------------------------------