├── .gitignore ├── .coffeelintignore ├── appveyor.yml ├── CONTRIBUTING.md ├── settings └── language-python.cson ├── grammars ├── python-console.cson ├── python-traceback.cson ├── regular expressions (python).cson ├── tree-sitter-python.cson └── python.cson ├── spec ├── fixtures │ └── grammar │ │ ├── syntax_test_python.py │ │ ├── syntax_test_python_typing.py │ │ ├── syntax_test_python_lambdas.py │ │ └── syntax_test_python_functions.py ├── python-regex-spec.coffee ├── language-python-spec.coffee └── python-spec.coffee ├── package.json ├── .github ├── no-response.yml └── workflows │ └── ci.yml ├── coffeelint.json ├── README.md ├── PULL_REQUEST_TEMPLATE.md ├── LICENSE.md ├── ISSUE_TEMPLATE.md └── snippets └── language-python.cson /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | -------------------------------------------------------------------------------- /.coffeelintignore: -------------------------------------------------------------------------------- 1 | spec/fixtures 2 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | # empty appveyor 2 | build: off 3 | 4 | branches: 5 | only: 6 | - non-existing 7 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md). 2 | -------------------------------------------------------------------------------- /settings/language-python.cson: -------------------------------------------------------------------------------- 1 | '.source.python': 2 | 'editor': 3 | 'autoIndentOnPaste': false 4 | 'softTabs': true 5 | 'tabLength': 4 6 | 'commentStart': '# ' 7 | 'foldEndPattern': '^\\s*[}\\])]' 8 | 'increaseIndentPattern': '^\\s*(class|def|elif|else|except|finally|for|if|try|with|while|async\\s+(def|for|with))\\b.*:\\s*$' 9 | 'decreaseIndentPattern': '^\\s*(elif|else|except|finally)\\b.*:\\s*$' 10 | -------------------------------------------------------------------------------- /grammars/python-console.cson: -------------------------------------------------------------------------------- 1 | 'scopeName': 'text.python.console' 2 | 'name': 'Python Console' 3 | 'fileTypes': [ 4 | 'doctest' 5 | 'pycon' 6 | ] 7 | 'patterns': [ 8 | { 9 | 'match': '^(>{3}|\\.{3}|In \\[\\d+\\]:) (.+)$' 10 | 'captures': 11 | '1': 12 | 'name': 'punctuation.separator.prompt.python.console' 13 | '2': 14 | 'patterns': [ 15 | 'include': 'source.python' 16 | ] 17 | } 18 | ] 19 | -------------------------------------------------------------------------------- /spec/fixtures/grammar/syntax_test_python.py: -------------------------------------------------------------------------------- 1 | # SYNTAX TEST "source.python" 2 | 3 | 4 | def my_func(first, second=False, *third, **forth): 5 | # <- storage.type.function 6 | # ^^^^^^^ entity.name.function 7 | # ^ punctuation.definition.parameters.begin 8 | # ^^^^^ ^^^^^^ ^^^^^ ^^^^^ variable.parameter.function 9 | # ^ ^ ^ punctuation.separator.parameters 10 | # ^ keyword.operator.assignment 11 | # ^^^^^ constant 12 | # ^ ^^ keyword.operator.unpacking.arguments 13 | # ^ punctuation.definition.function.begin 14 | pass 15 | -------------------------------------------------------------------------------- /package.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "language-python", 3 | "version": "0.53.6", 4 | "engines": { 5 | "atom": "*", 6 | "node": "*" 7 | }, 8 | "description": "Python language support in Atom", 9 | "keywords": [ 10 | "tree-sitter" 11 | ], 12 | "homepage": "https://atom.github.io/language-python", 13 | "repository": { 14 | "type": "git", 15 | "url": "https://github.com/atom/language-python.git" 16 | }, 17 | "license": "MIT", 18 | "bugs": { 19 | "url": "https://github.com/atom/language-python/issues" 20 | }, 21 | "dependencies": { 22 | "atom-grammar-test": "^0.6.4", 23 | "tree-sitter-python": "^0.17.0" 24 | }, 25 | "devDependencies": { 26 | "coffeelint": "^1.10.1" 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /.github/no-response.yml: -------------------------------------------------------------------------------- 1 | # Configuration for probot-no-response - https://github.com/probot/no-response 2 | 3 | # Number of days of inactivity before an issue is closed for lack of response 4 | daysUntilClose: 28 5 | 6 | # Label requiring a response 7 | responseRequiredLabel: more-information-needed 8 | 9 | # Comment to post when closing an issue for lack of response. Set to `false` to disable. 10 | closeComment: > 11 | This issue has been automatically closed because there has been no response 12 | to our request for more information from the original author. With only the 13 | information that is currently in the issue, we don't have enough information 14 | to take action. Please reach out if you have or find the answers we need so 15 | that we can investigate further. 16 | -------------------------------------------------------------------------------- /coffeelint.json: -------------------------------------------------------------------------------- 1 | { 2 | "max_line_length": { 3 | "level": "ignore" 4 | }, 5 | "no_empty_param_list": { 6 | "level": "error" 7 | }, 8 | "arrow_spacing": { 9 | "level": "error" 10 | }, 11 | "no_interpolation_in_single_quotes": { 12 | "level": "error" 13 | }, 14 | "no_debugger": { 15 | "level": "error" 16 | }, 17 | "prefer_english_operator": { 18 | "level": "error" 19 | }, 20 | "colon_assignment_spacing": { 21 | "spacing": { 22 | "left": 0, 23 | "right": 1 24 | }, 25 | "level": "error" 26 | }, 27 | "braces_spacing": { 28 | "spaces": 0, 29 | "level": "error" 30 | }, 31 | "spacing_after_comma": { 32 | "level": "error" 33 | }, 34 | "no_stand_alone_at": { 35 | "level": "error" 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ##### Atom and all repositories under Atom will be archived on December 15, 2022. Learn more in our [official announcement](https://github.blog/2022-06-08-sunsetting-atom/) 2 | # Python language support in Atom 3 | ![ci](https://github.com/atom/language-python/workflows/ci/badge.svg) 4 | [![Dependency Status](https://david-dm.org/atom/language-python.svg)](https://david-dm.org/atom/language-python) 5 | 6 | Adds syntax highlighting and snippets to Python files in Atom. 7 | 8 | Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate) from the [Python TextMate bundle](https://github.com/textmate/python.tmbundle). 9 | 10 | Contributions are greatly appreciated. Please fork this repository and open a pull request to add snippets, make grammar tweaks, etc. 
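
If you are adding or changing grammar rules, the usual way to cover the change is a syntax-test fixture under `spec/fixtures/grammar/` (the package depends on `atom-grammar-test`, which `spec/python-spec.coffee` requires). Below is a minimal, hypothetical sketch of such a fixture — the file name, the `greet`/`name` identifiers, and the exact assertions are illustrative only, with the scope names borrowed from the existing `syntax_test_python.py`; the column positions of the carets are what the test checks, so they must line up with the code line above them.

```python
# SYNTAX TEST "source.python"
# Hypothetical fixture sketch: each caret line asserts the scopes applied to the
# columns it covers on the nearest code line above; `# <-` asserts column 0.


def greet(name=False):
# <- storage.type.function
#   ^^^^^ entity.name.function
#        ^ punctuation.definition.parameters.begin
#         ^^^^ variable.parameter.function
#             ^ keyword.operator.assignment
#              ^^^^^ constant
    pass
```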
11 | -------------------------------------------------------------------------------- /grammars/python-traceback.cson: -------------------------------------------------------------------------------- 1 | 'scopeName': 'text.python.traceback' 2 | 'name': 'Python Traceback' 3 | 'fileTypes': [ 4 | 'pytb' 5 | ] 6 | 'patterns': [ 7 | { 8 | 'match': '^ File ("[^"]+"), line (\\d+)(?:, in (.+))?$' 9 | 'captures': { 10 | '1': 11 | 'name': 'string.python.traceback' 12 | '2': 13 | 'name': 'constant.numeric.python.traceback' 14 | '3': 15 | 'name': 'entity.name.function.python.traceback' 16 | } 17 | } 18 | { 19 | 'match': '^ (.+)$' 20 | 'captures': 21 | '1': 22 | 'patterns': [ 23 | 'include': 'source.python' 24 | ] 25 | } 26 | { 27 | 'match': '^([^\\s:]+):(?: (.+))?$' 28 | 'captures': 29 | '1': 30 | 'name': 'entity.name.type.class.python.traceback' 31 | '2': 32 | 'name': 'string.python.traceback' 33 | } 34 | ] 35 | -------------------------------------------------------------------------------- /spec/fixtures/grammar/syntax_test_python_typing.py: -------------------------------------------------------------------------------- 1 | # SYNTAX TEST "source.python" 2 | 3 | 4 | def right_hand_split( 5 | # <- storage.type.function 6 | # ^^^^^^^^^^^^^^^^ entity.name.function 7 | # ^ punctuation.definition.parameters.begin 8 | line: Line, py36: bool = False, omit: Collection[LeafID] = () 9 | # ^^^^ variable.parameter.function 10 | # ^ punctuation.separator 11 | # ^^^^ storage.type 12 | # ^ punctuation.separator.parameters 13 | # ^^^^ variable.parameter.function 14 | # ^ punctuation.separator 15 | # ^^^^ storage.type 16 | # ^ keyword.operator.assignment 17 | # ^^^^^ constant 18 | # ^ punctuation.separator.parameters 19 | # ^^^^ variable.parameter.function 20 | # ^ punctuation.separator 21 | ) -> Iterator[Line]: 22 | # ^ punctuation.definition.function.begin 23 | pass 24 | -------------------------------------------------------------------------------- /PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ### Requirements 2 | 3 | * Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion. 
4 | * All new code requires tests to ensure against regressions 5 | 6 | ### Description of the Change 7 | 8 | 13 | 14 | ### Alternate Designs 15 | 16 | 17 | 18 | ### Benefits 19 | 20 | 21 | 22 | ### Possible Drawbacks 23 | 24 | 25 | 26 | ### Applicable Issues 27 | 28 | 29 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: 3 | - pull_request 4 | - push 5 | 6 | jobs: 7 | Test: 8 | if: "!contains(github.event.head_commit.message, '[skip ci]')" 9 | runs-on: ${{ matrix.os }} 10 | strategy: 11 | fail-fast: false 12 | matrix: 13 | os: 14 | - ubuntu-latest 15 | - macos-latest 16 | - windows-latest 17 | atom_channel: 18 | - stable 19 | - nightly 20 | steps: 21 | - uses: actions/checkout@v2 22 | - name: Cache 23 | uses: actions/cache@v2 24 | with: 25 | path: | 26 | 'node_modules' 27 | 'C:/Program Files (x86)/MSBuild/Microsoft.Cpp/v4.0/v140' 28 | key: ${{ runner.os }}-${{ matrix.atom_channel }}-${{ hashFiles('package.json') }} 29 | 30 | - uses: UziTech/action-setup-atom@v1 31 | with: 32 | channel: ${{ matrix.atom_channel }} 33 | 34 | - name: Install Visual Studio 2015 on Windows 35 | if: ${{ contains(matrix.os, 'windows') }} 36 | run: | 37 | choco install visualcpp-build-tools --version=14.0.25420.1 --ignore-dependencies -y --params "'/IncludeRequired'" 38 | echo ::set-env name=VCTargetsPath::'C:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\v140' 39 | 40 | - name: Install dependencies 41 | run: apm install 42 | 43 | - name: Run tests 44 | run: apm test 45 | 46 | Skip: 47 | if: contains(github.event.head_commit.message, '[skip ci]') 48 | runs-on: ubuntu-latest 49 | steps: 50 | - name: Skip CI 🚫 51 | run: echo skip ci 52 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | Copyright (c) 2014 GitHub Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the 5 | "Software"), to deal in the Software without restriction, including 6 | without limitation the rights to use, copy, modify, merge, publish, 7 | distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to 9 | the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | 22 | -------------------------------------------------------------------- 23 | 24 | This package was derived from a TextMate bundle located at 25 | https://github.com/textmate/python.tmbundle and distributed under the following 26 | license, located in `README.mdown`: 27 | 28 | Permission to copy, use, modify, sell and distribute this 29 | software is granted. 
This software is provided "as is" without 30 | express or implied warranty, and with no claim as to its 31 | suitability for any purpose. 32 | -------------------------------------------------------------------------------- /spec/fixtures/grammar/syntax_test_python_lambdas.py: -------------------------------------------------------------------------------- 1 | # SYNTAX TEST "source.python" 2 | 3 | 4 | my_func2 = lambda x, y=2, *z, **kw: x + y + 1 5 | # ^ keyword.operator.assignment 6 | # ^^^^^^^^^^^^^^^^^^^^^^^ meta.function.inline 7 | # ^^^^^ storage.type.function.inline 8 | # ^^^^^^^^^^^^^^^^ meta.function.inline.parameters 9 | # ^ ^ ^ ^^ variable.parameter.function 10 | # ^ ^ ^ punctuation.separator.parameters 11 | # ^ variable.parameter.function 12 | # ^ keyword.operator.assignment 13 | # ^ constant 14 | # ^ ^^ keyword.operator.unpacking.arguments 15 | # ^ variable.parameter.function 16 | # ^ punctuation.definition.function.begin 17 | 18 | 19 | lambda x, z = 4: x * z 20 | # ^^^^^^^^^^^^^ meta.function.inline.python 21 | # <- storage.type.function.inline.python 22 | # ^^^^^^^^ meta.function.inline.parameters.python 23 | # ^ ^ variable.parameter.function.python 24 | # ^ punctuation.separator.parameters.python 25 | # ^ keyword.operator.assignment.python 26 | # ^ constant.numeric.integer.decimal.python 27 | # ^ punctuation.definition.function.begin.python 28 | 29 | 30 | lambda: None 31 | # ^^^^ meta.function.inline.python 32 | # <- storage.type.function.inline.python 33 | # ^ punctuation.definition.function.begin.python 34 | 35 | 36 | not_a_lambda.foo 37 | # <- ! meta.function.inline.python 38 | 39 | 40 | lambda_not.foo 41 | # <- ! meta.function.inline.python 42 | -------------------------------------------------------------------------------- /ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 8 | 9 | ### Prerequisites 10 | 11 | * [ ] Put an X between the brackets on this line if you have done all of the following: 12 | * Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode 13 | * Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/ 14 | * Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq 15 | * Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom 16 | * Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages 17 | 18 | ### Description 19 | 20 | [Description of the issue] 21 | 22 | ### Steps to Reproduce 23 | 24 | 1. [First Step] 25 | 2. [Second Step] 26 | 3. [and so on...] 27 | 28 | **Expected behavior:** [What you expect to happen] 29 | 30 | **Actual behavior:** [What actually happens] 31 | 32 | **Reproduces how often:** [What percentage of the time does it reproduce?] 33 | 34 | ### Versions 35 | 36 | You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running. 37 | 38 | ### Additional Information 39 | 40 | Any additional information, configuration or data that might be necessary to reproduce the issue. 
41 | -------------------------------------------------------------------------------- /spec/fixtures/grammar/syntax_test_python_functions.py: -------------------------------------------------------------------------------- 1 | # SYNTAX TEST "source.python" 2 | 3 | 4 | # it "tokenizes async function definitions" 5 | async def test(param): 6 | # <- meta.function.python storage.modifier.async.python 7 | # ^^^ storage.type.function.python 8 | # ^^^^ entity.name.function.python 9 | pass 10 | 11 | 12 | # it "tokenizes comments inside function parameters" 13 | def test(arg, # comment') 14 | # <- meta.function.python storage.type.function.python 15 | # ^^^^ entity.name.function.python 16 | # ^ punctuation.definition.parameters.begin.python 17 | # ^^^^^^^^^^^^^^^^ meta.function.parameters.python 18 | # ^^^ variable.parameter.function.python 19 | # ^ punctuation.separator.parameters.python 20 | # ^ comment.line.number-sign.python punctuation.definition.comment.python 21 | # ^^^^^^^ comment.line.number-sign.python 22 | ): 23 | pass 24 | 25 | 26 | def __init__( 27 | # <- meta.function.python storage.type.function.python 28 | # ^^^^^^^^ entity.name.function.python support.function.magic.python 29 | # ^ punctuation.definition.parameters.begin.python 30 | self, 31 | # ^^^^^ meta.function.parameters.python 32 | # ^^^^ variable.parameter.function.python 33 | # ^ punctuation.separator.parameters.python 34 | codec, # comment 35 | # ^^^^^^^^^^^^^^^^ meta.function.parameters.python 36 | # ^^^^^ variable.parameter.function.python 37 | # ^ punctuation.separator.parameters.python 38 | # ^ comment.line.number-sign.python punctuation.definition.comment.python 39 | # ^^^^^^^ comment.line.number-sign.python 40 | config 41 | # ^^^^^^ meta.function.parameters.python variable.parameter.function.python 42 | # >> meta.function.python 43 | ): 44 | # <- punctuation.definition.parameters.end.python 45 | #^ punctuation.definition.function.begin.python 46 | pass 47 | 48 | 49 | # it "tokenizes a function definition with annotations" 50 | def f(a: None, b: int = 3) -> int: 51 | # <- meta.function.python storage.type.function.python 52 | # ^ entity.name.function.python 53 | # ^ punctuation.definition.parameters.begin.python 54 | # ^^^^^^^^^^^^^^^^^^^ meta.function.parameters.python 55 | # ^ variable.parameter.function.python 56 | # ^ punctuation.separator.python 57 | # ^^^^ storage.type.python 58 | # ^ punctuation.separator.parameters.python 59 | # ^ variable.parameter.function.python 60 | # ^ punctuation.separator.python 61 | # ^^^ storage.type.python 62 | # ^ keyword.operator.assignment.python 63 | # ^ constant.numeric.integer.decimal.python 64 | # ^ punctuation.definition.parameters.end.python 65 | # ^^ keyword.operator.function-annotation.python 66 | # ^^^ storage.type.python 67 | # ^ punctuation.definition.function.begin.python 68 | pass 69 | 70 | 71 | # it "tokenizes complex function calls" 72 | torch.nn.BCELoss()(Variable(bayes_optimal_prob, 1, requires_grad=False), Yvar).data[0] 73 | # ^^^^^^^^^ meta.method-call.python 74 | # ^^^^^^^ entity.name.function.python 75 | # ^ punctuation.definition.arguments.begin.bracket.round.python 76 | # ^ punctuation.definition.arguments.end.bracket.round.python 77 | # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ meta.function-call.python 78 | # ^ punctuation.definition.arguments.begin.bracket.round.python 79 | # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ meta.function-call.arguments.python 80 | # ^^^^^^^^ entity.name.function.python 81 | # ^ 
punctuation.definition.arguments.begin.bracket.round.python 82 | # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ meta.function-call.arguments.python 83 | # ^^^^^^^^^^^^^ variable.parameter.function.python 84 | # ^^^^^ constant.language.python 85 | # ^ punctuation.definition.arguments.end.bracket.round.python 86 | # ^ punctuation.separator.arguments.python 87 | # ^ punctuation.definition.arguments.end.bracket.round.python 88 | # ^ punctuation.separator.property.period.python 89 | -------------------------------------------------------------------------------- /spec/python-regex-spec.coffee: -------------------------------------------------------------------------------- 1 | describe 'Python regular expression grammar', -> 2 | grammar = null 3 | 4 | beforeEach -> 5 | atom.config.set('core.useTreeSitterParsers', false) 6 | 7 | waitsForPromise -> 8 | atom.packages.activatePackage('language-python') 9 | 10 | runs -> 11 | grammar = atom.grammars.grammarForScopeName('source.regexp.python') 12 | 13 | describe 'character classes', -> 14 | it 'does not recursively match character classes', -> 15 | {tokens} = grammar.tokenizeLine '[.:[\\]@]' 16 | expect(tokens[0]).toEqual value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] 17 | expect(tokens[1]).toEqual value: '.:[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp'] 18 | expect(tokens[2]).toEqual value: '\\]', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'constant.character.escape.backslash.regexp'] 19 | expect(tokens[3]).toEqual value: '@', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp'] 20 | expect(tokens[4]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp'] 21 | 22 | it 'does not end the character class early if the first character is a ]', -> 23 | {tokens} = grammar.tokenizeLine '[][]' 24 | expect(tokens[0]).toEqual value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] 25 | expect(tokens[1]).toEqual value: '][', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp'] 26 | expect(tokens[2]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp'] 27 | 28 | {tokens} = grammar.tokenizeLine '[^][]' 29 | expect(tokens[0]).toEqual value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] 30 | expect(tokens[1]).toEqual value: '^', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'keyword.operator.negation.regexp'] 31 | expect(tokens[2]).toEqual value: '][', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp'] 32 | expect(tokens[3]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp'] 33 | 34 | it 'escapes the character following any backslash', -> 35 | {tokens} = grammar.tokenizeLine '''\\q\\(\\[\\'\\"\\?\\^\\-\\*\\.\\#''' 36 | expect(tokens[0]).toEqual value: '\\q', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] 37 | expect(tokens[1]).toEqual value: '\\(', scopes: ['source.regexp.python', 
'constant.character.escape.backslash.regexp'] 38 | expect(tokens[2]).toEqual value: '\\[', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] 39 | expect(tokens[3]).toEqual value: '\\\'', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] 40 | expect(tokens[4]).toEqual value: '\\"', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] 41 | expect(tokens[5]).toEqual value: '\\?', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] 42 | expect(tokens[6]).toEqual value: '\\^', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] 43 | expect(tokens[7]).toEqual value: '\\-', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] 44 | expect(tokens[8]).toEqual value: '\\*', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] 45 | expect(tokens[9]).toEqual value: '\\.', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] 46 | expect(tokens[10]).toEqual value: '\\#', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] 47 | 48 | {tokens} = grammar.tokenizeLine '''(\\()\\)''' 49 | expect(tokens[0]).toEqual value: '(', scopes: ['source.regexp.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] 50 | expect(tokens[1]).toEqual value: '\\(', scopes: ['source.regexp.python', 'meta.group.regexp', 'constant.character.escape.backslash.regexp'] 51 | expect(tokens[2]).toEqual value: ')', scopes: ['source.regexp.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] 52 | expect(tokens[3]).toEqual value: '\\)', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] 53 | -------------------------------------------------------------------------------- /grammars/regular expressions (python).cson: -------------------------------------------------------------------------------- 1 | 'name': 'Regular Expressions (Python)' 2 | 'scopeName': 'source.regexp.python' 3 | 'foldingStartMarker': '(/\\*|\\{|\\()' 4 | 'foldingStopMarker': '(\\*/|\\}|\\))' 5 | 'fileTypes': [ 6 | 're' 7 | ] 8 | 'patterns': [ 9 | { 10 | 'match': '\\\\[bBAZzG]|\\^|\\$' 11 | 'name': 'keyword.control.anchor.regexp' 12 | } 13 | { 14 | 'match': '\\\\[1-9][0-9]?' 15 | 'name': 'keyword.other.back-reference.regexp' 16 | } 17 | { 18 | 'match': '\\\\.' 19 | 'name': 'constant.character.escape.backslash.regexp' 20 | } 21 | { 22 | 'match': '[?+*][?+]?|\\{(\\d+,\\d+|\\d+,|,\\d+|\\d+)\\}\\??' 23 | 'name': 'keyword.operator.quantifier.regexp' 24 | } 25 | { 26 | 'match': '\\|' 27 | 'name': 'keyword.operator.or.regexp' 28 | } 29 | { 30 | 'begin': '\\(\\?\\#' 31 | 'end': '\\)' 32 | 'name': 'comment.block.regexp' 33 | } 34 | { 35 | 'comment': 'We are restrictive in what we allow to go after the comment character to avoid false positives, since the availability of comments depend on regexp flags.' 36 | 'match': '(?<=^|\\s)#\\s[[a-zA-Z0-9,. \\t?!-:][^\\x{00}-\\x{7F}]]*$' 37 | 'name': 'comment.line.number-sign.regexp' 38 | } 39 | { 40 | 'match': '\\(\\?[iLmsux]+\\)' 41 | 'name': 'keyword.other.option-toggle.regexp' 42 | } 43 | { 44 | 'match': '(\\()(\\?P=([a-zA-Z_][a-zA-Z_0-9]*\\w*))(\\))' 45 | 'name': 'keyword.other.back-reference.named.regexp' 46 | } 47 | { 48 | 'begin': '(\\()((\\?=)|(\\?!)|(\\?<=)|(\\?)|(\\?:))?' 
93 | 'beginCaptures': 94 | '1': 95 | 'name': 'punctuation.definition.group.regexp' 96 | '3': 97 | 'name': 'punctuation.definition.group.capture.regexp' 98 | '4': 99 | 'name': 'entity.name.section.group.regexp' 100 | '5': 101 | 'name': 'punctuation.definition.group.capture.regexp' 102 | '6': 103 | 'name': 'punctuation.definition.group.no-capture.regexp' 104 | 'end': '(\\))' 105 | 'endCaptures': 106 | '1': 107 | 'name': 'punctuation.definition.group.regexp' 108 | 'name': 'meta.group.regexp' 109 | 'patterns': [ 110 | { 111 | 'include': '$self' 112 | } 113 | ] 114 | } 115 | { 116 | 'begin': '(\\[)(\\^)?' 117 | 'beginCaptures': 118 | '1': 119 | 'name': 'punctuation.definition.character-class.begin.regexp' 120 | '2': 121 | 'name': 'keyword.operator.negation.regexp' 122 | 'end': '(?!\\G)\\]' # Character classes cannot be empty (if the first character is a ] it is treated literally) 123 | 'endCaptures': 124 | '0': 125 | 'name': 'punctuation.definition.character-class.end.regexp' 126 | 'name': 'constant.other.character-class.set.regexp' 127 | 'patterns': [ 128 | { 129 | 'match': '\\\\[wWsSdDhH]' 130 | 'name': 'constant.character.character-class.regexp' 131 | } 132 | { 133 | 'match': '\\\\.' 134 | 'name': 'constant.character.escape.backslash.regexp' 135 | } 136 | { 137 | 'captures': 138 | '2': 139 | 'name': 'constant.character.escape.backslash.regexp' 140 | '4': 141 | 'name': 'constant.character.escape.backslash.regexp' 142 | 'match': '((\\\\.)|.)\\-((\\\\.)|[^\\]])' 143 | 'name': 'constant.other.character-class.range.regexp' 144 | } 145 | ] 146 | } 147 | ] 148 | -------------------------------------------------------------------------------- /snippets/language-python.cson: -------------------------------------------------------------------------------- 1 | '.source.python': 2 | '#!/usr/bin/env python': 3 | 'prefix': 'env' 4 | 'body': '#!/usr/bin/env python\n' 5 | '#!/usr/bin/env python3': 6 | 'prefix': 'env3' 7 | 'body': '#!/usr/bin/env python3\n' 8 | '# coding=utf-8': 9 | 'prefix': 'enc' 10 | 'body': '# -*- coding: utf-8 -*-\n' 11 | 'Import': 12 | 'prefix': 'im' 13 | 'body': 'import ${1:package/module}' 14 | 'From/Import': 15 | 'prefix': 'fim' 16 | 'body': 'from ${1:package/module} import ${2:names}' 17 | 'Assert Equal': 18 | 'prefix': 'ase' 19 | 'body': 'self.assertEqual(${1:expected}, ${2:actual}${3:, \'${4:message}\'})$0' 20 | 'Assert Not Equal': 21 | 'prefix': 'asne' 22 | 'body': 'self.assertNotEqual(${1:expected}, ${2:actual}${3:, \'${4:message}\'})$0' 23 | 'Assert Raises': 24 | 'prefix': 'asr' 25 | 'body': 'self.assertRaises(${1:exception}, ${2:callable})$0' 26 | 'Assert True': 27 | 'prefix': 'ast' 28 | 'body': 'self.assertTrue(${1:actual}${2:, \'${3:message}\'})$0' 29 | 'Assert False': 30 | 'prefix': 'asf' 31 | 'body': 'self.assertFalse(${1:actual}${2:, \'${3:message}\'})$0' 32 | 'Assert Is': 33 | 'prefix': 'asi' 34 | 'body': 'self.assertIs(${1:expected}, ${2:actual}${3:, \'${4:message}\'})$0' 35 | 'Assert Is Not': 36 | 'prefix': 'asint' 37 | 'body': 'self.assertIsNot(${1:expected}, ${2:actual}${3:, \'${4:message}\'})$0' 38 | 'Assert Is None': 39 | 'prefix': 'asino' 40 | 'body': 'self.assertIsNone(${1:actual}${2:, \'${3:message}\'})$0' 41 | 'Assert Is Not None': 42 | 'prefix': 'asinno' 43 | 'body': 'self.assertIsNotNone(${1:actual}${2:, \'${3:message}\'})$0' 44 | 'Assert In': 45 | 'prefix': 'asin' 46 | 'body': 'self.assertIn(${1:needle}, ${2:haystack}${3:, \'${4:message}\'})$0' 47 | 'Assert Not In': 48 | 'prefix': 'asni' 49 | 'body': 'self.assertNotIn(${1:needle}, ${2:haystack}${3:, 
\'${4:message}\'})$0' 50 | 'Assert': 51 | 'prefix': 'as' 52 | 'body': 'self.assert_(${1:boolean expression}${2:, \'${3:message}\'})$0' 53 | 'Fail (a test)': 54 | 'prefix': 'fail' 55 | 'body': 'self.fail(\'${1:message}\')$0' 56 | 'New Class': 57 | 'prefix': 'class' 58 | 'body': 'class ${1:ClassName}(${2:object}):\n\t"""${3:docstring for $1.}"""\n\n\tdef __init__(self, ${4:arg}):\n\t\t${5:super($1, self).__init__()}\n\t\tself.arg = arg\n\t\t$0' 59 | 'New Method': 60 | 'prefix': 'defs' 61 | 'body': 'def ${1:mname}(self, ${2:arg}):\n\t${3:pass}' 62 | 'New Function': 63 | 'prefix': 'def' 64 | 'body': 'def ${1:fname}(${2:arg}):\n\t${3:pass}' 65 | 'New Property': 66 | 'prefix': 'property' 67 | 'body': 'def ${1:foo}():\n doc = "${2:The $1 property.}"\n def fget(self):\n ${3:return self._$1}\n def fset(self, value):\n ${4:self._$1 = value}\n def fdel(self):\n ${5:del self._$1}\n return locals()\n$1 = property(**$1())$0' 68 | 'if': 69 | 'prefix': 'if' 70 | 'body': 'if ${1:condition}:\n\t${2:pass}' 71 | 'for': 72 | 'prefix': 'for' 73 | 'body': 'for ${1:value} in ${2:variable}:\n\t${3:pass}' 74 | 'while': 75 | 'prefix': 'while' 76 | 'body': 'while ${1:condition}:\n\t${2:pass}' 77 | 'with statement': 78 | 'prefix': 'with' 79 | 'body': 'with ${1:expression} as ${2:target}:\n\t${3:pass}' 80 | 'Try/Except/Else/Finally': 81 | 'prefix': 'tryef' 82 | 'body': 'try:\n\t${1:pass}\nexcept${2: ${3:Exception} as ${4:e}}:\n\t${5:raise}\nelse:\n\t${6:pass}\nfinally:\n\t${7:pass}' 83 | 'Try/Except/Else': 84 | 'prefix': 'trye' 85 | 'body': 'try:\n\t${1:pass}\nexcept ${2:Exception} as ${3:e}:\n\t${4:raise $3}\nelse:\n\t${5:pass}' 86 | 'Try/Except/Finally': 87 | 'prefix': 'tryf' 88 | 'body': 'try:\n\t${1:pass}\nexcept ${2:Exception} as ${3:e}:\n\t${4:raise $3}\nfinally:\n\t${5:pass}' 89 | 'Try/Except': 90 | 'prefix': 'try' 91 | 'body': 'try:\n\t${1:pass}\nexcept ${2:Exception} as ${3:e}:\n\t${4:raise $3}' 92 | 'List Comprehension': 93 | 'prefix': 'lc' 94 | 'body': '[${1:value} for ${2:value} in ${3:variable}]' 95 | 'List Comprehension If Else': 96 | 'prefix': 'lcie' 97 | 'body': '[${1:value} if ${2:condition} else ${3:value} for ${4:value} in ${5:variable}]' 98 | 'Dictionary Comprehension': 99 | 'prefix': 'dc' 100 | 'body': '{${1:key}: ${2:value} for ${3:key}, ${4:value} in ${5:variable}}' 101 | 'Set Comprehension': 102 | 'prefix': 'sc' 103 | 'body': '{${1:value} for ${2:value} in ${3:variable}}' 104 | 'PDB set trace': 105 | 'prefix': 'pdb' 106 | 'body': 'import pdb; pdb.set_trace()' 107 | 'iPDB set trace': 108 | 'prefix': 'ipdb' 109 | 'body': 'import ipdb; ipdb.set_trace()' 110 | 'rPDB set trace': 111 | 'prefix': 'rpdb' 112 | 'body': 'import rpdb2; rpdb2.start_embedded_debugger(\'${1:debug_password}\')$0' 113 | 'PuDB set trace': 114 | 'prefix': 'pudb' 115 | 'body': 'import pudb; pudb.set_trace()' 116 | '__magic__': 117 | 'prefix': '__' 118 | 'body': '__${1:init}__' 119 | 'if __name__ == \'__main__\'': 120 | 'prefix': 'ifmain' 121 | 'body': 'if __name__ == \'__main__\':\n\t${1:main()}$0' 122 | -------------------------------------------------------------------------------- /spec/language-python-spec.coffee: -------------------------------------------------------------------------------- 1 | describe 'Python settings', -> 2 | [editor, languageMode] = [] 3 | 4 | afterEach -> 5 | editor.destroy() 6 | 7 | beforeEach -> 8 | atom.config.set('core.useTreeSitterParsers', false) 9 | 10 | waitsForPromise -> 11 | atom.workspace.open().then (o) -> 12 | editor = o 13 | languageMode = editor.languageMode 14 | 15 | waitsForPromise -> 
16 | atom.packages.activatePackage('language-python') 17 | 18 | it 'matches lines correctly using the increaseIndentPattern', -> 19 | increaseIndentRegex = languageMode.increaseIndentRegexForScopeDescriptor(['source.python']) 20 | 21 | expect(increaseIndentRegex.testSync('for i in range(n):')).toBeTruthy() 22 | expect(increaseIndentRegex.testSync(' for i in range(n):')).toBeTruthy() 23 | expect(increaseIndentRegex.testSync('async for i in range(n):')).toBeTruthy() 24 | expect(increaseIndentRegex.testSync(' async for i in range(n):')).toBeTruthy() 25 | expect(increaseIndentRegex.testSync('class TheClass(Object):')).toBeTruthy() 26 | expect(increaseIndentRegex.testSync(' class TheClass(Object):')).toBeTruthy() 27 | expect(increaseIndentRegex.testSync('def f(x):')).toBeTruthy() 28 | expect(increaseIndentRegex.testSync(' def f(x):')).toBeTruthy() 29 | expect(increaseIndentRegex.testSync('async def f(x):')).toBeTruthy() 30 | expect(increaseIndentRegex.testSync(' async def f(x):')).toBeTruthy() 31 | expect(increaseIndentRegex.testSync('if this_var == that_var:')).toBeTruthy() 32 | expect(increaseIndentRegex.testSync(' if this_var == that_var:')).toBeTruthy() 33 | expect(increaseIndentRegex.testSync('elif this_var == that_var:')).toBeTruthy() 34 | expect(increaseIndentRegex.testSync(' elif this_var == that_var:')).toBeTruthy() 35 | expect(increaseIndentRegex.testSync('else:')).toBeTruthy() 36 | expect(increaseIndentRegex.testSync(' else:')).toBeTruthy() 37 | expect(increaseIndentRegex.testSync('except Exception:')).toBeTruthy() 38 | expect(increaseIndentRegex.testSync(' except Exception:')).toBeTruthy() 39 | expect(increaseIndentRegex.testSync('except Exception as e:')).toBeTruthy() 40 | expect(increaseIndentRegex.testSync(' except Exception as e:')).toBeTruthy() 41 | expect(increaseIndentRegex.testSync('finally:')).toBeTruthy() 42 | expect(increaseIndentRegex.testSync(' finally:')).toBeTruthy() 43 | expect(increaseIndentRegex.testSync('with open("filename") as f:')).toBeTruthy() 44 | expect(increaseIndentRegex.testSync(' with open("filename") as f:')).toBeTruthy() 45 | expect(increaseIndentRegex.testSync('async with open("filename") as f:')).toBeTruthy() 46 | expect(increaseIndentRegex.testSync(' async with open("filename") as f:')).toBeTruthy() 47 | expect(increaseIndentRegex.testSync('while True:')).toBeTruthy() 48 | expect(increaseIndentRegex.testSync(' while True:')).toBeTruthy() 49 | expect(increaseIndentRegex.testSync('\t\t while True:')).toBeTruthy() 50 | 51 | it 'does not match lines incorrectly using the increaseIndentPattern', -> 52 | increaseIndentRegex = languageMode.increaseIndentRegexForScopeDescriptor(['source.python']) 53 | 54 | expect(increaseIndentRegex.testSync('for i in range(n)')).toBeFalsy() 55 | expect(increaseIndentRegex.testSync('class TheClass(Object)')).toBeFalsy() 56 | expect(increaseIndentRegex.testSync('def f(x)')).toBeFalsy() 57 | expect(increaseIndentRegex.testSync('if this_var == that_var')).toBeFalsy() 58 | expect(increaseIndentRegex.testSync('"for i in range(n):"')).toBeFalsy() 59 | 60 | it 'matches lines correctly using the decreaseIndentPattern', -> 61 | decreaseIndentRegex = languageMode.decreaseIndentRegexForScopeDescriptor(['source.python']) 62 | 63 | expect(decreaseIndentRegex.testSync('elif this_var == that_var:')).toBeTruthy() 64 | expect(decreaseIndentRegex.testSync(' elif this_var == that_var:')).toBeTruthy() 65 | expect(decreaseIndentRegex.testSync('else:')).toBeTruthy() 66 | expect(decreaseIndentRegex.testSync(' else:')).toBeTruthy() 67 | 
expect(decreaseIndentRegex.testSync('except Exception:')).toBeTruthy() 68 | expect(decreaseIndentRegex.testSync(' except Exception:')).toBeTruthy() 69 | expect(decreaseIndentRegex.testSync('except Exception as e:')).toBeTruthy() 70 | expect(decreaseIndentRegex.testSync(' except Exception as e:')).toBeTruthy() 71 | expect(decreaseIndentRegex.testSync('finally:')).toBeTruthy() 72 | expect(decreaseIndentRegex.testSync(' finally:')).toBeTruthy() 73 | expect(decreaseIndentRegex.testSync('\t\t finally:')).toBeTruthy() 74 | 75 | it 'does not match lines incorrectly using the decreaseIndentPattern', -> 76 | decreaseIndentRegex = languageMode.decreaseIndentRegexForScopeDescriptor(['source.python']) 77 | 78 | # NOTE! This first one is different from most other rote tests here. 79 | expect(decreaseIndentRegex.testSync('else: expression()')).toBeFalsy() 80 | expect(decreaseIndentRegex.testSync('elif this_var == that_var')).toBeFalsy() 81 | expect(decreaseIndentRegex.testSync(' elif this_var == that_var')).toBeFalsy() 82 | expect(decreaseIndentRegex.testSync('else')).toBeFalsy() 83 | expect(decreaseIndentRegex.testSync(' "finally:"')).toBeFalsy() 84 | -------------------------------------------------------------------------------- /grammars/tree-sitter-python.cson: -------------------------------------------------------------------------------- 1 | name: 'Python' 2 | scopeName: 'source.python' 3 | type: 'tree-sitter' 4 | parser: 'tree-sitter-python' 5 | 6 | firstLineRegex: [ 7 | # shebang line 8 | '^#![ \\t]*/.*\\bpython[\\d\\.]*\\b' 9 | 10 | # vim modeline 11 | 'vim\\b.*\\bset\\b.*\\b(filetype|ft|syntax)=python' 12 | ] 13 | 14 | fileTypes: [ 15 | 'py' 16 | 'pyi' 17 | 'pyw' 18 | 'gyp' 19 | 'gypi' 20 | 'SConstruct' 21 | 'Sconstruct' 22 | 'sconstruct' 23 | 'SConscript' 24 | 'wsgi' 25 | ] 26 | 27 | folds: [ 28 | { 29 | type: ['if_statement'] 30 | start: {type: ':'} 31 | end: {type: ['elif_clause', 'else_clause']} 32 | }, 33 | { 34 | type: [ 35 | 'if_statement' 36 | 'elif_clause' 37 | 'else_clause' 38 | 'for_statement' 39 | 'try_statement' 40 | 'with_statement' 41 | 'while_statement' 42 | 'class_definition' 43 | 'function_definition' 44 | 'async_function_definition' 45 | ] 46 | start: {type: ':'} 47 | }, 48 | { 49 | start: {type: '(', index: 0} 50 | end: {type: ')', index: -1} 51 | }, 52 | { 53 | start: {type: '[', index: 0} 54 | end: {type: ']', index: -1} 55 | }, 56 | { 57 | start: {type: '{', index: 0} 58 | end: {type: '}', index: -1} 59 | } 60 | ] 61 | 62 | comments: 63 | start: '# ' 64 | 65 | scopes: 66 | 'module': 'source.python' 67 | 68 | 'comment': 'comment.line' 69 | 'string': 'string.quoted' 70 | 'escape_sequence': 'constant.character.escape' 71 | 'interpolation': 'meta.embedded' 72 | 'interpolation > "{"': 'punctuation.section.embedded' 73 | 'interpolation > "}"': 'punctuation.section.embedded' 74 | 75 | 'class_definition > identifier': 'entity.name.type.class' 76 | 'function_definition > identifier': 'entity.name.function.definition' 77 | 'call > identifier:nth-child(0)': [ 78 | {match: '^(abs|all|any|ascii|bin|bool|breakpoint|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip|__import__)$', 79 | scopes: 'support.function.call'}, 80 | {match: '^[A-Z]', scopes: 
'support.type.contructor'} 81 | 'entity.name.function.call' 82 | ] 83 | 'call > attribute > identifier:nth-child(2)': 'entity.name.function' 84 | 85 | 'identifier': [ 86 | {match: 87 | '^(BaseException|Exception|TypeError|StopAsyncIteration|StopIteration|ImportError|ModuleNotFoundError|OSError|ConnectionError|BrokenPipeError|ConnectionAbortedError|ConnectionRefusedError|ConnectionResetError|BlockingIOError|ChildProcessError|FileExistsError|FileNotFoundError|IsADirectoryError|NotADirectoryError|InterruptedError|PermissionError|ProcessLookupError|TimeoutError|EOFError|RuntimeError|RecursionError|NotImplementedError|NameError|UnboundLocalError|AttributeError|SyntaxError|IndentationError|TabError|LookupError|IndexError|KeyError|ValueError|UnicodeError|UnicodeEncodeError|UnicodeDecodeError|UnicodeTranslateError|AssertionError|ArithmeticError|FloatingPointError|OverflowError|ZeroDivisionError|SystemError|ReferenceError|BufferError|MemoryError|Warning|UserWarning|DeprecationWarning|PendingDeprecationWarning|SyntaxWarning|RuntimeWarning|FutureWarning|ImportWarning|UnicodeWarning|BytesWarning|ResourceWarning|GeneratorExit|SystemExit|KeyboardInterrupt)$' 88 | scopes: 'support.type.exception'}, 89 | {match: '^(self)', scopes: 'entity.name.variable.self'} 90 | ] 91 | 92 | 'attribute > identifier:nth-child(2)': 'variable.other.object.property' 93 | 94 | 'decorator': 'entity.name.function.decorator' 95 | 96 | 'none': 'constant.language' 97 | 'true': 'constant.language' 98 | 'false': 'constant.language' 99 | 'integer': 'constant.numeric' 100 | 'float': 'constant.numeric' 101 | 102 | 'type > identifier': 'support.storage.type' 103 | 104 | 'class_definition > argument_list > attribute': 'entity.other.inherited-class' 105 | 'class_definition > argument_list > identifier': 'entity.other.inherited-class' 106 | 'class_definition > argument_list > keyword_argument > attribute': 'entity.other.inherited-class' 107 | 'class_definition > argument_list > keyword_argument > identifier:nth-child(2)': 'entity.other.inherited-class' 108 | 109 | '"class"': 'storage.type.class' 110 | '"def"': 'storage.type.function' 111 | '"lambda"': 'storage.type.function' 112 | 113 | '"global"': 'storage.modifier.global' 114 | '"nonlocal"': 'storage.modifier.nonlocal' 115 | 116 | 'parameters > identifier': 'variable.parameter.function' 117 | 'parameters > list_splat > identifier': 'variable.parameter.function' 118 | 'parameters > dictionary_splat > identifier': 'variable.parameter.function' 119 | 'default_parameter > identifier:nth-child(0)': 'variable.parameter.function' 120 | 'keyword_argument > identifier:nth-child(0)': 'variable.parameter.function' 121 | 'lambda_parameters > identifier': 'variable.parameter.function' 122 | 'typed_parameter > identifier': 'variable.parameter.function' 123 | 124 | 'argument_list': 'meta.method-call.python' 125 | 126 | '"if"': 'keyword.control' 127 | '"else"': 'keyword.control' 128 | '"elif"': 'keyword.control' 129 | '"while"': 'keyword.control' 130 | '"for"': 'keyword.control' 131 | '"return"': 'keyword.control' 132 | '"break"': 'keyword.control' 133 | '"continue"': 'keyword.control' 134 | '"pass"': 'keyword.control' 135 | '"raise"': 'keyword.control' 136 | '"yield"': 'keyword.control' 137 | '"await"': 'keyword.control' 138 | '"async"': 'keyword.control' 139 | '"try"': 'keyword.control' 140 | '"except"': 'keyword.control' 141 | '"with"': 'keyword.control' 142 | '"as"': 'keyword.control' 143 | '"finally"': 'keyword.control' 144 | '"import"': 'keyword.control' 145 | '"from"': 'keyword.control' 146 | 147 
| '"print"': 'keyword.other' 148 | '"assert"': 'keyword.other' 149 | '"exec"': 'keyword.other' 150 | '"del"': 'keyword.other' 151 | 152 | '"+"': 'keyword.operator' 153 | '"-"': 'keyword.operator' 154 | '"*"': 'keyword.operator' 155 | '"/"': 'keyword.operator' 156 | '"%"': 'keyword.operator' 157 | '"**"': 'keyword.operator' 158 | '"//"': 'keyword.operator' 159 | '"=="': 'keyword.operator' 160 | '"!="': 'keyword.operator' 161 | '"<>"': 'keyword.operator' 162 | '">"': 'keyword.operator' 163 | '"<"': 'keyword.operator' 164 | '">="': 'keyword.operator' 165 | '"<="': 'keyword.operator' 166 | '"="': 'keyword.operator' 167 | '"+="': 'keyword.operator' 168 | '"-="': 'keyword.operator' 169 | '"*="': 'keyword.operator' 170 | '"/="': 'keyword.operator' 171 | '"%="': 'keyword.operator' 172 | '"**="': 'keyword.operator' 173 | '"//="': 'keyword.operator' 174 | '"&"': 'keyword.operator' 175 | '"|"': 'keyword.operator' 176 | '"^"': 'keyword.operator' 177 | '"~"': 'keyword.operator' 178 | '"<<"': 'keyword.operator' 179 | '">>"': 'keyword.operator' 180 | 'binary_operator > "@"': 'keyword.operator' 181 | 'binary_operator > "@="': 'keyword.operator' 182 | '"in"': 'keyword.operator.logical.python' 183 | '"and"': 'keyword.operator.logical.python' 184 | '"or"': 'keyword.operator.logical.python' 185 | '"not"': 'keyword.operator.logical.python' 186 | '"is"': 'keyword.operator.logical.python' 187 | '"->"': 'keyword.control.return' 188 | 189 | '"["': 'punctuation.definition.begin.bracket.square' 190 | '"]"': 'punctuation.definition.end.bracket.square' 191 | '","': 'punctuation.separator.delimiter' 192 | '"{"': 'punctuation.section.block.begin.bracket.curly' 193 | '"}"': 'punctuation.section.block.end.bracket.curly' 194 | '"("': 'punctuation.section.parens.begin.bracket.round' 195 | '")"': 'punctuation.section.parens.end.bracket.round' 196 | -------------------------------------------------------------------------------- /spec/python-spec.coffee: -------------------------------------------------------------------------------- 1 | path = require 'path' 2 | grammarTest = require 'atom-grammar-test' 3 | 4 | describe "Python grammar", -> 5 | grammar = null 6 | 7 | beforeEach -> 8 | atom.config.set('core.useTreeSitterParsers', false) 9 | 10 | waitsForPromise -> 11 | atom.packages.activatePackage("language-python") 12 | 13 | runs -> 14 | grammar = atom.grammars.grammarForScopeName("source.python") 15 | 16 | it "recognises shebang on firstline", -> 17 | expect(grammar.firstLineRegex.scanner.findNextMatchSync("#!/usr/bin/env python")).not.toBeNull() 18 | expect(grammar.firstLineRegex.scanner.findNextMatchSync("#! 
/usr/bin/env python")).not.toBeNull() 19 | 20 | it "parses the grammar", -> 21 | expect(grammar).toBeDefined() 22 | expect(grammar.scopeName).toBe "source.python" 23 | 24 | it "tokenizes `yield`", -> 25 | {tokens} = grammar.tokenizeLine 'yield v' 26 | 27 | expect(tokens[0]).toEqual value: 'yield', scopes: ['source.python', 'keyword.control.statement.python'] 28 | 29 | it "tokenizes `yield from`", -> 30 | {tokens} = grammar.tokenizeLine 'yield from v' 31 | 32 | expect(tokens[0]).toEqual value: 'yield from', scopes: ['source.python', 'keyword.control.statement.python'] 33 | 34 | it "tokenizes multi-line strings", -> 35 | tokens = grammar.tokenizeLines('"1\\\n2"') 36 | 37 | # Line 0 38 | expect(tokens[0][0].value).toBe '"' 39 | expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 40 | 41 | expect(tokens[0][1].value).toBe '1' 42 | expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python'] 43 | 44 | expect(tokens[0][2].value).toBe '\\' 45 | expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.newline.python'] 46 | 47 | expect(tokens[0][3]).not.toBeDefined() 48 | 49 | # Line 1 50 | expect(tokens[1][0].value).toBe '2' 51 | expect(tokens[1][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python'] 52 | 53 | expect(tokens[1][1].value).toBe '"' 54 | expect(tokens[1][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 55 | 56 | expect(tokens[1][2]).not.toBeDefined() 57 | 58 | it "terminates a single-quoted raw string containing opening parenthesis at closing quote", -> 59 | tokens = grammar.tokenizeLines("r'%d(' #foo") 60 | 61 | expect(tokens[0][0].value).toBe 'r' 62 | expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'storage.type.string.python'] 63 | expect(tokens[0][1].value).toBe "'" 64 | expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.begin.python'] 65 | expect(tokens[0][2].value).toBe '%d' 66 | expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.placeholder.python'] 67 | expect(tokens[0][3].value).toBe '(' 68 | expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] 69 | expect(tokens[0][4].value).toBe "'" 70 | expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] 71 | expect(tokens[0][5].value).toBe ' ' 72 | expect(tokens[0][5].scopes).toEqual ['source.python'] 73 | expect(tokens[0][6].value).toBe '#' 74 | expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] 75 | expect(tokens[0][7].value).toBe 'foo' 76 | expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] 77 | 78 | it "terminates a single-quoted raw string containing opening bracket at closing quote", -> 79 | tokens = grammar.tokenizeLines("r'%d[' #foo") 80 | 81 | expect(tokens[0][0].value).toBe 'r' 82 | expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 
'storage.type.string.python'] 83 | expect(tokens[0][1].value).toBe "'" 84 | expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.begin.python'] 85 | expect(tokens[0][2].value).toBe '%d' 86 | expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.placeholder.python'] 87 | expect(tokens[0][3].value).toBe '[' 88 | expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] 89 | expect(tokens[0][4].value).toBe "'" 90 | expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] 91 | expect(tokens[0][5].value).toBe ' ' 92 | expect(tokens[0][5].scopes).toEqual ['source.python'] 93 | expect(tokens[0][6].value).toBe '#' 94 | expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] 95 | expect(tokens[0][7].value).toBe 'foo' 96 | expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] 97 | 98 | it "terminates a double-quoted raw string containing opening parenthesis at closing quote", -> 99 | tokens = grammar.tokenizeLines('r"%d(" #foo') 100 | 101 | expect(tokens[0][0].value).toBe 'r' 102 | expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'storage.type.string.python'] 103 | expect(tokens[0][1].value).toBe '"' 104 | expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.begin.python'] 105 | expect(tokens[0][2].value).toBe '%d' 106 | expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.placeholder.python'] 107 | expect(tokens[0][3].value).toBe '(' 108 | expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] 109 | expect(tokens[0][4].value).toBe '"' 110 | expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] 111 | expect(tokens[0][5].value).toBe ' ' 112 | expect(tokens[0][5].scopes).toEqual ['source.python'] 113 | expect(tokens[0][6].value).toBe '#' 114 | expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] 115 | expect(tokens[0][7].value).toBe 'foo' 116 | expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] 117 | 118 | it "terminates a double-quoted raw string containing opening bracket at closing quote", -> 119 | tokens = grammar.tokenizeLines('r"%d[" #foo') 120 | 121 | expect(tokens[0][0].value).toBe 'r' 122 | expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'storage.type.string.python'] 123 | expect(tokens[0][1].value).toBe '"' 124 | expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.begin.python'] 125 | expect(tokens[0][2].value).toBe '%d' 126 | expect(tokens[0][2].scopes).toEqual ['source.python', 
'string.quoted.double.single-line.raw-regex.python', 'constant.other.placeholder.python'] 127 | expect(tokens[0][3].value).toBe '[' 128 | expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] 129 | expect(tokens[0][4].value).toBe '"' 130 | expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] 131 | expect(tokens[0][5].value).toBe ' ' 132 | expect(tokens[0][5].scopes).toEqual ['source.python'] 133 | expect(tokens[0][6].value).toBe '#' 134 | expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] 135 | expect(tokens[0][7].value).toBe 'foo' 136 | expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] 137 | 138 | it "terminates a unicode single-quoted raw string containing opening parenthesis at closing quote", -> 139 | tokens = grammar.tokenizeLines("ur'%d(' #foo") 140 | 141 | expect(tokens[0][0].value).toBe 'ur' 142 | expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'storage.type.string.python'] 143 | expect(tokens[0][1].value).toBe "'" 144 | expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python'] 145 | expect(tokens[0][2].value).toBe '%d' 146 | expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python'] 147 | expect(tokens[0][3].value).toBe '(' 148 | expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] 149 | expect(tokens[0][4].value).toBe "'" 150 | expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] 151 | expect(tokens[0][5].value).toBe ' ' 152 | expect(tokens[0][5].scopes).toEqual ['source.python'] 153 | expect(tokens[0][6].value).toBe '#' 154 | expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] 155 | expect(tokens[0][7].value).toBe 'foo' 156 | expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] 157 | 158 | it "terminates a unicode single-quoted raw string containing opening bracket at closing quote", -> 159 | tokens = grammar.tokenizeLines("ur'%d[' #foo") 160 | 161 | expect(tokens[0][0].value).toBe 'ur' 162 | expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'storage.type.string.python'] 163 | expect(tokens[0][1].value).toBe "'" 164 | expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python'] 165 | expect(tokens[0][2].value).toBe '%d' 166 | expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python'] 167 | expect(tokens[0][3].value).toBe '[' 168 | expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 
'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] 169 | expect(tokens[0][4].value).toBe "'" 170 | expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] 171 | expect(tokens[0][5].value).toBe ' ' 172 | expect(tokens[0][5].scopes).toEqual ['source.python'] 173 | expect(tokens[0][6].value).toBe '#' 174 | expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] 175 | expect(tokens[0][7].value).toBe 'foo' 176 | expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] 177 | 178 | it "terminates a unicode double-quoted raw string containing opening parenthesis at closing quote", -> 179 | tokens = grammar.tokenizeLines('ur"%d(" #foo') 180 | 181 | expect(tokens[0][0].value).toBe 'ur' 182 | expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'storage.type.string.python'] 183 | expect(tokens[0][1].value).toBe '"' 184 | expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python'] 185 | expect(tokens[0][2].value).toBe '%d' 186 | expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python'] 187 | expect(tokens[0][3].value).toBe '(' 188 | expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] 189 | expect(tokens[0][4].value).toBe '"' 190 | expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] 191 | expect(tokens[0][5].value).toBe ' ' 192 | expect(tokens[0][5].scopes).toEqual ['source.python'] 193 | expect(tokens[0][6].value).toBe '#' 194 | expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] 195 | expect(tokens[0][7].value).toBe 'foo' 196 | expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] 197 | 198 | it "terminates a unicode double-quoted raw string containing opening bracket at closing quote", -> 199 | tokens = grammar.tokenizeLines('ur"%d[" #foo') 200 | 201 | expect(tokens[0][0].value).toBe 'ur' 202 | expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'storage.type.string.python'] 203 | expect(tokens[0][1].value).toBe '"' 204 | expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python'] 205 | expect(tokens[0][2].value).toBe '%d' 206 | expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python'] 207 | expect(tokens[0][3].value).toBe '[' 208 | expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] 209 | expect(tokens[0][4].value).toBe '"' 210 | expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 
'punctuation.definition.string.end.python'] 211 | expect(tokens[0][5].value).toBe ' ' 212 | expect(tokens[0][5].scopes).toEqual ['source.python'] 213 | expect(tokens[0][6].value).toBe '#' 214 | expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] 215 | expect(tokens[0][7].value).toBe 'foo' 216 | expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] 217 | 218 | it "terminates referencing an item in a list variable after a sequence of a closing and opening bracket", -> 219 | tokens = grammar.tokenizeLines('foo[i[0]][j[0]]') 220 | 221 | expect(tokens[0][0].value).toBe 'foo' 222 | expect(tokens[0][0].scopes).toEqual ['source.python', 'meta.item-access.python'] 223 | expect(tokens[0][1].value).toBe '[' 224 | expect(tokens[0][1].scopes).toEqual ['source.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python'] 225 | expect(tokens[0][2].value).toBe 'i' 226 | expect(tokens[0][2].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python'] 227 | expect(tokens[0][3].value).toBe '[' 228 | expect(tokens[0][3].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python'] 229 | expect(tokens[0][4].value).toBe '0' 230 | expect(tokens[0][4].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'constant.numeric.integer.decimal.python'] 231 | expect(tokens[0][5].value).toBe ']' 232 | expect(tokens[0][5].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python'] 233 | expect(tokens[0][6].value).toBe ']' 234 | expect(tokens[0][6].scopes).toEqual ['source.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python'] 235 | expect(tokens[0][7].value).toBe '[' 236 | expect(tokens[0][7].scopes).toEqual ['source.python', 'meta.structure.list.python', 'punctuation.definition.list.begin.python'] 237 | expect(tokens[0][8].value).toBe 'j' 238 | expect(tokens[0][8].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python'] 239 | expect(tokens[0][9].value).toBe '[' 240 | expect(tokens[0][9].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python'] 241 | expect(tokens[0][10].value).toBe '0' 242 | expect(tokens[0][10].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'constant.numeric.integer.decimal.python'] 243 | expect(tokens[0][11].value).toBe ']' 244 | expect(tokens[0][11].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python'] 245 | expect(tokens[0][12].value).toBe ']' 246 | expect(tokens[0][12].scopes).toEqual ['source.python', 'meta.structure.list.python', 'punctuation.definition.list.end.python'] 247 | 248 | it "tokenizes a hex escape inside a string", -> 249 | tokens = grammar.tokenizeLines('"\\x5A"') 250 | 251 | expect(tokens[0][0].value).toBe '"' 252 | 
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 253 | expect(tokens[0][1].value).toBe '\\x5A' 254 | expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.hex.python'] 255 | 256 | tokens = grammar.tokenizeLines('"\\x9f"') 257 | 258 | expect(tokens[0][0].value).toBe '"' 259 | expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 260 | expect(tokens[0][1].value).toBe '\\x9f' 261 | expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.hex.python'] 262 | 263 | describe "f-strings", -> 264 | it "tokenizes them", -> 265 | {tokens} = grammar.tokenizeLine "f'hello'" 266 | 267 | expect(tokens[0]).toEqual value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python'] 268 | expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python'] 269 | expect(tokens[2]).toEqual value: 'hello', scopes: ['source.python', "string.quoted.single.single-line.format.python"] 270 | expect(tokens[3]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python'] 271 | 272 | it "tokenizes {{ and }} as escape characters", -> 273 | {tokens} = grammar.tokenizeLine "f'he}}l{{lo'" 274 | 275 | expect(tokens[0]).toEqual value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python'] 276 | expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python'] 277 | expect(tokens[2]).toEqual value: 'he', scopes: ['source.python', "string.quoted.single.single-line.format.python"] 278 | expect(tokens[3]).toEqual value: '}}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'constant.character.escape.curly-bracket.python'] 279 | expect(tokens[4]).toEqual value: 'l', scopes: ['source.python', "string.quoted.single.single-line.format.python"] 280 | expect(tokens[5]).toEqual value: '{{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'constant.character.escape.curly-bracket.python'] 281 | expect(tokens[6]).toEqual value: 'lo', scopes: ['source.python', "string.quoted.single.single-line.format.python"] 282 | expect(tokens[7]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python'] 283 | 284 | it "tokenizes unmatched closing curly brackets as invalid", -> 285 | {tokens} = grammar.tokenizeLine "f'he}llo'" 286 | 287 | expect(tokens[0]).toEqual value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python'] 288 | expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python'] 289 | expect(tokens[2]).toEqual value: 'he', scopes: ['source.python', "string.quoted.single.single-line.format.python"] 290 | expect(tokens[3]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'invalid.illegal.closing-curly-bracket.python'] 291 | 
expect(tokens[4]).toEqual value: 'llo', scopes: ['source.python', "string.quoted.single.single-line.format.python"] 292 | expect(tokens[5]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python'] 293 | 294 | describe "in expressions", -> 295 | it "tokenizes variables", -> 296 | {tokens} = grammar.tokenizeLine "f'{abc}'" 297 | 298 | expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] 299 | expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python'] 300 | expect(tokens[4]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] 301 | 302 | it "tokenizes arithmetic", -> 303 | {tokens} = grammar.tokenizeLine "f'{5 - 3}'" 304 | 305 | expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] 306 | expect(tokens[3]).toEqual value: '5', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python'] 307 | expect(tokens[5]).toEqual value: '-', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'keyword.operator.arithmetic.python'] 308 | expect(tokens[7]).toEqual value: '3', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python'] 309 | expect(tokens[8]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] 310 | 311 | it "tokenizes function and method calls", -> 312 | {tokens} = grammar.tokenizeLine "f'{name.decode(\"utf-8\").lower()}'" 313 | 314 | expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] 315 | expect(tokens[3]).toEqual value: 'name', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'variable.other.object.python'] 316 | expect(tokens[4]).toEqual value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.separator.method.period.python'] 317 | expect(tokens[5]).toEqual value: 'decode', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'entity.name.function.python'] 318 | expect(tokens[6]).toEqual value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.begin.bracket.round.python'] 319 | expect(tokens[7]).toEqual value: '"', scopes: 
['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python", 'punctuation.definition.string.begin.python'] 320 | expect(tokens[8]).toEqual value: 'utf-8', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python"] 321 | expect(tokens[9]).toEqual value: '"', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python", 'punctuation.definition.string.end.python'] 322 | expect(tokens[10]).toEqual value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.end.bracket.round.python'] 323 | expect(tokens[11]).toEqual value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.separator.method.period.python'] 324 | expect(tokens[12]).toEqual value: 'lower', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'entity.name.function.python'] 325 | expect(tokens[13]).toEqual value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.begin.bracket.round.python'] 326 | expect(tokens[14]).toEqual value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.end.bracket.round.python'] 327 | expect(tokens[15]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] 328 | 329 | it "tokenizes conversion flags", -> 330 | {tokens} = grammar.tokenizeLine "f'{abc!r}'" 331 | 332 | expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] 333 | expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python'] 334 | expect(tokens[4]).toEqual value: '!r', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] 335 | expect(tokens[5]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] 336 | 337 | it "tokenizes format specifiers", -> 338 | {tokens} = grammar.tokenizeLine "f'{abc:^d}'" 339 | 340 | expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 
'punctuation.definition.interpolation.begin.bracket.curly.python'] 341 | expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python'] 342 | expect(tokens[4]).toEqual value: ':^d', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] 343 | expect(tokens[5]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] 344 | 345 | it "tokenizes nested replacement fields in top-level format specifiers", -> 346 | {tokens} = grammar.tokenizeLine "f'{abc:{align}d}'" 347 | 348 | expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] 349 | expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python'] 350 | expect(tokens[4]).toEqual value: ':', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] 351 | expect(tokens[5]).toEqual value: '{align}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python'] 352 | expect(tokens[6]).toEqual value: 'd', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] 353 | expect(tokens[7]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] 354 | 355 | it "tokenizes backslashes as invalid", -> 356 | {tokens} = grammar.tokenizeLine "f'{ab\\n}'" 357 | 358 | expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] 359 | expect(tokens[3]).toEqual value: 'ab', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python'] 360 | expect(tokens[4]).toEqual value: '\\', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'invalid.illegal.backslash.python'] 361 | expect(tokens[6]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] 362 | 363 | describe "binary strings", -> 364 | it "tokenizes them", -> 365 | {tokens} = grammar.tokenizeLine "b'test'" 366 | 367 | expect(tokens[0]).toEqual value: 'b', scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'storage.type.string.python'] 368 | expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.begin.python'] 369 | expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', "string.quoted.single.single-line.binary.python"] 370 | expect(tokens[3]).toEqual value: "'", scopes: 
['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.end.python'] 371 | 372 | it "tokenizes invalid characters", -> 373 | {tokens} = grammar.tokenizeLine "b'tést'" 374 | 375 | expect(tokens[0]).toEqual value: 'b', scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'storage.type.string.python'] 376 | expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.begin.python'] 377 | expect(tokens[2]).toEqual value: 't', scopes: ['source.python', "string.quoted.single.single-line.binary.python"] 378 | expect(tokens[3]).toEqual value: 'é', scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'invalid.illegal.character-out-of-range.python'] 379 | expect(tokens[4]).toEqual value: 'st', scopes: ['source.python', "string.quoted.single.single-line.binary.python"] 380 | expect(tokens[5]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.end.python'] 381 | 382 | describe "docstrings", -> 383 | it "tokenizes them", -> 384 | lines = grammar.tokenizeLines ''' 385 | """ 386 | Bla bla bla "wow" what's this? 387 | """ 388 | ''' 389 | 390 | expect(lines[0][0]).toEqual value: '"""', scopes: ['source.python', 'string.quoted.double.block.python', 'punctuation.definition.string.begin.python'] 391 | expect(lines[1][0]).toEqual value: ' Bla bla bla "wow" what\'s this?', scopes: ['source.python', 'string.quoted.double.block.python'] 392 | expect(lines[2][0]).toEqual value: '"""', scopes: ['source.python', 'string.quoted.double.block.python', 'punctuation.definition.string.end.python'] 393 | 394 | lines = grammar.tokenizeLines """ 395 | ''' 396 | Bla bla bla "wow" what's this? 
397 | ''' 398 | """ 399 | 400 | expect(lines[0][0]).toEqual value: "'''", scopes: ['source.python', 'string.quoted.single.block.python', 'punctuation.definition.string.begin.python'] 401 | expect(lines[1][0]).toEqual value: ' Bla bla bla "wow" what\'s this?', scopes: ['source.python', 'string.quoted.single.block.python'] 402 | expect(lines[2][0]).toEqual value: "'''", scopes: ['source.python', 'string.quoted.single.block.python', 'punctuation.definition.string.end.python'] 403 | 404 | 405 | describe "string formatting", -> 406 | describe "%-style formatting", -> 407 | it "tokenizes the conversion type", -> 408 | {tokens} = grammar.tokenizeLine '"%d"' 409 | 410 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 411 | expect(tokens[1]).toEqual value: '%d', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 412 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 413 | 414 | it "tokenizes an optional mapping key", -> 415 | {tokens} = grammar.tokenizeLine '"%(key)x"' 416 | 417 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 418 | expect(tokens[1]).toEqual value: '%(key)x', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 419 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 420 | 421 | it "tokenizes an optional conversion flag", -> 422 | {tokens} = grammar.tokenizeLine '"% F"' 423 | 424 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 425 | expect(tokens[1]).toEqual value: '% F', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 426 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 427 | 428 | it "tokenizes an optional field width", -> 429 | {tokens} = grammar.tokenizeLine '"%11s"' 430 | 431 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 432 | expect(tokens[1]).toEqual value: '%11s', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 433 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 434 | 435 | it "tokenizes * as the optional field width", -> 436 | {tokens} = grammar.tokenizeLine '"%*g"' 437 | 438 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 439 | expect(tokens[1]).toEqual value: '%*g', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 440 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 441 | 442 | it "tokenizes an optional precision", -> 443 | {tokens} = grammar.tokenizeLine '"%.4r"' 
444 | 445 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 446 | expect(tokens[1]).toEqual value: '%.4r', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 447 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 448 | 449 | it "tokenizes * as the optional precision", -> 450 | {tokens} = grammar.tokenizeLine '"%.*%"' 451 | 452 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 453 | expect(tokens[1]).toEqual value: '%.*%', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 454 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 455 | 456 | it "tokenizes an optional length modifier", -> 457 | {tokens} = grammar.tokenizeLine '"%Lo"' 458 | 459 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 460 | expect(tokens[1]).toEqual value: '%Lo', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 461 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 462 | 463 | it "tokenizes complex formats", -> 464 | {tokens} = grammar.tokenizeLine '"%(key)#5.*hc"' 465 | 466 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 467 | expect(tokens[1]).toEqual value: '%(key)#5.*hc', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 468 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 469 | 470 | describe "{}-style formatting", -> 471 | it "tokenizes the empty replacement field", -> 472 | {tokens} = grammar.tokenizeLine '"{}"' 473 | 474 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 475 | expect(tokens[1]).toEqual value: '{}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 476 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 477 | 478 | it "tokenizes a number as the field name", -> 479 | {tokens} = grammar.tokenizeLine '"{1}"' 480 | 481 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 482 | expect(tokens[1]).toEqual value: '{1}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 483 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 484 | 485 | it "tokenizes a variable name as the field name", -> 486 | {tokens} = grammar.tokenizeLine '"{key}"' 487 | 488 | 
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 489 | expect(tokens[1]).toEqual value: '{key}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 490 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 491 | 492 | it "tokenizes field name attributes", -> 493 | {tokens} = grammar.tokenizeLine '"{key.length}"' 494 | 495 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 496 | expect(tokens[1]).toEqual value: '{key.length}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 497 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 498 | 499 | {tokens} = grammar.tokenizeLine '"{4.width}"' 500 | 501 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 502 | expect(tokens[1]).toEqual value: '{4.width}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 503 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 504 | 505 | {tokens} = grammar.tokenizeLine '"{python2[\'3\']}"' 506 | 507 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 508 | expect(tokens[1]).toEqual value: '{python2[\'3\']}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 509 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 510 | 511 | {tokens} = grammar.tokenizeLine '"{2[4]}"' 512 | 513 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 514 | expect(tokens[1]).toEqual value: '{2[4]}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 515 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 516 | 517 | it "tokenizes multiple field name attributes", -> 518 | {tokens} = grammar.tokenizeLine '"{nested.a[2][\'val\'].value}"' 519 | 520 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 521 | expect(tokens[1]).toEqual value: '{nested.a[2][\'val\'].value}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 522 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 523 | 524 | it "tokenizes conversions", -> 525 | {tokens} = grammar.tokenizeLine '"{!r}"' 526 | 527 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 
'punctuation.definition.string.begin.python'] 528 | expect(tokens[1]).toEqual value: '{!r}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 529 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 530 | 531 | describe "format specifiers", -> 532 | it "tokenizes alignment", -> 533 | {tokens} = grammar.tokenizeLine '"{:<}"' 534 | 535 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 536 | expect(tokens[1]).toEqual value: '{:<}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 537 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 538 | 539 | {tokens} = grammar.tokenizeLine '"{:a^}"' 540 | 541 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 542 | expect(tokens[1]).toEqual value: '{:a^}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 543 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 544 | 545 | it "tokenizes signs", -> 546 | {tokens} = grammar.tokenizeLine '"{:+}"' 547 | 548 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 549 | expect(tokens[1]).toEqual value: '{:+}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 550 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 551 | 552 | {tokens} = grammar.tokenizeLine '"{: }"' 553 | 554 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 555 | expect(tokens[1]).toEqual value: '{: }', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 556 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 557 | 558 | it "tokenizes the alternate form indicator", -> 559 | {tokens} = grammar.tokenizeLine '"{:#}"' 560 | 561 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 562 | expect(tokens[1]).toEqual value: '{:#}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 563 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 564 | 565 | it "tokenizes 0", -> 566 | {tokens} = grammar.tokenizeLine '"{:0}"' 567 | 568 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 569 | expect(tokens[1]).toEqual value: '{:0}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 
570 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 571 | 572 | it "tokenizes the width", -> 573 | {tokens} = grammar.tokenizeLine '"{:34}"' 574 | 575 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 576 | expect(tokens[1]).toEqual value: '{:34}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 577 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 578 | 579 | it "tokenizes the grouping option", -> 580 | {tokens} = grammar.tokenizeLine '"{:,}"' 581 | 582 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 583 | expect(tokens[1]).toEqual value: '{:,}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 584 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 585 | 586 | it "tokenizes the precision", -> 587 | {tokens} = grammar.tokenizeLine '"{:.5}"' 588 | 589 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 590 | expect(tokens[1]).toEqual value: '{:.5}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 591 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 592 | 593 | it "tokenizes the type", -> 594 | {tokens} = grammar.tokenizeLine '"{:b}"' 595 | 596 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 597 | expect(tokens[1]).toEqual value: '{:b}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 598 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 599 | 600 | it "tokenizes nested replacement fields", -> 601 | {tokens} = grammar.tokenizeLine '"{:{align}-.{precision}%}"' 602 | 603 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 604 | expect(tokens[1]).toEqual value: '{:', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 605 | expect(tokens[2]).toEqual value: '{align}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python'] 606 | expect(tokens[3]).toEqual value: '-.', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 607 | expect(tokens[4]).toEqual value: '{precision}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python'] 608 | expect(tokens[5]).toEqual value: '%}', scopes: ['source.python', 'string.quoted.double.single-line.python', 
'constant.other.placeholder.python'] 609 | expect(tokens[6]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 610 | 611 | it "tokenizes complex formats", -> 612 | {tokens} = grammar.tokenizeLine '"{0.players[2]!a:2>-#01_.3d}"' 613 | 614 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 615 | expect(tokens[1]).toEqual value: '{0.players[2]!a:2>-#01_.3d}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] 616 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 617 | 618 | it "tokenizes {{ and }} as escape characters and not formatters", -> 619 | {tokens} = grammar.tokenizeLine '"{{hello}}"' 620 | 621 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 622 | expect(tokens[1]).toEqual value: '{{', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.curly-bracket.python'] 623 | expect(tokens[2]).toEqual value: 'hello', scopes: ['source.python', 'string.quoted.double.single-line.python'] 624 | expect(tokens[3]).toEqual value: '}}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.curly-bracket.python'] 625 | expect(tokens[4]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 626 | 627 | it "tokenizes properties of self as self-type variables", -> 628 | tokens = grammar.tokenizeLines('self.foo') 629 | 630 | expect(tokens[0][0]).toEqual value: 'self', scopes: ['source.python', 'variable.language.self.python'] 631 | expect(tokens[0][1]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python'] 632 | expect(tokens[0][2]).toEqual value: 'foo', scopes: ['source.python', 'variable.other.property.python'] 633 | 634 | it "tokenizes cls as a self-type variable", -> 635 | tokens = grammar.tokenizeLines('cls.foo') 636 | 637 | expect(tokens[0][0]).toEqual value: 'cls', scopes: ['source.python', 'variable.language.self.python'] 638 | expect(tokens[0][1]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python'] 639 | expect(tokens[0][2]).toEqual value: 'foo', scopes: ['source.python', 'variable.other.property.python'] 640 | 641 | it "tokenizes properties of a variable as variables", -> 642 | tokens = grammar.tokenizeLines('bar.foo') 643 | 644 | expect(tokens[0][0]).toEqual value: 'bar', scopes: ['source.python', 'variable.other.object.python'] 645 | expect(tokens[0][1]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python'] 646 | expect(tokens[0][2]).toEqual value: 'foo', scopes: ['source.python', 'variable.other.property.python'] 647 | 648 | # Add the grammar test fixtures 649 | grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python.py') 650 | grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_functions.py') 651 | grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_lambdas.py') 652 | grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_typing.py') 653 | 654 | describe "SQL highlighting", -> 655 | beforeEach -> 656 | 
waitsForPromise -> 657 | atom.packages.activatePackage('language-sql') 658 | 659 | it "tokenizes SQL inline highlighting on blocks", -> 660 | delimsByScope = 661 | "string.quoted.double.block.sql.python": '"""' 662 | "string.quoted.single.block.sql.python": "'''" 663 | 664 | for scope, delim of delimsByScope 665 | tokens = grammar.tokenizeLines( 666 | delim + 667 | 'SELECT bar 668 | FROM foo' 669 | + delim 670 | ) 671 | 672 | expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] 673 | expect(tokens[1][0]).toEqual value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] 674 | expect(tokens[1][1]).toEqual value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql'] 675 | expect(tokens[2][0]).toEqual value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] 676 | expect(tokens[2][1]).toEqual value: ' foo', scopes: ['source.python', scope, 'meta.embedded.sql'] 677 | expect(tokens[3][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] 678 | 679 | it "tokenizes SQL inline highlighting on blocks with a CTE", -> 680 | # Note that these scopes do not contain .sql because we can't definitively tell 681 | # if the string contains SQL or not 682 | delimsByScope = 683 | "string.quoted.double.block.python": '"""' 684 | "string.quoted.single.block.python": "'''" 685 | 686 | for scope, delim of delimsByScope 687 | tokens = grammar.tokenizeLines(""" 688 | #{delim} 689 | WITH example_cte AS ( 690 | SELECT bar 691 | FROM foo 692 | GROUP BY bar 693 | ) 694 | 695 | SELECT COUNT(*) 696 | FROM example_cte 697 | #{delim} 698 | """) 699 | 700 | expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] 701 | expect(tokens[1][0]).toEqual value: 'WITH', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] 702 | expect(tokens[1][1]).toEqual value: ' example_cte ', scopes: ['source.python', scope, 'meta.embedded.sql'] 703 | expect(tokens[1][2]).toEqual value: 'AS', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.alias.sql'] 704 | expect(tokens[1][3]).toEqual value: ' ', scopes: ['source.python', scope, 'meta.embedded.sql'] 705 | expect(tokens[1][4]).toEqual value: '(', scopes: ['source.python', scope, 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql'] 706 | expect(tokens[2][0]).toEqual value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] 707 | expect(tokens[2][1]).toEqual value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql'] 708 | expect(tokens[3][0]).toEqual value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] 709 | expect(tokens[3][1]).toEqual value: ' foo', scopes: ['source.python', scope, 'meta.embedded.sql'] 710 | expect(tokens[4][0]).toEqual value: 'GROUP BY', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] 711 | expect(tokens[4][1]).toEqual value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql'] 712 | expect(tokens[5][0]).toEqual value: ')', scopes: ['source.python', scope, 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql'] 713 | expect(tokens[7][0]).toEqual value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] 714 | expect(tokens[8][0]).toEqual value: 'FROM', scopes:
['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] 715 | expect(tokens[9][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] 716 | 717 | it "tokenizes SQL inline highlighting on single line with a CTE", -> 718 | {tokens} = grammar.tokenizeLine('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'') 719 | 720 | expect(tokens[0]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'punctuation.definition.string.begin.python'] 721 | expect(tokens[1]).toEqual value: 'WITH', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] 722 | expect(tokens[2]).toEqual value: ' example_cte ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] 723 | expect(tokens[3]).toEqual value: 'AS', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.alias.sql'] 724 | expect(tokens[4]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] 725 | expect(tokens[5]).toEqual value: '(', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql'] 726 | expect(tokens[6]).toEqual value: 'SELECT', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] 727 | expect(tokens[7]).toEqual value: ' bar ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] 728 | expect(tokens[8]).toEqual value: 'FROM', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] 729 | expect(tokens[9]).toEqual value: ' foo', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] 730 | expect(tokens[10]).toEqual value: ')', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql'] 731 | expect(tokens[11]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] 732 | expect(tokens[12]).toEqual value: 'SELECT', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] 733 | expect(tokens[13]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] 734 | expect(tokens[14]).toEqual value: 'COUNT', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'support.function.aggregate.sql'] 735 | expect(tokens[15]).toEqual value: '(', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql'] 736 | expect(tokens[16]).toEqual value: '*', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.operator.star.sql'] 737 | expect(tokens[17]).toEqual value: ')', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql'] 738 | expect(tokens[18]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] 739 | expect(tokens[19]).toEqual 
value: 'FROM', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] 740 | expect(tokens[20]).toEqual value: ' example_cte', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] 741 | expect(tokens[21]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'punctuation.definition.string.end.python'] 742 | 743 | it "tokenizes Python escape characters and formatting specifiers in SQL strings", -> 744 | {tokens} = grammar.tokenizeLine('"INSERT INTO url (image_uri) VALUES (\\\'%s\\\');" % values') 745 | 746 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'punctuation.definition.string.begin.python'] 747 | expect(tokens[10]).toEqual value: '\\\'', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'constant.character.escape.single-quote.python'] 748 | expect(tokens[11]).toEqual value: '%s', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'constant.other.placeholder.python'] 749 | expect(tokens[12]).toEqual value: '\\\'', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'constant.character.escape.single-quote.python'] 750 | expect(tokens[13]).toEqual value: ')', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql'] 751 | expect(tokens[15]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'punctuation.definition.string.end.python'] 752 | expect(tokens[17]).toEqual value: '%', scopes: ['source.python', 'keyword.operator.arithmetic.python'] 753 | 754 | it "recognizes DELETE as an HTTP method", -> 755 | {tokens} = grammar.tokenizeLine('"DELETE /api/v1/endpoint"') 756 | 757 | expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] 758 | expect(tokens[1]).toEqual value: 'DELETE /api/v1/endpoint', scopes: ['source.python', 'string.quoted.double.single-line.python'] 759 | expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] 760 | -------------------------------------------------------------------------------- /grammars/python.cson: -------------------------------------------------------------------------------- 1 | 'scopeName': 'source.python' 2 | 'name': 'Python' 3 | 'fileTypes': [ 4 | 'cpy' 5 | 'gyp' 6 | 'gypi' 7 | 'kv' 8 | 'py' 9 | 'pyw' 10 | 'pyi' 11 | 'rpy' 12 | 'SConscript' 13 | 'SConstruct' 14 | 'Sconstruct' 15 | 'sconstruct' 16 | 'Snakefile' # Snakemake support 17 | 'smk' # Snakemake support 18 | 'tac' 19 | 'wscript' 20 | 'wsgi' 21 | ] 22 | 'firstLineMatch': '^#![ \\t]*/.*\\bpython[\\d\\.]*\\b' 23 | 'injections': 24 | 'L:source.python meta.embedded.sql': 25 | 'patterns': [ 26 | { 27 | 'include': '#string_formatting' 28 | } 29 | { 30 | 'include': '#escaped_char' 31 | } 32 | ] 33 | 'patterns': [ 34 | { 35 | 'include': '#line_comments' 36 | } 37 | { 38 | 'include': '#language_variables' 39 | } 40 | { 41 | 'match': '\\b(?i:(0x\\h*)L)' 42 | 'name': 'constant.numeric.integer.long.hexadecimal.python' 43 | } 44 | { 45 | 'match': '\\b(?i:(0x\\h*))' 46 | 'name': 'constant.numeric.integer.hexadecimal.python' 47 | } 48 | { 49 | 'match': 
'\\b(?i:(0o?[0-7]+)L)' 50 | 'name': 'constant.numeric.integer.long.octal.python' 51 | } 52 | { 53 | 'match': '\\b(?i:(0o?[0-7]+))' 54 | 'name': 'constant.numeric.integer.octal.python' 55 | } 56 | { 57 | 'match': '\\b(?i:(0b[01]+)L)', 58 | 'name': 'constant.numeric.integer.long.binary.python' 59 | } 60 | { 61 | 'match': '\\b(?i:(0b[01]+))', 62 | 'name': 'constant.numeric.integer.binary.python' 63 | } 64 | { 65 | 'match': '\\b(?i:(((\\d+(\\.(?=[^a-zA-Z_])\\d*)?|(?<=[^0-9a-zA-Z_])\\.\\d+)(e[\\-\\+]?\\d+)?))J)' 66 | 'name': 'constant.numeric.complex.python' 67 | } 68 | { 69 | 'match': '\\b(?i:(\\d+\\.\\d*(e[\\-\\+]?\\d+)?))(?=[^a-zA-Z_])' 70 | 'name': 'constant.numeric.float.python' 71 | } 72 | { 73 | 'match': '(?<=[^0-9a-zA-Z_])(?i:(\\.\\d+(e[\\-\\+]?\\d+)?))' 74 | 'name': 'constant.numeric.float.python' 75 | } 76 | { 77 | 'match': '\\b(?i:(\\d+e[\\-\\+]?\\d+))' 78 | 'name': 'constant.numeric.float.python' 79 | } 80 | { 81 | 'match': '\\b(?i:([1-9]+[0-9]*|0)L)' 82 | 'name': 'constant.numeric.integer.long.decimal.python' 83 | } 84 | { 85 | 'match': '\\b([1-9]+[0-9]*|0)' 86 | 'name': 'constant.numeric.integer.decimal.python' 87 | } 88 | { 89 | 'captures': 90 | '1': 91 | 'name': 'storage.modifier.global.python' 92 | 'match': '\\b(global)\\b' 93 | } 94 | { 95 | 'captures': 96 | '1': 97 | 'name': 'storage.modifier.nonlocal.python' 98 | 'match': '\\b(nonlocal)\\b' 99 | } 100 | { 101 | 'captures': 102 | '1': 103 | 'name': 'keyword.control.import.python' 104 | '2': 105 | 'name': 'keyword.control.import.from.python' 106 | 'match': '\\b(?:(import)|(from))\\b' 107 | } 108 | { 109 | 'comment': 'keywords that delimit flow conditionals' 110 | 'name': 'keyword.control.conditional.python' 111 | 'match': '\\b(if|elif|else)\\b' 112 | } 113 | { 114 | 'comment': 'keywords that delimit an exception' 115 | 'name': 'keyword.control.exception.python' 116 | 'match': '\\b(except|finally|try|raise)\\b' 117 | } 118 | { 119 | 'comment': 'keywords that delimit loops' 120 | 'name': 'keyword.control.repeat.python' 121 | 'match': '\\b(for|while)\\b' 122 | } 123 | { 124 | 'comment': 'keywords that alter flow from within a block' 125 | 'name': 'keyword.control.statement.python' 126 | 'match': '\\b(with|break|continue|pass|return|yield(\\s+from)?|await)\\b' 127 | } 128 | { 129 | 'comment': 'keyword operators that evaluate to True or False' 130 | 'match': '\\b(and|in|is|not|or)\\b' 131 | 'name': 'keyword.operator.logical.python' 132 | } 133 | { 134 | 'captures': 135 | '1': 136 | 'name': 'keyword.other.python' 137 | 'comment': 'keywords that haven\'t fit into other groups (yet).' 
138 | 'match': '\\b(as|assert|del|exec|print)\\b' 139 | } 140 | { 141 | 'match': '<>' 142 | 'name': 'invalid.deprecated.operator.python' 143 | } 144 | { 145 | 'match': '<\\=|>\\=|\\=\\=|<|>|\\!\\=' 146 | 'name': 'keyword.operator.comparison.python' 147 | } 148 | { 149 | 'match': '\\+\\=|-\\=|\\*\\=|/\\=|//\\=|%\\=|&\\=|\\|\\=|\\^\\=|>>\\=|<<\\=|\\*\\*\\=' 150 | 'name': 'keyword.operator.assignment.augmented.python' 151 | } 152 | { 153 | 'match': '\\+|\\-|\\*|\\*\\*|/|//|%|<<|>>|&|\\||\\^|~' 154 | 'name': 'keyword.operator.arithmetic.python' 155 | } 156 | { 157 | 'match': '\\=' 158 | 'name': 'keyword.operator.assignment.python' 159 | } 160 | { 161 | 'begin': '^\\s*(class)\\s+(?=[a-zA-Z_][a-zA-Z_0-9]*\\s*\\:)' 162 | 'beginCaptures': 163 | '1': 164 | 'name': 'storage.type.class.python' 165 | 'contentName': 'entity.name.type.class.python' 166 | 'end': '\\s*(:)' 167 | 'endCaptures': 168 | '1': 169 | 'name': 'punctuation.section.class.begin.python' 170 | 'name': 'meta.class.old-style.python' 171 | 'patterns': [ 172 | { 173 | 'include': '#entity_name_class' 174 | } 175 | ] 176 | } 177 | { 178 | 'begin': '^\\s*(class)\\s+(?=[a-zA-Z_][a-zA-Z_0-9]*\\s*\\()' 179 | 'beginCaptures': 180 | '1': 181 | 'name': 'storage.type.class.python' 182 | 'end': '(\\))\\s*(?:(\\:)|(.*$\\n?))' 183 | 'endCaptures': 184 | '1': 185 | 'name': 'punctuation.definition.inheritance.end.python' 186 | '2': 187 | 'name': 'punctuation.section.class.begin.python' 188 | '3': 189 | 'name': 'invalid.illegal.missing-section-begin.python' 190 | 'name': 'meta.class.python' 191 | 'patterns': [ 192 | { 193 | 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*)' 194 | 'contentName': 'entity.name.type.class.python' 195 | 'end': '(?![A-Za-z0-9_])' 196 | 'patterns': [ 197 | { 198 | 'include': '#entity_name_class' 199 | } 200 | ] 201 | } 202 | { 203 | 'begin': '(\\()' 204 | 'beginCaptures': 205 | '1': 206 | 'name': 'punctuation.definition.inheritance.begin.python' 207 | 'contentName': 'meta.class.inheritance.python' 208 | 'end': '(?=\\)|:)' 209 | 'patterns': [ 210 | { 211 | 'begin': '(?<=\\(|,)\\s*' 212 | 'contentName': 'entity.other.inherited-class.python' 213 | 'end': '\\s*(?:(,)|(?=\\)))' 214 | 'endCaptures': 215 | '1': 216 | 'name': 'punctuation.separator.inheritance.python' 217 | 'patterns': [ 218 | { 219 | 'include': '$self' 220 | } 221 | ] 222 | } 223 | ] 224 | } 225 | ] 226 | } 227 | { 228 | 'begin': '^\\s*(class)\\s+(?=[a-zA-Z_][a-zA-Z_0-9])' 229 | 'beginCaptures': 230 | '1': 231 | 'name': 'storage.type.class.python' 232 | 'end': '(\\()|(\\s*$\\n?|#.*$\\n?)' 233 | 'endCaptures': 234 | '1': 235 | 'name': 'punctuation.definition.inheritance.begin.python' 236 | '2': 237 | 'name': 'invalid.illegal.missing-inheritance.python' 238 | 'name': 'meta.class.python' 239 | 'patterns': [ 240 | { 241 | 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*)' 242 | 'contentName': 'entity.name.type.class.python' 243 | 'end': '(?![A-Za-z0-9_])' 244 | 'patterns': [ 245 | { 246 | 'include': '#entity_name_function' 247 | } 248 | ] 249 | } 250 | ] 251 | } 252 | { 253 | 'begin': '^\\s*(?:(async)\\s+)?(def)\\s+(?=[A-Za-z_][A-Za-z0-9_]*)' 254 | 'beginCaptures': 255 | '1': 256 | 'name': 'storage.modifier.async.python' 257 | '2': 258 | 'name': 'storage.type.function.python' 259 | 'end': ':' 260 | 'endCaptures': 261 | '0': 262 | 'name': 'punctuation.definition.function.begin.python' 263 | 'name': 'meta.function.python' 264 | 'patterns': [ 265 | { 266 | 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*)' 267 | 'contentName': 'entity.name.function.python' 268 | 'end': '(?![A-Za-z0-9_])' 269 | 'patterns': [ 270 | 
{ 271 | 'include': '#entity_name_function' 272 | } 273 | ] 274 | } 275 | { 276 | 'begin': '\\(' 277 | 'beginCaptures': 278 | '0': 279 | 'name': 'punctuation.definition.parameters.begin.python' 280 | 'end': '\\)' 281 | 'endCaptures': 282 | '0': 283 | 'name': 'punctuation.definition.parameters.end.python' 284 | 'contentName': 'meta.function.parameters.python' 285 | 'patterns': [ 286 | { 287 | 'include': '#line_comments' 288 | } 289 | { 290 | # param = 3 291 | # param: int = 3 292 | 'begin': '(?:(\\*{0,2})|\\b)([a-zA-Z_][\\w_]*)\\s*(?:(:)\\s*([a-zA-Z_][\\w_]*))?\\s*(=)\\s*' 293 | 'beginCaptures': 294 | '1': 295 | 'name': 'keyword.operator.unpacking.arguments.python' 296 | '2': 297 | 'name': 'variable.parameter.function.python' 298 | '3': 299 | 'name': 'punctuation.separator.python' 300 | '4': 301 | 'name': 'storage.type.python' 302 | '5': 303 | 'name': 'keyword.operator.assignment.python' 304 | 'end': '(?!\\G)' 305 | 'patterns': [ 306 | { 307 | 'include': '$self' 308 | } 309 | ] 310 | } 311 | { 312 | # param 313 | # param: int 314 | 'match': '(?:(\\*{0,2})|\\b)([a-zA-Z_][\\w_]*)\\s*(?:(:)\\s*([a-zA-Z_][\\w_]*))?' 315 | 'captures': 316 | '1': 317 | 'name': 'keyword.operator.unpacking.arguments.python' 318 | '2': 319 | 'name': 'variable.parameter.function.python' 320 | '3': 321 | 'name': 'punctuation.separator.python' 322 | '4': 323 | 'name': 'storage.type.python' 324 | } 325 | { 326 | 'match': ',' 327 | 'name': 'punctuation.separator.parameters.python' 328 | } 329 | ] 330 | } 331 | { 332 | 'match': '(->)\\s*([A-Za-z_][\\w_]*)(?=\\s*:)' 333 | 'captures': 334 | '1': 335 | 'name': 'keyword.operator.function-annotation.python' 336 | '2': 337 | 'name': 'storage.type.python' 338 | } 339 | ] 340 | } 341 | { 342 | 'begin': '\\b(lambda)\\b' 343 | 'beginCaptures': 344 | '1': 345 | 'name': 'storage.type.function.inline.python' 346 | 'end': ':' 347 | 'endCaptures': 348 | '0': 349 | 'name': 'punctuation.definition.function.begin.python' 350 | 'name': 'meta.function.inline.python' 351 | 'patterns': [ 352 | { 353 | 'begin': '\\G' 354 | 'end': '(?=\\:)' 355 | 'contentName': 'meta.function.inline.parameters.python' 356 | 'patterns': [ 357 | { 358 | # param = 3 359 | 'begin': '(?:(\\*{0,2})|\\b)([a-zA-Z_][\\w_]*)\\s*(=)\\s*' 360 | 'beginCaptures': 361 | '1': 362 | 'name': 'keyword.operator.unpacking.arguments.python' 363 | '2': 364 | 'name': 'variable.parameter.function.python' 365 | '3': 366 | 'name': 'keyword.operator.assignment.python' 367 | 'end': '(?!\\G)' 368 | 'patterns': [ 369 | { 370 | 'include': '$self' 371 | } 372 | ] 373 | } 374 | { 375 | # param 376 | 'match': '(?:(\\*{0,2})|\\b)([a-zA-Z_][\\w_]*)\\b' 377 | 'captures': 378 | '1': 379 | 'name': 'keyword.operator.unpacking.arguments.python' 380 | '2': 381 | 'name': 'variable.parameter.function.python' 382 | } 383 | { 384 | 'match': ',' 385 | 'name': 'punctuation.separator.parameters.python' 386 | } 387 | ] 388 | } 389 | ] 390 | } 391 | { 392 | 'begin': '^\\s*(?=@\\s*[A-Za-z_][A-Za-z0-9_]*(?:\\.[a-zA-Z_][a-zA-Z_0-9]*)*\\s*\\()' 393 | 'comment': 'a decorator may be a function call which returns a decorator.' 
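# e.g. @functools.lru_cache(maxsize=None) -- the dotted name after "@" is scoped as
# entity.name.function.decorator.python, and the parenthesised arguments fall through
# to the argument patterns below.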
394 | 'end': '(\\))' 395 | 'endCaptures': 396 | '1': 397 | 'name': 'punctuation.definition.arguments.end.python' 398 | 'name': 'meta.function.decorator.python' 399 | 'patterns': [ 400 | { 401 | 'begin': '(?=(@)\\s*[A-Za-z_][A-Za-z0-9_]*(?:\\.[A-Za-z_][A-Za-z0-9_]*)*\\s*\\()' 402 | 'beginCaptures': 403 | '1': 404 | 'name': 'punctuation.definition.decorator.python' 405 | 'contentName': 'entity.name.function.decorator.python' 406 | 'end': '(?=\\s*\\()' 407 | 'patterns': [ 408 | { 409 | 'include': '#dotted_name' 410 | } 411 | ] 412 | } 413 | { 414 | 'begin': '(\\()' 415 | 'beginCaptures': 416 | '1': 417 | 'name': 'punctuation.definition.arguments.begin.python' 418 | 'contentName': 'meta.function.decorator.arguments.python' 419 | 'end': '(?=\\))' 420 | 'patterns': [ 421 | { 422 | 'include': '#keyword_arguments' 423 | } 424 | { 425 | 'include': '$self' 426 | } 427 | ] 428 | } 429 | ] 430 | } 431 | { 432 | 'begin': '^\\s*(?=@\\s*[A-Za-z_][A-Za-z0-9_]*(?:\\.[a-zA-Z_][a-zA-Z_0-9]*)*)' 433 | 'contentName': 'entity.name.function.decorator.python' 434 | 'end': '(?=\\s|$\\n?|#)' 435 | 'name': 'meta.function.decorator.python' 436 | 'patterns': [ 437 | { 438 | 'begin': '(?=(@)\\s*[A-Za-z_][A-Za-z0-9_]*(\\.[A-Za-z_][A-Za-z0-9_]*)*)' 439 | 'beginCaptures': 440 | '1': 441 | 'name': 'punctuation.definition.decorator.python' 442 | 'end': '(?=\\s|$\\n?|#)' 443 | 'patterns': [ 444 | { 445 | 'include': '#dotted_name' 446 | } 447 | ] 448 | } 449 | ] 450 | } 451 | { 452 | 'include': '#function_calls' 453 | } 454 | { 455 | 'include': '#method_calls' 456 | } 457 | { 458 | 'include': '#objects' 459 | } 460 | { 461 | 'include': '#properties' 462 | } 463 | { 464 | 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[a-zA-Z_][a-zA-Z_0-9]*)*\\s*\\[)' 465 | 'end': '(\\])' 466 | 'endCaptures': 467 | '1': 468 | 'name': 'punctuation.definition.arguments.end.python' 469 | 'name': 'meta.item-access.python' 470 | 'patterns': [ 471 | { 472 | 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[A-Za-z_][A-Za-z0-9_]*)*\\s*\\[)' 473 | 'end': '(?=\\s*\\[)' 474 | 'patterns': [ 475 | { 476 | 'include': '#dotted_name' 477 | } 478 | ] 479 | } 480 | { 481 | 'begin': '(\\[)' 482 | 'beginCaptures': 483 | '1': 484 | 'name': 'punctuation.definition.arguments.begin.python' 485 | 'contentName': 'meta.item-access.arguments.python' 486 | 'end': '(?=\\])' 487 | 'patterns': [ 488 | { 489 | 'include': '$self' 490 | } 491 | ] 492 | } 493 | ] 494 | } 495 | { 496 | 'begin': '(?<=\\)|\\])\\s+(\\[)' 497 | 'beginCaptures': 498 | '1': 499 | 'name': 'punctuation.definition.arguments.begin.python' 500 | 'contentName': 'meta.item-access.arguments.python' 501 | 'end': '(\\])' 502 | 'endCaptures': 503 | '1': 504 | 'name': 'punctuation.definition.arguments.end.python' 505 | 'name': 'meta.item-access.python' 506 | 'patterns': [ 507 | { 508 | 'include': '$self' 509 | } 510 | ] 511 | } 512 | { 513 | 'captures': 514 | '1': 515 | 'name': 'storage.type.class.python' 516 | 'match': '\\b(class)\\b' 517 | } 518 | { 519 | 'include': '#line_continuation' 520 | } 521 | { 522 | 'match': '\\b(None|True|False|Ellipsis|NotImplemented)\\b' 523 | 'name': 'constant.language.python' 524 | } 525 | { 526 | 'include': '#string_quoted_single' 527 | } 528 | { 529 | 'include': '#string_quoted_double' 530 | } 531 | { 532 | 'begin': '(\\()' 533 | 'end': '(\\))' 534 | 'patterns': [ 535 | { 536 | 'include': '$self' 537 | } 538 | ] 539 | } 540 | { 541 | 'captures': 542 | '1': 543 | 'name': 'punctuation.definition.list.begin.python' 544 | '2': 545 | 'name': 'meta.empty-list.python' 546 | '3': 547 | 'name': 
'punctuation.definition.list.end.python' 548 | 'match': '(\\[)(\\s*(\\]))\\b' 549 | } 550 | { 551 | 'begin': '(\\[)' 552 | 'beginCaptures': 553 | '1': 554 | 'name': 'punctuation.definition.list.begin.python' 555 | 'end': '(\\])' 556 | 'endCaptures': 557 | '1': 558 | 'name': 'punctuation.definition.list.end.python' 559 | 'name': 'meta.structure.list.python' 560 | 'patterns': [ 561 | { 562 | 'begin': '(?<=\\[|\\,)\\s*(?![\\],])' 563 | 'contentName': 'meta.structure.list.item.python' 564 | 'end': '\\s*(?:(,)|(?=\\]))' 565 | 'endCaptures': 566 | '1': 567 | 'name': 'punctuation.separator.list.python' 568 | 'patterns': [ 569 | { 570 | 'include': '$self' 571 | } 572 | ] 573 | } 574 | ] 575 | } 576 | { 577 | 'captures': 578 | '1': 579 | 'name': 'punctuation.definition.tuple.begin.python' 580 | '2': 581 | 'name': 'meta.empty-tuple.python' 582 | '3': 583 | 'name': 'punctuation.definition.tuple.end.python' 584 | 'match': '(\\()(\\s*(\\)))' 585 | 'name': 'meta.structure.tuple.python' 586 | } 587 | { 588 | 'captures': 589 | '1': 590 | 'name': 'punctuation.definition.dictionary.begin.python' 591 | '2': 592 | 'name': 'meta.empty-dictionary.python' 593 | '3': 594 | 'name': 'punctuation.definition.dictionary.end.python' 595 | 'match': '(\\{)(\\s*(\\}))' 596 | 'name': 'meta.structure.dictionary.python' 597 | } 598 | { 599 | 'begin': '(\\{)' 600 | 'beginCaptures': 601 | '1': 602 | 'name': 'punctuation.definition.dictionary.begin.python' 603 | 'end': '(\\})' 604 | 'endCaptures': 605 | '1': 606 | 'name': 'punctuation.definition.dictionary.end.python' 607 | 'name': 'meta.structure.dictionary.python' 608 | 'patterns': [ 609 | { 610 | 'begin': '(?<=\\{|\\,|^)\\s*(?![\\},])' 611 | 'contentName': 'meta.structure.dictionary.key.python' 612 | 'end': '\\s*(?:(?=\\})|(\\:))' 613 | 'endCaptures': 614 | '1': 615 | 'name': 'punctuation.separator.valuepair.dictionary.python' 616 | 'patterns': [ 617 | { 618 | 'include': '$self' 619 | } 620 | ] 621 | } 622 | { 623 | 'begin': '(?<=\\:|^)\\s*' 624 | 'contentName': 'meta.structure.dictionary.value.python' 625 | 'end': '\\s*(?:(?=\\})|(,))' 626 | 'endCaptures': 627 | '1': 628 | 'name': 'punctuation.separator.dictionary.python' 629 | 'patterns': [ 630 | { 631 | 'include': '$self' 632 | } 633 | ] 634 | } 635 | ] 636 | } 637 | ] 638 | 'repository': 639 | 'builtin_exceptions': 640 | 'match': '(?x)\\b(\n\t\t\t\t(\n\t\t\t\t\tArithmetic|Assertion|Attribute|BlockingIO|BrokenPipe|Buffer|ChildProcess|\n\t\t\t\t\tConnection(Aborted|Refused|Reset)?|EOF|Environment|FileExists|\n\t\t\t\t\tFileNotFound|FloatingPoint|Interrupted|IO|IsADirectoryError|\n\t\t\t\t\tImport|Indentation|Index|Key|Lookup|Memory|Name|NotADirectory|\n\t\t\t\t\tNotImplemented|OS|Overflow|Permission|ProcessLookup|Recursion|Reference|\n\t\t\t\t\tRuntime|Standard|Syntax|System|Tab|Timeout|Type|UnboundLocal|\n\t\t\t\t\tUnicode(Encode|Decode|Translate)?|Value|VMS|Windows|ZeroDivision\n\t\t\t\t)Error|\n\t\t\t\t((Pending)?Deprecation|Runtime|Syntax|User|Future|Import|Unicode|Bytes)?Warning|\n\t\t\t\t(Base)?Exception|\n\t\t\t\tSystemExit|StopAsyncIteration|StopIteration|NotImplemented|KeyboardInterrupt|GeneratorExit\n\t\t\t)\\b' 641 | 'name': 'support.type.exception.python' 642 | 'builtin_functions': 643 | 'match': 
'(?x)\\b(__import__|abs|all|any|ascii|bin|bool|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip|file|long|raw_input|reduce|reload|unichr|unicode|xrange|apply|buffer|coerce|intern|execfile)\\b' 644 | 'name': 'support.function.builtin.python' 645 | 'builtin_types': 646 | 'match': '(?x)\\b(\n\t\t\t\tbasestring|bool|buffer|bytearray|bytes|complex|dict|float|frozenset|int|\n\t\t\t\tlist|long|memoryview|object|range|set|slice|str|tuple|unicode|xrange\n\t\t\t)\\b' 647 | 'name': 'support.type.python' 648 | 'docstrings': 649 | 'patterns': [ 650 | { 651 | 'begin': '^\\s*(?=[uU]?[rR]?""")' 652 | 'end': '(?<=""")' 653 | 'name': 'comment.block.python' 654 | 'patterns': [ 655 | { 656 | 'include': '#string_quoted_double' 657 | } 658 | ] 659 | } 660 | { 661 | 'begin': '^\\s*(?=[uU]?[rR]?\'\'\')' 662 | 'end': '(?<=\'\'\')' 663 | 'name': 'comment.block.python' 664 | 'patterns': [ 665 | { 666 | 'include': '#string_quoted_single' 667 | } 668 | ] 669 | } 670 | ] 671 | 'function_calls': 672 | 'patterns': [ 673 | { 674 | 'begin': '(?:([A-Za-z_][A-Za-z0-9_]*)|(?<=\\)|\\]))\\s*(\\()' 675 | 'beginCaptures': 676 | '1': 677 | 'patterns': [ 678 | { 679 | 'include': '#builtin_functions' 680 | } 681 | { 682 | 'include': '#function_names' 683 | } 684 | ] 685 | '2': 686 | 'name': 'punctuation.definition.arguments.begin.bracket.round.python' 687 | 'end': '\\)' 688 | 'endCaptures': 689 | '0': 690 | 'name': 'punctuation.definition.arguments.end.bracket.round.python' 691 | 'name': 'meta.function-call.python' 692 | 'contentName': 'meta.function-call.arguments.python' 693 | 'patterns': [ 694 | { 695 | 'include': '#keyword_arguments' 696 | } 697 | { 698 | 'match': ',' 699 | 'name': 'punctuation.separator.arguments.python' 700 | } 701 | { 702 | 'include': '$self' 703 | } 704 | ] 705 | } 706 | ] 707 | 'dotted_name': 708 | 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[A-Za-z_][A-Za-z0-9_]*)*)' 709 | 'end': '(?![A-Za-z0-9_\\.])' 710 | 'patterns': [ 711 | { 712 | 'begin': '(\\.)(?=[A-Za-z_][A-Za-z0-9_]*)' 713 | 'end': '(?![A-Za-z0-9_])' 714 | 'patterns': [ 715 | { 716 | 'include': '#magic_function_names' 717 | } 718 | { 719 | 'include': '#magic_variable_names' 720 | } 721 | { 722 | 'include': '#illegal_names' 723 | } 724 | { 725 | 'include': '#generic_names' 726 | } 727 | ] 728 | } 729 | { 730 | 'begin': '(?=^])? # fill followed by align 1018 | [+\\- ]? # sign (space at the end is intentional) 1019 | \\#? # alternate form 1020 | 0? 1021 | \\d* # width 1022 | [_,]? # grouping option 1023 | (\\.\\d+)? # precision 1024 | [bcdeEfFgGnosxX%]? # type 1025 | )? 1026 | } 1027 | ''' 1028 | 'name': 'constant.other.placeholder.python' 1029 | 'regular_expressions': 1030 | 'comment': 'Changed disabled to 1 to turn off syntax highlighting in “r” strings.' 1031 | 'disabled': 0 1032 | 'patterns': [ 1033 | { 1034 | 'include': 'source.regexp.python' 1035 | } 1036 | ] 1037 | 'string_formatting': 1038 | 'patterns': [ 1039 | { 1040 | # https://docs.python.org/2/library/stdtypes.html#string-formatting (deprecated) 1041 | 'match': '''(?x) 1042 | % 1043 | (\\([a-zA-Z_]+\\))? # mapping key 1044 | [#0+\\- ]? # conversion flags (space at the end is intentional) 1045 | (\\d+|\\*)? # minimum field width 1046 | (\\.(\\d+|\\*))? 
# precision 1047 | [hlL]? # length modifier 1048 | [diouxXeEfFgGcrs%] # conversion type 1049 | ''' 1050 | 'name': 'constant.other.placeholder.python' 1051 | } 1052 | { 1053 | # https://docs.python.org/3/library/string.html#format-string-syntax 1054 | 'match': '''(?x) 1055 | { 1056 | (?: 1057 | (?: 1058 | \\d # integer 1059 | | 1060 | [a-zA-Z_]\\w* # identifier 1061 | ) 1062 | (?: 1063 | \\.[a-zA-Z_]\\w* # attribute name 1064 | | 1065 | \\[[^\\]]+\\] # element index 1066 | )* 1067 | )? 1068 | (?:![rsa])? # conversion 1069 | (?: 1070 | # Yup, this is disgusting. But top-level format specifiers can have nested replacement fields. 1071 | : 1072 | (?:(?:.|({[^}]*}))?(?:[<>=^]|({[^}]*})))? # fill followed by align 1073 | (?:[+\\- ]|({[^}]*}))? # sign (space at the end is intentional) 1074 | (?:\\#|({[^}]*}))? # alternate form 1075 | (?:0|({[^}]*}))? 1076 | (?:\\d+|({[^}]*}))? # width 1077 | (?:[_,]|({[^}]*}))? # grouping option 1078 | (?:\\.(?:\\d+|({[^}]*}))|({[^}]*}))? # precision 1079 | (?:[bcdeEfFgGnosxX%]|({[^}]*}))? # type 1080 | )? 1081 | } 1082 | ''' 1083 | 'name': 'constant.other.placeholder.python' 1084 | 'captures': 1085 | '1': 'patterns': [{'include': '#nested_replacement_field'}] 1086 | '2': 'patterns': [{'include': '#nested_replacement_field'}] 1087 | '3': 'patterns': [{'include': '#nested_replacement_field'}] 1088 | '4': 'patterns': [{'include': '#nested_replacement_field'}] 1089 | '5': 'patterns': [{'include': '#nested_replacement_field'}] 1090 | '6': 'patterns': [{'include': '#nested_replacement_field'}] 1091 | '7': 'patterns': [{'include': '#nested_replacement_field'}] 1092 | '8': 'patterns': [{'include': '#nested_replacement_field'}] 1093 | '9': 'patterns': [{'include': '#nested_replacement_field'}] 1094 | '10': 'patterns': [{'include': '#nested_replacement_field'}] 1095 | } 1096 | ] 1097 | 'string_interpolation': 1098 | # https://docs.python.org/3/reference/lexical_analysis.html#f-strings 1099 | # and https://www.python.org/dev/peps/pep-0498/ 1100 | # Unlike string_formatting, string_interpolation can contain expressions 1101 | 'begin': '{' 1102 | 'beginCaptures': 1103 | '0': 1104 | 'name': 'punctuation.definition.interpolation.begin.bracket.curly.python' 1105 | 'end': '''(?x)(?!\\G) 1106 | ( 1107 | (?:![rsa])? # conversion 1108 | (?: 1109 | # Yup, this is disgusting. But top-level format specifiers can have nested replacement fields. 1110 | : 1111 | (?:(?:.|({[^}]*}))?(?:[<>=^]|({[^}]*})))? # fill followed by align 1112 | (?:[+\\- ]|({[^}]*}))? # sign (space at the end is intentional) 1113 | (?:\\#|({[^}]*}))? # alternate form 1114 | (?:0|({[^}]*}))? 1115 | (?:\\d+|({[^}]*}))? # width 1116 | (?:[_,]|({[^}]*}))? # grouping option 1117 | (?:\\.(?:\\d+|({[^}]*}))|({[^}]*}))? # precision 1118 | (?:[bcdeEfFgGnosxX%]|({[^}]*}))? # type 1119 | )? 
1120 | ) 1121 | (}) 1122 | ''' 1123 | 'endCaptures': 1124 | '1': 1125 | 'name': 'constant.other.placeholder.python' 1126 | '2': 'patterns': [{'include': '#nested_replacement_field'}] 1127 | '3': 'patterns': [{'include': '#nested_replacement_field'}] 1128 | '4': 'patterns': [{'include': '#nested_replacement_field'}] 1129 | '5': 'patterns': [{'include': '#nested_replacement_field'}] 1130 | '6': 'patterns': [{'include': '#nested_replacement_field'}] 1131 | '7': 'patterns': [{'include': '#nested_replacement_field'}] 1132 | '8': 'patterns': [{'include': '#nested_replacement_field'}] 1133 | '9': 'patterns': [{'include': '#nested_replacement_field'}] 1134 | '10': 'patterns': [{'include': '#nested_replacement_field'}] 1135 | '11': 'patterns': [{'include': '#nested_replacement_field'}] 1136 | '12': 1137 | 'name': 'punctuation.definition.interpolation.end.bracket.curly.python' 1138 | 'name': 'meta.interpolation.python' 1139 | 'contentName': 'meta.embedded.python' 1140 | 'patterns': [ 1141 | { 1142 | 'match': '\\\\' 1143 | 'name': 'invalid.illegal.backslash.python' 1144 | } 1145 | { 1146 | 'include': '$self' 1147 | } 1148 | ] 1149 | 'string_quoted_double': 1150 | 'patterns': [ 1151 | { 1152 | 'begin': '([uU]r)(""")' 1153 | 'beginCaptures': 1154 | '1': 1155 | 'name': 'storage.type.string.python' 1156 | '2': 1157 | 'name': 'punctuation.definition.string.begin.python' 1158 | 'comment': 'single quoted unicode-raw string' 1159 | 'end': '((?<=""")(")""|""")' 1160 | 'endCaptures': 1161 | '1': 1162 | 'name': 'punctuation.definition.string.end.python' 1163 | '2': 1164 | 'name': 'meta.empty-string.double.python' 1165 | 'name': 'string.quoted.double.block.unicode-raw-regex.python' 1166 | 'patterns': [ 1167 | { 1168 | 'include': '#string_formatting' 1169 | } 1170 | { 1171 | 'include': '#escaped_unicode_char' 1172 | } 1173 | { 1174 | 'include': '#escaped_char' 1175 | } 1176 | { 1177 | 'include': '#regular_expressions' 1178 | } 1179 | ] 1180 | } 1181 | { 1182 | 'begin': '([uU]R)(""")' 1183 | 'beginCaptures': 1184 | '1': 1185 | 'name': 'storage.type.string.python' 1186 | '2': 1187 | 'name': 'punctuation.definition.string.begin.python' 1188 | 'comment': 'single quoted unicode-raw string without regular expression highlighting' 1189 | 'end': '((?<=""")(")""|""")' 1190 | 'endCaptures': 1191 | '1': 1192 | 'name': 'punctuation.definition.string.end.python' 1193 | '2': 1194 | 'name': 'meta.empty-string.double.python' 1195 | 'name': 'string.quoted.double.block.unicode-raw.python' 1196 | 'patterns': [ 1197 | { 1198 | 'include': '#string_formatting' 1199 | } 1200 | { 1201 | 'include': '#escaped_unicode_char' 1202 | } 1203 | { 1204 | 'include': '#escaped_char' 1205 | } 1206 | ] 1207 | } 1208 | { 1209 | 'begin': '(r)(""")' 1210 | 'beginCaptures': 1211 | '1': 1212 | 'name': 'storage.type.string.python' 1213 | '2': 1214 | 'name': 'punctuation.definition.string.begin.python' 1215 | 'comment': 'double quoted raw string' 1216 | 'end': '((?<=""")(")""|""")' 1217 | 'endCaptures': 1218 | '1': 1219 | 'name': 'punctuation.definition.string.end.python' 1220 | '2': 1221 | 'name': 'meta.empty-string.double.python' 1222 | 'name': 'string.quoted.double.block.raw-regex.python' 1223 | 'patterns': [ 1224 | { 1225 | 'include': '#string_formatting' 1226 | } 1227 | { 1228 | 'include': '#escaped_char' 1229 | } 1230 | { 1231 | 'include': '#regular_expressions' 1232 | } 1233 | ] 1234 | } 1235 | { 1236 | 'begin': '(R)(""")' 1237 | 'beginCaptures': 1238 | '1': 1239 | 'name': 'storage.type.string.python' 1240 | '2': 1241 | 'name': 
'punctuation.definition.string.begin.python' 1242 | 'comment': 'double quoted raw string' 1243 | 'end': '((?<=""")(")""|""")' 1244 | 'endCaptures': 1245 | '1': 1246 | 'name': 'punctuation.definition.string.end.python' 1247 | '2': 1248 | 'name': 'meta.empty-string.double.python' 1249 | 'name': 'string.quoted.double.block.raw.python' 1250 | 'patterns': [ 1251 | { 1252 | 'include': '#string_formatting' 1253 | } 1254 | { 1255 | 'include': '#escaped_char' 1256 | } 1257 | ] 1258 | } 1259 | { 1260 | 'begin': '([uU])(""")' 1261 | 'beginCaptures': 1262 | '1': 1263 | 'name': 'storage.type.string.python' 1264 | '2': 1265 | 'name': 'punctuation.definition.string.begin.python' 1266 | 'comment': 'double quoted unicode string' 1267 | 'end': '((?<=""")(")""|""")' 1268 | 'endCaptures': 1269 | '1': 1270 | 'name': 'punctuation.definition.string.end.python' 1271 | '2': 1272 | 'name': 'meta.empty-string.double.python' 1273 | 'name': 'string.quoted.double.block.unicode.python' 1274 | 'patterns': [ 1275 | { 1276 | 'include': '#string_formatting' 1277 | } 1278 | { 1279 | 'include': '#escaped_unicode_char' 1280 | } 1281 | { 1282 | 'include': '#escaped_char' 1283 | } 1284 | ] 1285 | } 1286 | { 1287 | 'begin': '([fF])(""")' 1288 | 'beginCaptures': 1289 | '1': 1290 | 'name': 'storage.type.string.python' 1291 | '2': 1292 | 'name': 'punctuation.definition.string.begin.python' 1293 | 'end': '((?<=""")(")""|""")' 1294 | 'endCaptures': 1295 | '1': 1296 | 'name': 'punctuation.definition.string.end.python' 1297 | '2': 1298 | 'name': 'meta.empty-string.double.python' 1299 | 'name': 'string.quoted.double.block.format.python' 1300 | 'patterns': [ 1301 | { 1302 | 'include': '#escaped_char' 1303 | } 1304 | { 1305 | 'include': '#string_interpolation' 1306 | } 1307 | { 1308 | 'match': '}' 1309 | 'name': 'invalid.illegal.closing-curly-bracket.python' 1310 | } 1311 | ] 1312 | } 1313 | { 1314 | 'begin': '([rR][fF]|[fF][rR])(""")' 1315 | 'beginCaptures': 1316 | '1': 1317 | 'name': 'storage.type.string.python' 1318 | '2': 1319 | 'name': 'punctuation.definition.string.begin.python' 1320 | 'end': '((?<=""")(")""|""")' 1321 | 'endCaptures': 1322 | '1': 1323 | 'name': 'punctuation.definition.string.end.python' 1324 | '2': 1325 | 'name': 'meta.empty-string.double.python' 1326 | 'name': 'string.quoted.double.block.raw-format.python' 1327 | 'patterns': [ 1328 | { 1329 | 'include': '#escaped_char' 1330 | } 1331 | { 1332 | 'include': '#string_interpolation' 1333 | } 1334 | { 1335 | 'match': '}' 1336 | 'name': 'invalid.illegal.closing-curly-bracket.python' 1337 | } 1338 | ] 1339 | } 1340 | { 1341 | 'begin': '([bB])(""")' 1342 | 'beginCaptures': 1343 | '1': 1344 | 'name': 'storage.type.string.python' 1345 | '2': 1346 | 'name': 'punctuation.definition.string.begin.python' 1347 | 'end': '((?<=""")(")""|""")' 1348 | 'endCaptures': 1349 | '1': 1350 | 'name': 'punctuation.definition.string.end.python' 1351 | '2': 1352 | 'name': 'meta.empty-string.double.python' 1353 | 'name': 'string.quoted.double.block.binary.python' 1354 | 'patterns': [ 1355 | { 1356 | 'include': '#escaped_char' 1357 | } 1358 | { 1359 | 'include': '#string_formatting' 1360 | } 1361 | { 1362 | 'match': '[^\\x{01}-\\x{7f}]' 1363 | 'name': 'invalid.illegal.character-out-of-range.python' 1364 | } 1365 | ] 1366 | } 1367 | { 1368 | 'begin': '([rR][bB]|[bB][rR])(""")' 1369 | 'beginCaptures': 1370 | '1': 1371 | 'name': 'storage.type.string.python' 1372 | '2': 1373 | 'name': 'punctuation.definition.string.begin.python' 1374 | 'end': '((?<=""")(")""|""")' 1375 | 'endCaptures': 1376 | '1': 
1377 | 'name': 'punctuation.definition.string.end.python' 1378 | '2': 1379 | 'name': 'meta.empty-string.double.python' 1380 | 'name': 'string.quoted.double.block.raw-binary.python' 1381 | 'patterns': [ 1382 | { 1383 | 'include': '#escaped_char' 1384 | } 1385 | { 1386 | 'include': '#string_formatting' 1387 | } 1388 | { 1389 | 'match': '[^\\x{01}-\\x{7f}]' 1390 | 'name': 'invalid.illegal.character-out-of-range.python' 1391 | } 1392 | ] 1393 | } 1394 | { 1395 | 'captures': 1396 | '1': 1397 | 'name': 'storage.type.string.python' 1398 | '2': 1399 | 'name': 'punctuation.definition.string.begin.python' 1400 | '3': 1401 | 'patterns': [ 1402 | {'include': '#string_formatting'} 1403 | {'include': '#escaped_unicode_char'} 1404 | {'include': '#escaped_char'} 1405 | {'include': '#regular_expressions'} 1406 | ] 1407 | '4': 1408 | 'name': 'punctuation.definition.string.end.python' 1409 | 'comment': 'double-quoted raw string' 1410 | 'match': '([uU]r)(")((?:[^"\\\\]|\\\\.)*)(")' 1411 | 'name': 'string.quoted.double.single-line.unicode-raw-regex.python' 1412 | } 1413 | { 1414 | 'begin': '([uU]R)(")' 1415 | 'beginCaptures': 1416 | '1': 1417 | 'name': 'storage.type.string.python' 1418 | '2': 1419 | 'name': 'punctuation.definition.string.begin.python' 1420 | 'comment': 'double-quoted raw string' 1421 | 'end': '((?<=")(")|")|(\\n)' 1422 | 'endCaptures': 1423 | '1': 1424 | 'name': 'punctuation.definition.string.end.python' 1425 | '2': 1426 | 'name': 'meta.empty-string.double.python' 1427 | '3': 1428 | 'name': 'invalid.illegal.unclosed-string.python' 1429 | 'name': 'string.quoted.double.single-line.unicode-raw.python' 1430 | 'patterns': [ 1431 | { 1432 | 'include': '#string_formatting' 1433 | } 1434 | { 1435 | 'include': '#escaped_unicode_char' 1436 | } 1437 | { 1438 | 'include': '#escaped_char' 1439 | } 1440 | ] 1441 | } 1442 | { 1443 | 'captures': 1444 | '1': 1445 | 'name': 'storage.type.string.python' 1446 | '2': 1447 | 'name': 'punctuation.definition.string.begin.python' 1448 | '3': 1449 | 'patterns': [ 1450 | {'include': '#string_formatting'} 1451 | {'include': '#escaped_char'} 1452 | {'include': '#regular_expressions'} 1453 | ] 1454 | '4': 1455 | 'name': 'punctuation.definition.string.end.python' 1456 | 'comment': 'double-quoted raw string' 1457 | 'match': '(r)(")((?:[^"\\\\]|\\\\.)*)(")' 1458 | 'name': 'string.quoted.double.single-line.raw-regex.python' 1459 | } 1460 | { 1461 | 'begin': '(R)(")' 1462 | 'beginCaptures': 1463 | '1': 1464 | 'name': 'storage.type.string.python' 1465 | '2': 1466 | 'name': 'punctuation.definition.string.begin.python' 1467 | 'comment': 'double-quoted raw string' 1468 | 'end': '((?<=")(")|")|(\\n)' 1469 | 'endCaptures': 1470 | '1': 1471 | 'name': 'punctuation.definition.string.end.python' 1472 | '2': 1473 | 'name': 'meta.empty-string.double.python' 1474 | '3': 1475 | 'name': 'invalid.illegal.unclosed-string.python' 1476 | 'name': 'string.quoted.double.single-line.raw.python' 1477 | 'patterns': [ 1478 | { 1479 | 'include': '#string_formatting' 1480 | } 1481 | { 1482 | 'include': '#escaped_char' 1483 | } 1484 | ] 1485 | } 1486 | { 1487 | 'begin': '([uU])(")' 1488 | 'beginCaptures': 1489 | '1': 1490 | 'name': 'storage.type.string.python' 1491 | '2': 1492 | 'name': 'punctuation.definition.string.begin.python' 1493 | 'comment': 'double quoted unicode string' 1494 | 'end': '((?<=")(")|")|(\\n)' 1495 | 'endCaptures': 1496 | '1': 1497 | 'name': 'punctuation.definition.string.end.python' 1498 | '2': 1499 | 'name': 'meta.empty-string.double.python' 1500 | '3': 1501 | 'name': 
'invalid.illegal.unclosed-string.python' 1502 | 'name': 'string.quoted.double.single-line.unicode.python' 1503 | 'patterns': [ 1504 | { 1505 | 'include': '#string_formatting' 1506 | } 1507 | { 1508 | 'include': '#escaped_unicode_char' 1509 | } 1510 | { 1511 | 'include': '#escaped_char' 1512 | } 1513 | ] 1514 | } 1515 | { 1516 | 'begin': '([fF])(")' 1517 | 'beginCaptures': 1518 | '1': 1519 | 'name': 'storage.type.string.python' 1520 | '2': 1521 | 'name': 'punctuation.definition.string.begin.python' 1522 | 'end': '((?<=")(")|")|(\\n)' 1523 | 'endCaptures': 1524 | '1': 1525 | 'name': 'punctuation.definition.string.end.python' 1526 | '2': 1527 | 'name': 'meta.empty-string.double.python' 1528 | '3': 1529 | 'name': 'invalid.illegal.unclosed-string.python' 1530 | 'name': 'string.quoted.double.single-line.format.python' 1531 | 'patterns': [ 1532 | { 1533 | 'include': '#escaped_char' 1534 | } 1535 | { 1536 | 'include': '#string_interpolation' 1537 | } 1538 | { 1539 | 'match': '}' 1540 | 'name': 'invalid.illegal.closing-curly-bracket.python' 1541 | } 1542 | ] 1543 | } 1544 | { 1545 | 'begin': '([rR][fF]|[fF][rR])(")' 1546 | 'beginCaptures': 1547 | '1': 1548 | 'name': 'storage.type.string.python' 1549 | '2': 1550 | 'name': 'punctuation.definition.string.begin.python' 1551 | 'end': '((?<=")(")|")|(\\n)' 1552 | 'endCaptures': 1553 | '1': 1554 | 'name': 'punctuation.definition.string.end.python' 1555 | '2': 1556 | 'name': 'meta.empty-string.double.python' 1557 | '3': 1558 | 'name': 'invalid.illegal.unclosed-string.python' 1559 | 'name': 'string.quoted.double.single-line.raw-format.python' 1560 | 'patterns': [ 1561 | { 1562 | 'include': '#escaped_char' 1563 | } 1564 | { 1565 | 'include': '#string_interpolation' 1566 | } 1567 | { 1568 | 'match': '}' 1569 | 'name': 'invalid.illegal.closing-curly-bracket.python' 1570 | } 1571 | ] 1572 | } 1573 | { 1574 | 'begin': '([bB])(")' 1575 | 'beginCaptures': 1576 | '1': 1577 | 'name': 'storage.type.string.python' 1578 | '2': 1579 | 'name': 'punctuation.definition.string.begin.python' 1580 | 'end': '((?<=")(")|")|(\\n)' 1581 | 'endCaptures': 1582 | '1': 1583 | 'name': 'punctuation.definition.string.end.python' 1584 | '2': 1585 | 'name': 'meta.empty-string.double.python' 1586 | '3': 1587 | 'name': 'invalid.illegal.unclosed-string.python' 1588 | 'name': 'string.quoted.double.single-line.binary.python' 1589 | 'patterns': [ 1590 | { 1591 | 'include': '#escaped_char' 1592 | } 1593 | { 1594 | 'include': '#string_formatting' 1595 | } 1596 | { 1597 | 'match': '[^\\x{01}-\\x{7f}]' 1598 | 'name': 'invalid.illegal.character-out-of-range.python' 1599 | } 1600 | ] 1601 | } 1602 | { 1603 | 'begin': '([rR][bB]|[bB][rR])(")' 1604 | 'beginCaptures': 1605 | '1': 1606 | 'name': 'storage.type.string.python' 1607 | '2': 1608 | 'name': 'punctuation.definition.string.begin.python' 1609 | 'end': '((?<=")(")|")|(\\n)' 1610 | 'endCaptures': 1611 | '1': 1612 | 'name': 'punctuation.definition.string.end.python' 1613 | '2': 1614 | 'name': 'meta.empty-string.double.python' 1615 | '3': 1616 | 'name': 'invalid.illegal.unclosed-string.python' 1617 | 'name': 'string.quoted.double.single-line.raw-binary.python' 1618 | 'patterns': [ 1619 | { 1620 | 'include': '#escaped_char' 1621 | } 1622 | { 1623 | 'include': '#string_formatting' 1624 | } 1625 | { 1626 | 'match': '[^\\x{01}-\\x{7f}]' 1627 | 'name': 'invalid.illegal.character-out-of-range.python' 1628 | } 1629 | ] 1630 | } 1631 | { 1632 | 'begin': '(""")' 1633 | 'beginCaptures': 1634 | '1': 1635 | 'name': 'punctuation.definition.string.begin.python' 
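# Triple-quoted strings whose first word is a SQL keyword (SELECT, INSERT, UPDATE, CREATE,
# REPLACE, ALTER, WITH, or DELETE when not followed by " /") are treated as embedded SQL,
# e.g. query = """SELECT * FROM users WHERE id = %s""" -- the body is handed to source.sql,
# and placeholders such as %s are still scoped via the meta.embedded.sql injection at the top of the file.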
1636 | 'comment': 'double quoted string' 1637 | 'end': '((?<=""")(")""|""")' 1638 | 'endCaptures': 1639 | '1': 1640 | 'name': 'punctuation.definition.string.end.python' 1641 | '2': 1642 | 'name': 'meta.empty-string.double.python' 1643 | 'name': 'string.quoted.double.block.python' 1644 | 'patterns': [ 1645 | { 1646 | 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE(?! \/)|CREATE|REPLACE|ALTER|WITH))' 1647 | 'name': 'meta.embedded.sql' 1648 | 'end': '(?=\\s*""")' 1649 | 'patterns': [ 1650 | { 1651 | 'include': 'source.sql' 1652 | } 1653 | ] 1654 | } 1655 | ] 1656 | } 1657 | { 1658 | 'begin': '(")(?=\\s*(SELECT|INSERT|UPDATE|DELETE(?! \/)|CREATE|REPLACE|ALTER|WITH))' 1659 | 'beginCaptures': 1660 | '1': 1661 | 'name': 'punctuation.definition.string.begin.python' 1662 | 'comment': 'double quoted string' 1663 | 'end': '((?<=")(")|")|(\\n)' 1664 | 'endCaptures': 1665 | '1': 1666 | 'name': 'punctuation.definition.string.end.python' 1667 | '2': 1668 | 'name': 'meta.empty-string.double.python' 1669 | '3': 1670 | 'name': 'invalid.illegal.unclosed-string.python' 1671 | 'name': 'string.quoted.double.single-line.sql.python' 1672 | 'contentName': 'meta.embedded.sql' 1673 | 'patterns': [ 1674 | { 1675 | 'include': 'source.sql' 1676 | } 1677 | ] 1678 | } 1679 | { 1680 | 'begin': '(""")' 1681 | 'beginCaptures': 1682 | '1': 1683 | 'name': 'punctuation.definition.string.begin.python' 1684 | 'comment': 'double quoted string' 1685 | 'end': '((?<=""")(")""|""")' 1686 | 'endCaptures': 1687 | '1': 1688 | 'name': 'punctuation.definition.string.end.python' 1689 | '2': 1690 | 'name': 'meta.empty-string.double.python' 1691 | 'name': 'string.quoted.double.block.python' 1692 | 'patterns': [ 1693 | { 1694 | 'include': '#string_formatting' 1695 | } 1696 | { 1697 | 'include': '#escaped_char' 1698 | } 1699 | ] 1700 | } 1701 | { 1702 | 'begin': '(")' 1703 | 'beginCaptures': 1704 | '1': 1705 | 'name': 'punctuation.definition.string.begin.python' 1706 | 'comment': 'double quoted string' 1707 | 'end': '((?<=")(")|")|(\\n)' 1708 | 'endCaptures': 1709 | '1': 1710 | 'name': 'punctuation.definition.string.end.python' 1711 | '2': 1712 | 'name': 'meta.empty-string.double.python' 1713 | '3': 1714 | 'name': 'invalid.illegal.unclosed-string.python' 1715 | 'name': 'string.quoted.double.single-line.python' 1716 | 'patterns': [ 1717 | { 1718 | 'include': '#string_formatting' 1719 | } 1720 | { 1721 | 'include': '#escaped_char' 1722 | } 1723 | ] 1724 | } 1725 | ] 1726 | 'string_quoted_single': 1727 | 'patterns': [ 1728 | { 1729 | 'captures': 1730 | '1': 1731 | 'name': 'punctuation.definition.string.begin.python' 1732 | '2': 1733 | 'name': 'punctuation.definition.string.end.python' 1734 | '3': 1735 | 'name': 'meta.empty-string.single.python' 1736 | 'match': '(?