├── tests ├── __init__.py ├── texts │ ├── comments.py │ ├── debugging.py │ ├── blank_lines.py │ ├── backslashes.py │ ├── __init__.py │ ├── string_interpolation.py │ ├── raise_exception.py │ ├── indents.py │ ├── single_quotes.py │ ├── naming.py │ ├── builtins.py │ ├── mutables.py │ ├── duplications.py │ ├── imports.py │ ├── spacing.py │ └── declerations.py ├── test_comments.py ├── test_raise_exception.py ├── test_backslashes.py ├── test_string_interpolation.py ├── test_debugging.py ├── test_naming.py ├── test_blank_lines.py ├── test_builtins.py ├── test_indents.py ├── test_duplications.py ├── test_single_quotes.py ├── test_mutables.py ├── test_imports.py └── test_checker.py ├── flake8_intsights ├── __init__.py ├── checkers │ ├── backslashes.py │ ├── single_quotes.py │ ├── comments.py │ ├── __init__.py │ ├── raise_exception.py │ ├── debugging.py │ ├── string_interpolation.py │ ├── mutables.py │ ├── blank_lines.py │ ├── indents.py │ ├── naming.py │ ├── _errors.py │ ├── duplications.py │ ├── imports.py │ ├── builtins.py │ ├── spacing.py │ └── _checker.py └── checker.py ├── images ├── logo.png └── examples │ ├── declarations - lines diff.png │ └── declarations - same line diff.png ├── setup.cfg ├── requirements.txt ├── .snyk ├── .flake8 ├── .pre-commit-hooks.yaml ├── cortex.yaml ├── .github └── workflows │ ├── deploy.yml │ └── build.yml ├── LICENSE ├── setup.py ├── .gitignore └── TODO.md /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /flake8_intsights/__init__.py: -------------------------------------------------------------------------------- 1 | from . import checkers 2 | from . import checker 3 | -------------------------------------------------------------------------------- /images/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Intsights/flake8-intsights/HEAD/images/logo.png -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [aliases] 2 | test=pytest 3 | 4 | [tool:pytest] 5 | addopts = --tb=native -s 6 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | astroid==2.15.8 2 | zipp==3.19.1 3 | flake8 4 | flake8-assertive 5 | flake8-comprehensions 6 | -------------------------------------------------------------------------------- /.snyk: -------------------------------------------------------------------------------- 1 | version: v1.25.0 2 | language-settings: 3 | python: "3.10" 4 | ignore: {} 5 | patch: {} 6 | exclude: {} 7 | -------------------------------------------------------------------------------- /images/examples/declarations - lines diff.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Intsights/flake8-intsights/HEAD/images/examples/declarations - lines diff.png -------------------------------------------------------------------------------- /images/examples/declarations - same line diff.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Intsights/flake8-intsights/HEAD/images/examples/declarations - same line diff.png 
-------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E501, W503 3 | max-line-length = 120 4 | exclude = 5 | .git, 6 | __pycache__ 7 | 8 | [flake8:local-plugins] 9 | extension = 10 | I = flake8_intsights.checker:Checker 11 | -------------------------------------------------------------------------------- /tests/texts/comments.py: -------------------------------------------------------------------------------- 1 | comments_test_text_001 = ''' 2 | pass 3 | ''' 4 | comments_test_text_002 = ''' 5 | # some pass 6 | pass 7 | ''' 8 | comments_test_text_003 = ''' 9 | def func(): 10 | # some pass 11 | pass 12 | ''' 13 | -------------------------------------------------------------------------------- /.pre-commit-hooks.yaml: -------------------------------------------------------------------------------- 1 | - id: flake8-intsights 2 | name: flake8-intsights 3 | description: "Uncompromising and opinionated flake8 plugin which follows Intsights' practices" 4 | entry: flake8 5 | language: python 6 | language_version: python3 7 | require_serial: true 8 | types: [python] 9 | -------------------------------------------------------------------------------- /tests/texts/debugging.py: -------------------------------------------------------------------------------- 1 | debugging_test_text_001 = 'import bla' 2 | debugging_test_text_002 = 'import pdb' 3 | debugging_test_text_003 = 'import ipdb' 4 | debugging_test_text_004 = 'import pudb' 5 | debugging_test_text_005 = 'pdb = 10' 6 | debugging_test_text_006 = 'breakpoint()' 7 | debugging_test_text_007 = 'breakpoint = 0' 8 | -------------------------------------------------------------------------------- /tests/texts/blank_lines.py: -------------------------------------------------------------------------------- 1 | blank_lines_test_text_001 = '' 2 | blank_lines_test_text_002 = '\n' 3 | blank_lines_test_text_003 = '\n\n' 4 | blank_lines_test_text_004 = 'pass' 5 | blank_lines_test_text_005 = 'pass\n' 6 | blank_lines_test_text_006 = 'pass\n\n' 7 | blank_lines_test_text_007 = '\npass\n' 8 | blank_lines_test_text_008 = '\n\npass\n' 9 | -------------------------------------------------------------------------------- /tests/texts/backslashes.py: -------------------------------------------------------------------------------- 1 | backslashes_test_text_001 = ''' 2 | string = 'string' 3 | ''' 4 | backslashes_test_text_002 = ''' 5 | string = 'string_start \\ 6 | string end' 7 | ''' 8 | backslashes_test_text_003 = ''' 9 | if arg_one is not None \\ 10 | and arg_two is None: 11 | pass 12 | ''' 13 | backslashes_test_text_004 = ''' 14 | string = \'\'\' 15 | text \\\\ 16 | text 17 | \'\'\' 18 | ''' 19 | -------------------------------------------------------------------------------- /tests/texts/__init__.py: -------------------------------------------------------------------------------- 1 | from . import backslashes 2 | from . import blank_lines 3 | from . import builtins 4 | from . import comments 5 | from . import debugging 6 | from . import declerations 7 | from . import duplications 8 | from . import imports 9 | from . import indents 10 | from . import mutables 11 | from . import naming 12 | from . import raise_exception 13 | from . import single_quotes 14 | from . import spacing 15 | from . 
import string_interpolation 16 | -------------------------------------------------------------------------------- /tests/texts/string_interpolation.py: -------------------------------------------------------------------------------- 1 | string_interpolation_test_text_001 = ''' 2 | var_one = '' 3 | var_one = 'string'.format() 4 | var_one = 'string{}'.format(1) 5 | var_one = 'string{arg}'.format(arg=1) 6 | var_one = f'string{arg}' 7 | var_one = f'string' 8 | 9 | def function(): 10 | return 'string{arg}'.format(arg=1) 11 | 12 | def function(): 13 | return f'string{arg}' 14 | 15 | var_one = 'string' % () 16 | var_one = 'string %s' % (1,) 17 | ''' 18 | -------------------------------------------------------------------------------- /cortex.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | info: 3 | title: Flake8 Intsights 4 | description: Uncompromising and opinionated flake8 plugin which follows Intsights' 5 | practices 6 | x-cortex-git: 7 | github: 8 | alias: intsightsorg 9 | repository: Intsights/flake8-intsights 10 | x-cortex-tag: flake8-intsights 11 | x-cortex-type: service 12 | x-cortex-domain-parents: 13 | - tag: threatintel-platform-delivery 14 | x-cortex-groups: 15 | - exposure:opensource 16 | - target:library 17 | openapi: 3.0.1 18 | servers: 19 | - url: "/" 20 | -------------------------------------------------------------------------------- /tests/texts/raise_exception.py: -------------------------------------------------------------------------------- 1 | raise_exception_test_text_001 = ''' 2 | raise Exception 3 | raise Exception() 4 | raise Exception('exception message') 5 | raise Exception from exc 6 | raise Exception() from exc 7 | 8 | try: 9 | raise Exception 10 | raise Exception() 11 | except Exception as exception: 12 | raise Exception 13 | raise Exception() 14 | 15 | raise exception 16 | raise Exception from exception 17 | raise Exception() from exception 18 | 19 | try: 20 | exception = Exception() 21 | except: 22 | raise 23 | raise exception 24 | raise self.exception 25 | ''' 26 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | name: Deploy 2 | on: 3 | release: 4 | types: 5 | - published 6 | workflow_dispatch: null 7 | jobs: 8 | deploy: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Checkout 12 | uses: actions/checkout@v4 13 | - name: Set up Python 3.11 14 | uses: actions/setup-python@v4 15 | with: 16 | python-version: '3.11' 17 | - name: Build a source tarball 18 | run: >- 19 | python -m pip install --user --upgrade setuptools; 20 | python setup.py sdist; 21 | - name: Publish distribution 📦 to PyPI 22 | uses: pypa/gh-action-pypi-publish@release/v1 23 | with: 24 | password: ${{ secrets.pypi_password }} 25 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/backslashes.py: -------------------------------------------------------------------------------- 1 | from . 
import _checker 2 | 3 | 4 | class Checker( 5 | _checker.BaseChecker, 6 | ): 7 | @classmethod 8 | def check( 9 | cls, 10 | filename, 11 | lines, 12 | tokens, 13 | start_position_to_token, 14 | ast_tree, 15 | astroid_tree, 16 | all_astroid_nodes, 17 | ): 18 | for lineno, line in enumerate(lines, 1): 19 | if line.endswith('\\') and not line.endswith('\\\\'): 20 | yield from cls.error_yielder.yield_error( 21 | error_id='I093', 22 | line_number=lineno, 23 | column_offset=len(line), 24 | ) 25 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/single_quotes.py: -------------------------------------------------------------------------------- 1 | import tokenize 2 | 3 | from . import _checker 4 | 5 | 6 | class Checker( 7 | _checker.BaseChecker, 8 | ): 9 | @classmethod 10 | def check( 11 | cls, 12 | filename, 13 | lines, 14 | tokens, 15 | start_position_to_token, 16 | ast_tree, 17 | astroid_tree, 18 | all_astroid_nodes, 19 | ): 20 | for token in tokens: 21 | if token.type != tokenize.STRING: 22 | continue 23 | 24 | if token.string.startswith('"') or token.string.endswith('"'): 25 | yield from cls.error_yielder.yield_error( 26 | error_id='I011', 27 | line_number=token.start[0], 28 | column_offset=token.start[1], 29 | ) 30 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/comments.py: -------------------------------------------------------------------------------- 1 | import tokenize 2 | 3 | from . import _checker 4 | 5 | 6 | class Checker( 7 | _checker.BaseChecker, 8 | ): 9 | @classmethod 10 | def check( 11 | cls, 12 | filename, 13 | lines, 14 | tokens, 15 | start_position_to_token, 16 | ast_tree, 17 | astroid_tree, 18 | all_astroid_nodes, 19 | ): 20 | yield from cls.check_no_inline_comments( 21 | tokens=tokens, 22 | ) 23 | 24 | @classmethod 25 | def check_no_inline_comments( 26 | cls, 27 | tokens, 28 | ): 29 | for token in tokens: 30 | token_type_is_comment = token.type == tokenize.COMMENT 31 | if token_type_is_comment: 32 | yield from cls.error_yielder.yield_error( 33 | error_id='I005', 34 | line_number=token.start[0], 35 | column_offset=token.start[1], 36 | ) 37 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/__init__.py: -------------------------------------------------------------------------------- 1 | from . import backslashes 2 | from . import blank_lines 3 | from . import builtins 4 | from . import comments 5 | from . import debugging 6 | from . import declerations 7 | from . import duplications 8 | from . import imports 9 | from . import indents 10 | from . import mutables 11 | from . import naming 12 | from . import raise_exception 13 | from . import single_quotes 14 | from . import spacing 15 | from . import string_interpolation 16 | 17 | from . import _checker 18 | from . 
import _errors 19 | 20 | 21 | __checkers__ = [ 22 | backslashes.Checker, 23 | blank_lines.Checker, 24 | builtins.Checker, 25 | comments.Checker, 26 | debugging.Checker, 27 | declerations.Checker, 28 | duplications.Checker, 29 | imports.Checker, 30 | indents.Checker, 31 | mutables.Checker, 32 | naming.Checker, 33 | raise_exception.Checker, 34 | single_quotes.Checker, 35 | spacing.Checker, 36 | string_interpolation.Checker, 37 | ] 38 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/raise_exception.py: -------------------------------------------------------------------------------- 1 | import astroid 2 | import builtins 3 | 4 | from . import _checker 5 | 6 | 7 | class Checker( 8 | _checker.BaseChecker, 9 | ): 10 | @classmethod 11 | def check( 12 | cls, 13 | filename, 14 | lines, 15 | tokens, 16 | start_position_to_token, 17 | ast_tree, 18 | astroid_tree, 19 | all_astroid_nodes, 20 | ): 21 | for node in all_astroid_nodes: 22 | if not isinstance(node, astroid.Raise): 23 | continue 24 | 25 | if not node.exc or isinstance(node.exc, astroid.Call): 26 | continue 27 | 28 | if isinstance(node.exc, astroid.Name): 29 | builtin_exception_object = hasattr(builtins, node.exc.name) 30 | if builtin_exception_object: 31 | yield from cls.error_yielder.yield_error( 32 | error_id='I099', 33 | line_number=node.exc.lineno, 34 | column_offset=node.exc.col_offset, 35 | ) 36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Gal Ben David 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/debugging.py: -------------------------------------------------------------------------------- 1 | import astroid 2 | 3 | from . 
import _checker 4 | 5 | 6 | class Checker( 7 | _checker.BaseChecker, 8 | ): 9 | DEBUG_MODULES = { 10 | 'pdb', 11 | 'ipdb', 12 | 'pudb', 13 | } 14 | 15 | @classmethod 16 | def check( 17 | cls, 18 | filename, 19 | lines, 20 | tokens, 21 | start_position_to_token, 22 | ast_tree, 23 | astroid_tree, 24 | all_astroid_nodes, 25 | ): 26 | for node in all_astroid_nodes: 27 | if isinstance(node, astroid.Call): 28 | if isinstance(node.func, astroid.Name) and node.func.name == 'breakpoint': 29 | yield from cls.error_yielder.yield_error( 30 | error_id='I092', 31 | line_number=node.lineno, 32 | column_offset=node.col_offset, 33 | ) 34 | elif isinstance(node, astroid.Import): 35 | for import_name, import_as in node.names: 36 | if import_name in cls.DEBUG_MODULES: 37 | yield from cls.error_yielder.yield_error( 38 | error_id='I092', 39 | line_number=node.lineno, 40 | column_offset=node.col_offset, 41 | ) 42 | -------------------------------------------------------------------------------- /tests/texts/indents.py: -------------------------------------------------------------------------------- 1 | indents_test_text_001 = ''' 2 | def func(): 3 | pass 4 | ''' 5 | indents_test_text_002 = ''' 6 | def func(): 7 | pass 8 | ''' 9 | indents_test_text_003 = ''' 10 | def func(): 11 | \tpass 12 | ''' 13 | indents_test_text_004 = ''' 14 | if True: 15 | pass 16 | ''' 17 | indents_test_text_005 = ''' 18 | if True: 19 | pass 20 | ''' 21 | indents_test_text_006 = ''' 22 | if True: 23 | pass 24 | ''' 25 | indents_test_text_007 = ''' 26 | if True: 27 | if True: 28 | if True: 29 | pass 30 | ''' 31 | indents_test_text_008 = ''' 32 | if True: 33 | if True: 34 | if True: 35 | pass 36 | ''' 37 | indents_test_text_009 = ''' 38 | [ 39 | 1, 40 | 2, 41 | ] 42 | ''' 43 | indents_test_text_010 = ''' 44 | [ 45 | 1, 46 | 2, 47 | ] 48 | ''' 49 | indents_test_text_011 = ''' 50 | [ 51 | 1, 52 | 2, 53 | ] 54 | ''' 55 | indents_test_text_012 = ''' 56 | if True: 57 | if True: 58 | if True: 59 | pass 60 | 61 | param = 1 62 | ''' 63 | indents_test_text_013 = ''' 64 | long_string = \'\'\' 65 | good line 66 | \'\'\' 67 | ''' 68 | indents_test_text_014 = ''' 69 | long_string = \'\'\' 70 | good line 71 | good line 72 | \'\'\' 73 | ''' 74 | indents_test_text_015 = ''' 75 | raise Exception( 76 | 'a'.join( 77 | lines, 78 | ) 79 | ) 80 | ''' 81 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | 4 | setuptools.setup( 5 | name='flake8-intsights', 6 | version='0.4.0', 7 | url='https://github.com/Intsights/flake8-intsights', 8 | project_urls={ 9 | 'Source': 'https://github.com/Intsights/flake8-intsights', 10 | }, 11 | license='MIT', 12 | description='Uncompromising and opinionated flake8 plugin which follows Intsights\' practices', 13 | long_description=open('README.md').read(), 14 | long_description_content_type='text/markdown', 15 | classifiers=[ 16 | 'License :: OSI Approved :: MIT License', 17 | 'Programming Language :: Python :: 3.7', 18 | 'Programming Language :: Python :: 3.8', 19 | 'Programming Language :: Python :: 3.9', 20 | 'Programming Language :: Python :: 3.10', 21 | 'Programming Language :: Python :: 3.11', 22 | 'Programming Language :: Python :: 3.12', 23 | ], 24 | keywords='flake8 conventions style lint linter intsights', 25 | python_requires='>=3.7', 26 | zip_safe=False, 27 | install_requires=[ 28 | 'astroid==2.15.8', 29 | 'flake8', 30 | 'flake8-assertive', 31 | 'flake8-comprehensions', 
32 | ], 33 | packages=[ 34 | 'flake8_intsights', 35 | 'flake8_intsights.checkers', 36 | ], 37 | entry_points={ 38 | 'flake8.extension': [ 39 | 'I = flake8_intsights.checker:Checker', 40 | ], 41 | }, 42 | ) 43 | -------------------------------------------------------------------------------- /tests/texts/single_quotes.py: -------------------------------------------------------------------------------- 1 | single_quotes_test_text_001 = ''' 2 | arg = 'test' 3 | ''' 4 | single_quotes_test_text_002 = ''' 5 | arg = "test" 6 | ''' 7 | single_quotes_test_text_003 = ''' 8 | function('test') 9 | ''' 10 | single_quotes_test_text_004 = ''' 11 | function("test") 12 | ''' 13 | single_quotes_test_text_005 = ''' 14 | function( 15 | arg='test', 16 | ) 17 | ''' 18 | single_quotes_test_text_006 = ''' 19 | function( 20 | arg="test", 21 | ) 22 | ''' 23 | single_quotes_test_text_007 = ''' 24 | def function( 25 | arg='test', 26 | ): 27 | pass 28 | ''' 29 | single_quotes_test_text_008 = ''' 30 | def function( 31 | arg="test", 32 | ): 33 | pass 34 | ''' 35 | single_quotes_test_text_009 = ''' 36 | def function( 37 | arg, 38 | ): 39 | \'\'\' 40 | \'\'\' 41 | pass 42 | ''' 43 | single_quotes_test_text_010 = ''' 44 | def function( 45 | arg, 46 | ): 47 | """ 48 | """ 49 | pass 50 | ''' 51 | single_quotes_test_text_011 = ''' 52 | tuple_arg = ( 53 | 'test', 54 | ) 55 | ''' 56 | single_quotes_test_text_012 = ''' 57 | tuple_arg = ( 58 | "test", 59 | ) 60 | ''' 61 | single_quotes_test_text_013 = ''' 62 | dict_arg = { 63 | 'key': 1, 64 | } 65 | ''' 66 | single_quotes_test_text_014 = ''' 67 | dict_arg = { 68 | "key": 1, 69 | } 70 | ''' 71 | single_quotes_test_text_015 = ''' 72 | dict_arg = { 73 | 'key': '1', 74 | } 75 | ''' 76 | single_quotes_test_text_016 = ''' 77 | dict_arg = { 78 | "key": "1", 79 | } 80 | ''' 81 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | 66 | # PyBuilder 67 | target/ 68 | 69 | # IPython Notebook 70 | .ipynb_checkpoints 71 | 72 | # pyenv 73 | .python-version 74 | 75 | # celery beat schedule file 76 | celerybeat-schedule 77 | 78 | # dotenv 79 | .env 80 | 81 | # virtualenv 82 | venv/ 83 | ENV/ 84 | 85 | # Spyder project settings 86 | .spyderproject 87 | 88 | # Rope project settings 89 | .ropeproject 90 | 91 | .pytest_cache 92 | .vscode 93 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | on: 3 | - push 4 | - pull_request 5 | jobs: 6 | lint: 7 | if: github.event_name == 'push' && !startsWith(github.event.ref, 'refs/tags') 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: Checkout 11 | uses: actions/checkout@v4 12 | - name: Set up Python 3.11 13 | uses: actions/setup-python@v4 14 | with: 15 | python-version: '3.11' 16 | - name: Install Flake8 & mypy 17 | run: >- 18 | python -m pip install --upgrade . mypy; 19 | - name: Check Flake8 Linting Errors 20 | run: >- 21 | flake8 22 | - name: Check mypy Linting Errors 23 | run: >- 24 | mypy --ignore-missing-imports -p flake8_intsights 25 | test: 26 | runs-on: ${{ matrix.os }} 27 | needs: lint 28 | strategy: 29 | fail-fast: false 30 | matrix: 31 | python-version: 32 | - '3.10' 33 | - '3.11' 34 | - '3.12' 35 | os: 36 | - ubuntu-latest 37 | steps: 38 | - name: Checkout 39 | uses: actions/checkout@v4 40 | - name: Set up Python ${{ matrix.python-version }} 41 | uses: actions/setup-python@v4 42 | with: 43 | python-version: ${{ matrix.python-version }} 44 | - name: Install Python Dependencies 45 | run: >- 46 | python -m pip install --upgrade -r requirements.txt; 47 | python -m pip install --upgrade pytest; 48 | - name: Test module 49 | run: >- 50 | pytest 51 | -------------------------------------------------------------------------------- /tests/texts/naming.py: -------------------------------------------------------------------------------- 1 | naming_test_text_001 = ''' 2 | def function_one(): 3 | pass 4 | 5 | def function_two(arg): 6 | pass 7 | 8 | def function_three(self): 9 | pass 10 | 11 | def function_four(cls): 12 | pass 13 | 14 | class Test: 15 | def method_1(): 16 | pass 17 | 18 | def method_2(arg): 19 | pass 20 | 21 | def method_3(self): 22 | pass 23 | 24 | def method_4(cls): 25 | pass 26 | 27 | @classmethod 28 | def method_5(): 29 | pass 30 | 31 | @classmethod 32 | def method_6(arg): 33 | pass 34 | 35 | @classmethod 36 | def method_7(cls): 37 | pass 38 | 39 | @classmethod 40 | def method_8(self): 41 | pass 42 | 43 | @staticmethod 44 | def method_9(): 45 | pass 46 | 47 | @staticmethod 48 | def method_10(arg): 49 | pass 50 | 51 | @staticmethod 52 | def method_11(cls): 53 | pass 54 | 55 | @staticmethod 56 | def method_12(self): 57 | pass 58 | 59 | def __new__( 60 | cls, 61 | *args, 62 | **kwargs 63 | ): 64 | pass 65 | 66 | def function_five(Argumemnt): 67 | pass 68 | ''' 69 | 
naming_test_text_002 = ''' 70 | class Some: 71 | pass 72 | 73 | class some: 74 | pass 75 | 76 | class Some_class: 77 | pass 78 | 79 | class ModuleTestCase( 80 | unittest.TestCase, 81 | ): 82 | pass 83 | 84 | class TestCase( 85 | unittest.TestCase, 86 | ): 87 | pass 88 | 89 | class Module( 90 | unittest.TestCase, 91 | ): 92 | pass 93 | ''' 94 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/string_interpolation.py: -------------------------------------------------------------------------------- 1 | import astroid 2 | import tokenize 3 | 4 | from . import _checker 5 | 6 | 7 | class Checker( 8 | _checker.BaseChecker, 9 | ): 10 | @classmethod 11 | def check( 12 | cls, 13 | filename, 14 | lines, 15 | tokens, 16 | start_position_to_token, 17 | ast_tree, 18 | astroid_tree, 19 | all_astroid_nodes, 20 | ): 21 | for node in all_astroid_nodes: 22 | if isinstance(node, astroid.Call) and hasattr(node.func, 'attrname'): 23 | if node.func.attrname != 'format': 24 | continue 25 | elif not isinstance(node.func.expr, astroid.Const): 26 | continue 27 | elif node.func.expr.pytype() != 'builtins.str': 28 | continue 29 | 30 | yield from cls.error_yielder.yield_error( 31 | error_id='I098', 32 | line_number=node.lineno, 33 | column_offset=node.col_offset, 34 | ) 35 | 36 | for index, token in enumerate(tokens): 37 | if token.type != tokenize.OP or token.string != '%': 38 | continue 39 | 40 | if index == 0: 41 | continue 42 | 43 | previous_token = tokens[index - 1] 44 | if previous_token.type == tokenize.STRING: 45 | yield from cls.error_yielder.yield_error( 46 | error_id='I098', 47 | line_number=token.start[0], 48 | column_offset=token.start[1], 49 | ) 50 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/mutables.py: -------------------------------------------------------------------------------- 1 | import astroid 2 | 3 | from . import _checker 4 | 5 | 6 | class Checker( 7 | _checker.BaseChecker, 8 | ): 9 | MUTABLE_LITERALS = ( 10 | astroid.Dict, 11 | astroid.List, 12 | astroid.Set, 13 | astroid.Tuple, 14 | ) 15 | MUTABLE_CALLS = { 16 | 'Counter', 17 | 'OrderedDict', 18 | 'collections.Counter', 19 | 'collections.OrderedDict', 20 | 'collections.defaultdict', 21 | 'collections.deque', 22 | 'defaultdict', 23 | 'deque', 24 | 'dict', 25 | 'list', 26 | 'set', 27 | 'tuple', 28 | } 29 | 30 | @classmethod 31 | def check( 32 | cls, 33 | filename, 34 | lines, 35 | tokens, 36 | start_position_to_token, 37 | ast_tree, 38 | astroid_tree, 39 | all_astroid_nodes, 40 | ): 41 | for node in all_astroid_nodes: 42 | if not isinstance(node, astroid.FunctionDef): 43 | continue 44 | 45 | for default in node.args.defaults + node.args.kw_defaults: 46 | if isinstance(default, cls.MUTABLE_LITERALS): 47 | yield from cls.error_yielder.yield_error( 48 | error_id='I081', 49 | line_number=default.lineno, 50 | column_offset=default.col_offset, 51 | ) 52 | elif isinstance(default, astroid.Call): 53 | if default.func.as_string() in cls.MUTABLE_CALLS: 54 | yield from cls.error_yielder.yield_error( 55 | error_id='I081', 56 | line_number=default.lineno, 57 | column_offset=default.col_offset, 58 | ) 59 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/blank_lines.py: -------------------------------------------------------------------------------- 1 | from . 
import _checker 2 | 3 | 4 | class Checker( 5 | _checker.BaseChecker, 6 | ): 7 | @classmethod 8 | def check( 9 | cls, 10 | filename, 11 | lines, 12 | tokens, 13 | start_position_to_token, 14 | ast_tree, 15 | astroid_tree, 16 | all_astroid_nodes, 17 | ): 18 | yield from cls.check_blank_lines_from_bottom( 19 | lines=lines, 20 | ) 21 | 22 | yield from cls.check_blank_lines_from_top( 23 | lines=lines, 24 | ) 25 | 26 | @classmethod 27 | def check_blank_lines_from_bottom( 28 | cls, 29 | lines, 30 | ): 31 | if not lines: 32 | return 33 | elif len(lines) == 1: 34 | if lines[0] != '': 35 | yield from cls.error_yielder.yield_error( 36 | error_id='I001', 37 | line_number=len(lines), 38 | column_offset=0, 39 | ) 40 | else: 41 | last_line_is_blank = lines[-1].strip() == '' 42 | previous_to_last_line_is_not_blank = lines[-2].strip() != '' 43 | one_blank_line_at_the_end = last_line_is_blank and previous_to_last_line_is_not_blank 44 | 45 | if not one_blank_line_at_the_end: 46 | yield from cls.error_yielder.yield_error( 47 | error_id='I001', 48 | line_number=len(lines), 49 | column_offset=0, 50 | ) 51 | 52 | @classmethod 53 | def check_blank_lines_from_top( 54 | cls, 55 | lines, 56 | ): 57 | if len(lines) > 1: 58 | first_line_is_blank = lines[0].strip() == '' 59 | if first_line_is_blank: 60 | yield from cls.error_yielder.yield_error( 61 | error_id='I002', 62 | line_number=1, 63 | column_offset=0, 64 | ) 65 | -------------------------------------------------------------------------------- /tests/texts/builtins.py: -------------------------------------------------------------------------------- 1 | builtins_test_text_001 = ''' 2 | arg = 10 3 | int = 10 4 | ''' 5 | builtins_test_text_002 = ''' 6 | def function_one(): 7 | pass 8 | 9 | def function_two( 10 | arg, 11 | ): 12 | pass 13 | 14 | def function_three( 15 | int, 16 | ): 17 | pass 18 | 19 | async def function_four(): 20 | pass 21 | 22 | async def function_five( 23 | arg, 24 | ): 25 | pass 26 | 27 | async def function_six( 28 | int, 29 | ): 30 | pass 31 | 32 | def int(): 33 | pass 34 | ''' 35 | builtins_test_text_003 = ''' 36 | for i in range(10): 37 | pass 38 | 39 | for int in range(10): 40 | pass 41 | 42 | for a, b in range(10): 43 | pass 44 | 45 | for a, int in range(10): 46 | pass 47 | 48 | for *a, b in [[1,2,3]]: 49 | pass 50 | 51 | for *int, b in [[1,2,3]]: 52 | pass 53 | ''' 54 | builtins_test_text_004 = ''' 55 | with func(): 56 | pass 57 | 58 | with func() as var: 59 | pass 60 | 61 | with func() as int: 62 | pass 63 | 64 | with func_one() as var_one, func_two() as var_two: 65 | pass 66 | 67 | with func_one() as int, func_two() as var_two: 68 | pass 69 | 70 | with func_one() as var_one, func_two() as int: 71 | pass 72 | 73 | with func() as (var_one, var_two): 74 | pass 75 | 76 | with func() as (var_one, int): 77 | pass 78 | ''' 79 | builtins_test_text_005 = ''' 80 | var = [ 81 | var_one 82 | for var_one in var_list 83 | ] 84 | var = [ 85 | int 86 | for int in var_list 87 | ] 88 | var = [ 89 | (var_one, var_two) 90 | for (var_one, var_two) in var_list 91 | ] 92 | var = [ 93 | (var_one, int) 94 | for (var_one, int) in var_list 95 | ] 96 | ''' 97 | builtins_test_text_006 = ''' 98 | class ClassName: 99 | pass 100 | 101 | class dict: 102 | pass 103 | 104 | class SomeClass: 105 | type = 'some' 106 | 107 | def method( 108 | self, 109 | ): 110 | pass 111 | 112 | def filter( 113 | self, 114 | ): 115 | pass 116 | ''' 117 | -------------------------------------------------------------------------------- /TODO.md: 
-------------------------------------------------------------------------------- 1 | ### general 2 | + No tabs in code, unless in strings. 3 | + all lines must start with 4-space indents. make sure the number of spaces is a multiple of 4. 4 | + never use inline comments. 5 | + no trailing spaces. 6 | + the last line of a file is a blank line 7 | + no blank lines at the top of the file 8 | + if the same file has 2 same name functions/classes/dict_keys .... 9 | + no newline after class def 10 | + except, finally: no blank lines above 11 | + two blank lines after the last import 12 | + indentation must be +1/0/-1 from the previous line 13 | + use only f-strings 14 | + raise must raise an object(), not a class 15 | + no mutable default arguments 16 | + remove set_trace 17 | - vars can't be 1 char long 18 | - function names must be at least 3 chars. 19 | - check that the file is in utf-8 format. 20 | - variable names must not be less than 3 chars. 21 | - not a string -> length > 100 -> error 22 | - if comparison -> no more than 2 boolean operators 23 | - no \r in code 24 | - re.compile -> pattern, always r'' 25 | - try should be spaced above 26 | - no strings without any assignment: 27 | ```python 28 | import test 29 | 30 | """ 31 | some comment 32 | """ 33 | ``` 34 | - test file names must start with test_ 35 | - dict key name must be next to the bracket - dict['key'] and not dict[ 'key' ] 36 | * write a formatter 37 | 38 | 39 | 40 | ### dict/list-comprehension 41 | ```python 42 | [ 43 | entry 44 | for entry in arr 45 | if condition 46 | ..... 47 | ] 48 | ``` 49 | 50 | ### std function calls 51 | - comma ',' at the end of each line 52 | 53 | ```python 54 | range( 55 | 0, 56 | 10, 57 | 2, 58 | ) 59 | ``` 60 | 61 | ### non-std function calls 62 | - comma ',' at the end of each line 63 | - always call by parameter names 64 | 65 | ```python 66 | requests.get( 67 | url='http:/....', 68 | headers={ 69 | 'key': 'value', 70 | }, 71 | ) 72 | ``` 73 | 74 | ### use more and better comprehensions 75 | https://pypi.python.org/pypi/flake8-comprehensions/1.2.1 76 | 77 | ### no leftovers!
78 | - check there are no 'print' calls in the code 79 | 80 | ### regex 81 | - regex patterns must always be escaped string -> r'' 82 | -------------------------------------------------------------------------------- /tests/texts/mutables.py: -------------------------------------------------------------------------------- 1 | mutables_test_text_001 = ''' 2 | def function( 3 | param, 4 | ): 5 | pass 6 | ''' 7 | mutables_test_text_002 = ''' 8 | def function( 9 | param=0, 10 | ): 11 | pass 12 | ''' 13 | mutables_test_text_003 = ''' 14 | def function( 15 | param={}, 16 | ): 17 | pass 18 | ''' 19 | mutables_test_text_004 = ''' 20 | def function( 21 | param=[], 22 | ): 23 | pass 24 | ''' 25 | mutables_test_text_005 = ''' 26 | def function( 27 | param=tuple(), 28 | ): 29 | pass 30 | ''' 31 | mutables_test_text_006 = ''' 32 | def function( 33 | param=list(), 34 | ): 35 | pass 36 | ''' 37 | mutables_test_text_007 = ''' 38 | def function( 39 | param_one, 40 | param_two, 41 | ): 42 | pass 43 | ''' 44 | mutables_test_text_008 = ''' 45 | def function( 46 | param_one, 47 | param_two=0, 48 | ): 49 | pass 50 | ''' 51 | mutables_test_text_009 = ''' 52 | def function( 53 | param_one, 54 | param_two={}, 55 | ): 56 | pass 57 | ''' 58 | mutables_test_text_010 = ''' 59 | def function( 60 | param_one, 61 | param_two=[], 62 | ): 63 | pass 64 | ''' 65 | mutables_test_text_011 = ''' 66 | def function( 67 | param_one, 68 | param_two=list(), 69 | ): 70 | pass 71 | ''' 72 | mutables_test_text_012 = ''' 73 | def function( 74 | param_one, 75 | param_two=tuple(), 76 | ): 77 | pass 78 | ''' 79 | mutables_test_text_013 = ''' 80 | def function( 81 | param_one, 82 | param_two, 83 | param_three, 84 | ): 85 | pass 86 | ''' 87 | mutables_test_text_014 = ''' 88 | def function( 89 | param_one, 90 | param_two, 91 | param_three=0, 92 | ): 93 | pass 94 | ''' 95 | mutables_test_text_015 = ''' 96 | def function( 97 | param_one, 98 | param_two=0, 99 | param_three={}, 100 | ): 101 | pass 102 | ''' 103 | mutables_test_text_016 = ''' 104 | def function( 105 | param_one, 106 | param_two=[], 107 | param_three={}, 108 | ): 109 | pass 110 | ''' 111 | mutables_test_text_017 = ''' 112 | def function( 113 | param_one={}, 114 | param_two=0, 115 | param_three={}, 116 | ): 117 | pass 118 | ''' 119 | mutables_test_text_018 = ''' 120 | def function( 121 | param_one=0, 122 | param_two=[], 123 | param_three=0, 124 | ): 125 | pass 126 | ''' 127 | -------------------------------------------------------------------------------- /tests/texts/duplications.py: -------------------------------------------------------------------------------- 1 | duplications_test_text_001 = ''' 2 | class ClassOne: 3 | pass 4 | 5 | class ClassTwo: 6 | pass 7 | ''' 8 | duplications_test_text_002 = ''' 9 | class ClassOne: 10 | pass 11 | 12 | class ClassOne: 13 | pass 14 | ''' 15 | duplications_test_text_003 = ''' 16 | def function_one(): 17 | pass 18 | 19 | def function_two(): 20 | pass 21 | ''' 22 | duplications_test_text_004 = ''' 23 | def function_one(): 24 | pass 25 | 26 | def function_one(): 27 | pass 28 | ''' 29 | duplications_test_text_005 = ''' 30 | class ClassOne: 31 | def method_one(): 32 | pass 33 | 34 | def method_two(): 35 | pass 36 | ''' 37 | duplications_test_text_006 = ''' 38 | class ClassOne: 39 | def method_one(): 40 | pass 41 | 42 | def method_one(): 43 | pass 44 | ''' 45 | duplications_test_text_007 = ''' 46 | class ClassOne: 47 | @property 48 | def method_one(): 49 | pass 50 | 51 | @method_one.setter 52 | def method_one(): 53 | pass 54 | ''' 55 | 
duplications_test_text_008 = ''' 56 | class ClassOne: 57 | @property 58 | def method_one(): 59 | pass 60 | 61 | @method_one.setter 62 | def method_one(): 63 | pass 64 | 65 | def method_one(): 66 | pass 67 | ''' 68 | duplications_test_text_009 = ''' 69 | class ClassOne: 70 | class SubClassOne: 71 | pass 72 | 73 | class SubClassTwo: 74 | pass 75 | ''' 76 | duplications_test_text_010 = ''' 77 | class ClassOne: 78 | class SubClassOne: 79 | pass 80 | 81 | class SubClassOne: 82 | pass 83 | ''' 84 | duplications_test_text_011 = ''' 85 | def function_one(): 86 | def sub_function_one(): 87 | pass 88 | 89 | def sub_function_two(): 90 | pass 91 | ''' 92 | duplications_test_text_012 = ''' 93 | def function_one(): 94 | def sub_function_one(): 95 | pass 96 | 97 | def sub_function_one(): 98 | pass 99 | ''' 100 | duplications_test_text_013 = ''' 101 | dict_one = { 102 | 'key_1': 'value_1', 103 | 'key_2': 'value_2', 104 | } 105 | ''' 106 | duplications_test_text_014 = ''' 107 | dict_one = { 108 | 'key_1': 'value_1', 109 | 'key_1': 'value_2', 110 | } 111 | ''' 112 | duplications_test_text_015 = ''' 113 | new_dict = { 114 | **dict_one, 115 | **dict_two, 116 | } 117 | ''' 118 | -------------------------------------------------------------------------------- /tests/test_comments.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | import tokenize 3 | import ast 4 | import io 5 | import unittest 6 | 7 | import flake8_intsights 8 | 9 | from . import texts 10 | 11 | 12 | class CommentsTestCase( 13 | unittest.TestCase, 14 | ): 15 | def get_linting_errors( 16 | self, 17 | source_code, 18 | ): 19 | source_code = textwrap.dedent( 20 | text=source_code.lstrip(), 21 | ) 22 | file_tokens = list( 23 | tokenize.tokenize( 24 | readline=io.BytesIO( 25 | initial_bytes=source_code.encode('utf-8'), 26 | ).readline, 27 | ) 28 | ) 29 | tree = ast.parse(source_code) 30 | filename = 'test_comments' 31 | lines = io.StringIO(source_code).readlines() 32 | 33 | checker_obj = flake8_intsights.checker.Checker( 34 | tree=tree, 35 | filename=filename, 36 | lines=lines, 37 | file_tokens=file_tokens, 38 | ) 39 | checker_obj.checkers = [flake8_intsights.checkers.comments.Checker] 40 | linting_errors = list(checker_obj.run()) 41 | 42 | return linting_errors 43 | 44 | def test_case_1( 45 | self, 46 | ): 47 | self.assertEqual( 48 | first=self.get_linting_errors( 49 | source_code=texts.comments.comments_test_text_001, 50 | ), 51 | second=[], 52 | ) 53 | 54 | def test_case_2( 55 | self, 56 | ): 57 | self.assertEqual( 58 | first=self.get_linting_errors( 59 | source_code=texts.comments.comments_test_text_002, 60 | ), 61 | second=[ 62 | ( 63 | 1, 64 | 0, 65 | 'I005 inline comments are not allowed', 66 | 'intsights_checker', 67 | ), 68 | ], 69 | ) 70 | 71 | def test_case_3( 72 | self, 73 | ): 74 | self.assertEqual( 75 | first=self.get_linting_errors( 76 | source_code=texts.comments.comments_test_text_003, 77 | ), 78 | second=[ 79 | ( 80 | 2, 81 | 4, 82 | 'I005 inline comments are not allowed', 83 | 'intsights_checker', 84 | ), 85 | ], 86 | ) 87 | -------------------------------------------------------------------------------- /tests/texts/imports.py: -------------------------------------------------------------------------------- 1 | imports_test_text_001 = ''' 2 | import module1 3 | ''' 4 | imports_test_text_002 = ''' 5 | import module1.submodule1 6 | ''' 7 | imports_test_text_003 = ''' 8 | from . import module1 9 | ''' 10 | imports_test_text_004 = ''' 11 | from .. 
import module1 12 | ''' 13 | imports_test_text_005 = ''' 14 | import module1 15 | 16 | from . import module2 17 | ''' 18 | imports_test_text_006 = ''' 19 | import module1 20 | import module2 21 | 22 | from . import module3 23 | ''' 24 | imports_test_text_007 = ''' 25 | import module1 26 | 27 | from . import module2 28 | from . import module3 29 | ''' 30 | imports_test_text_008 = ''' 31 | import module1 32 | import module2 33 | 34 | from . import module3 35 | from . import module4 36 | ''' 37 | imports_test_text_009 = ''' 38 | import module1 39 | import module2 40 | 41 | from . import module3 42 | from . import module4 43 | ''' 44 | imports_test_text_010 = ''' 45 | import module1.submodule1 as alias1 46 | ''' 47 | imports_test_text_011 = ''' 48 | import module1 as alias1 49 | ''' 50 | imports_test_text_012 = ''' 51 | import module1, module2 52 | ''' 53 | imports_test_text_013 = ''' 54 | import module1.submodule1, module2 55 | ''' 56 | imports_test_text_014 = ''' 57 | from module1 import submodule1 58 | ''' 59 | imports_test_text_015 = ''' 60 | from module1 import submodule1 as alias1 61 | ''' 62 | imports_test_text_016 = ''' 63 | from module1 import * 64 | ''' 65 | imports_test_text_017 = ''' 66 | from module1 import module1, module2 67 | ''' 68 | imports_test_text_018 = ''' 69 | from .module1 import module1 70 | ''' 71 | imports_test_text_019 = ''' 72 | from .module1 import module1, module2 73 | ''' 74 | imports_test_text_020 = ''' 75 | from ..module1 import module1 76 | ''' 77 | imports_test_text_021 = ''' 78 | from ..module1 import module1, module2 79 | ''' 80 | imports_test_text_022 = ''' 81 | from . import module1 as alias1 82 | ''' 83 | imports_test_text_023 = ''' 84 | from .module1 import * 85 | ''' 86 | imports_test_text_024 = ''' 87 | import module1 88 | from . import module2 89 | ''' 90 | imports_test_text_025 = ''' 91 | from . import module2 92 | import module1 93 | ''' 94 | imports_test_text_026 = ''' 95 | import module 96 | ''' 97 | imports_test_text_027 = ''' 98 | import module 99 | 100 | ''' 101 | imports_test_text_028 = ''' 102 | import module 103 | 104 | 105 | param = 1 106 | ''' 107 | imports_test_text_029 = ''' 108 | import module 109 | 110 | param = 1 111 | ''' 112 | imports_test_text_030 = ''' 113 | import module 114 | param = 1 115 | ''' 116 | -------------------------------------------------------------------------------- /tests/test_raise_exception.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | import tokenize 3 | import ast 4 | import io 5 | import unittest 6 | 7 | import flake8_intsights 8 | 9 | from . 
import texts 10 | 11 | 12 | class RaiseExceptionTestCase( 13 | unittest.TestCase, 14 | ): 15 | def get_linting_errors( 16 | self, 17 | source_code, 18 | ): 19 | source_code = textwrap.dedent( 20 | text=source_code.lstrip(), 21 | ) 22 | file_tokens = list( 23 | tokenize.tokenize( 24 | readline=io.BytesIO( 25 | initial_bytes=source_code.encode('utf-8'), 26 | ).readline, 27 | ) 28 | ) 29 | tree = ast.parse(source_code) 30 | filename = 'test_raise_exception' 31 | lines = io.StringIO(source_code).readlines() 32 | 33 | checker_obj = flake8_intsights.checker.Checker( 34 | tree=tree, 35 | filename=filename, 36 | lines=lines, 37 | file_tokens=file_tokens, 38 | ) 39 | checker_obj.checkers = [flake8_intsights.checkers.raise_exception.Checker] 40 | linting_errors = list(checker_obj.run()) 41 | 42 | return linting_errors 43 | 44 | def test_case_1( 45 | self, 46 | ): 47 | self.assertEqual( 48 | first=self.get_linting_errors( 49 | source_code=texts.raise_exception.raise_exception_test_text_001, 50 | ), 51 | second=[ 52 | ( 53 | 1, 54 | 6, 55 | 'I099 raising an exception should raise an object not a type', 56 | 'intsights_checker', 57 | ), 58 | ( 59 | 4, 60 | 6, 61 | 'I099 raising an exception should raise an object not a type', 62 | 'intsights_checker', 63 | ), 64 | ( 65 | 8, 66 | 10, 67 | 'I099 raising an exception should raise an object not a type', 68 | 'intsights_checker', 69 | ), 70 | ( 71 | 11, 72 | 10, 73 | 'I099 raising an exception should raise an object not a type', 74 | 'intsights_checker', 75 | ), 76 | ( 77 | 15, 78 | 10, 79 | 'I099 raising an exception should raise an object not a type', 80 | 'intsights_checker', 81 | ), 82 | ], 83 | ) 84 | -------------------------------------------------------------------------------- /tests/test_backslashes.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | import tokenize 3 | import ast 4 | import io 5 | import unittest 6 | 7 | import flake8_intsights 8 | 9 | from . 
import texts 10 | 11 | 12 | class BackslashesTestCase( 13 | unittest.TestCase, 14 | ): 15 | def get_linting_errors( 16 | self, 17 | source_code, 18 | ): 19 | source_code = textwrap.dedent( 20 | text=source_code.lstrip(), 21 | ) 22 | file_tokens = list( 23 | tokenize.tokenize( 24 | readline=io.BytesIO( 25 | initial_bytes=source_code.encode('utf-8'), 26 | ).readline, 27 | ) 28 | ) 29 | tree = ast.parse(source_code) 30 | filename = 'test_backslashes' 31 | lines = io.StringIO(source_code).readlines() 32 | 33 | checker_obj = flake8_intsights.checker.Checker( 34 | tree=tree, 35 | filename=filename, 36 | lines=lines, 37 | file_tokens=file_tokens, 38 | ) 39 | checker_obj.checkers = [flake8_intsights.checkers.backslashes.Checker] 40 | linting_errors = list(checker_obj.run()) 41 | 42 | return linting_errors 43 | 44 | def test_case_1( 45 | self, 46 | ): 47 | self.assertEqual( 48 | first=self.get_linting_errors( 49 | source_code=texts.backslashes.backslashes_test_text_001, 50 | ), 51 | second=[], 52 | ) 53 | 54 | def test_case_2( 55 | self, 56 | ): 57 | self.assertEqual( 58 | first=self.get_linting_errors( 59 | source_code=texts.backslashes.backslashes_test_text_002, 60 | ), 61 | second=[ 62 | ( 63 | 1, 64 | 24, 65 | 'I093 do not break a line with backslashes', 66 | 'intsights_checker', 67 | ), 68 | ], 69 | ) 70 | 71 | def test_case_3( 72 | self, 73 | ): 74 | self.assertEqual( 75 | first=self.get_linting_errors( 76 | source_code=texts.backslashes.backslashes_test_text_003, 77 | ), 78 | second=[ 79 | ( 80 | 1, 81 | 24, 82 | 'I093 do not break a line with backslashes', 83 | 'intsights_checker', 84 | ), 85 | ], 86 | ) 87 | 88 | def test_case_4( 89 | self, 90 | ): 91 | self.assertEqual( 92 | first=self.get_linting_errors( 93 | source_code=texts.backslashes.backslashes_test_text_004, 94 | ), 95 | second=[], 96 | ) 97 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/indents.py: -------------------------------------------------------------------------------- 1 | import tokenize 2 | 3 | from . 
import _checker 4 | 5 | 6 | class Checker( 7 | _checker.BaseChecker, 8 | ): 9 | @classmethod 10 | def check( 11 | cls, 12 | filename, 13 | lines, 14 | tokens, 15 | start_position_to_token, 16 | ast_tree, 17 | astroid_tree, 18 | all_astroid_nodes, 19 | ): 20 | yield from cls.check_only_spaces_indents( 21 | tokens=tokens, 22 | ) 23 | 24 | yield from cls.check_indentations_gradually( 25 | lines=lines, 26 | tokens=tokens, 27 | ) 28 | 29 | @classmethod 30 | def check_only_spaces_indents( 31 | cls, 32 | tokens, 33 | ): 34 | for token in tokens: 35 | token_type_is_indent = token.type == tokenize.INDENT 36 | if not token_type_is_indent: 37 | continue 38 | 39 | number_of_tabs = token.string.count('\t') 40 | if number_of_tabs > 0: 41 | yield from cls.error_yielder.yield_error( 42 | error_id='I003', 43 | line_number=token.start[0], 44 | column_offset=0, 45 | ) 46 | 47 | number_of_spaces = token.string.count(' ') 48 | spaces_are_not_in_four = number_of_spaces % 4 != 0 49 | if spaces_are_not_in_four: 50 | yield from cls.error_yielder.yield_error( 51 | error_id='I004', 52 | line_number=token.start[0], 53 | column_offset=0, 54 | ) 55 | 56 | @classmethod 57 | def check_indentations_gradually( 58 | cls, 59 | lines, 60 | tokens, 61 | ): 62 | previous_indenation_level = 0 63 | 64 | for line_number, line in enumerate(lines, 1): 65 | if line.strip() == '': 66 | continue 67 | 68 | current_indenation_level = cls.get_line_indentation_level( 69 | line=line, 70 | ) 71 | tokens_indented_ungradually = (current_indenation_level - previous_indenation_level) > 1 72 | if tokens_indented_ungradually: 73 | col_offset = len(line) - len(line.lstrip()) 74 | current_token = cls.get_token_by_position( 75 | lineno=line_number, 76 | col_offset=col_offset, 77 | tokens=tokens, 78 | ) 79 | current_token_is_string = current_token.type == tokenize.STRING 80 | 81 | if not current_token_is_string: 82 | yield from cls.error_yielder.yield_error( 83 | error_id='I006', 84 | line_number=line_number, 85 | column_offset=0, 86 | ) 87 | 88 | previous_indenation_level = current_indenation_level 89 | -------------------------------------------------------------------------------- /flake8_intsights/checker.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import astroid 3 | import astroid.builder 4 | 5 | from . 
import checkers 6 | 7 | 8 | class Checker: 9 | name = 'intsights' 10 | version = '0.1.0' 11 | 12 | astroid_builder = astroid.builder.AstroidBuilder() 13 | 14 | def __init__( 15 | self, 16 | tree, 17 | filename, 18 | lines, 19 | file_tokens, 20 | ): 21 | self.source_code = ''.join(lines) 22 | self.filename = filename 23 | self.lines = self.source_code.split('\n') 24 | self.tokens = file_tokens 25 | self.start_position_to_token = { 26 | token.start: { 27 | 'token': token, 28 | 'index': index, 29 | } 30 | for index, token in enumerate(self.tokens, 0) 31 | } 32 | 33 | self.ast_tree = tree 34 | for node in ast.walk(self.ast_tree): 35 | for child in ast.iter_child_nodes(node): 36 | child.parent = node 37 | 38 | self.astroid_tree = self.astroid_builder.string_build( 39 | data=self.source_code, 40 | ) 41 | self.all_astroid_nodes = [] 42 | for astroid_node in checkers._checker.BaseChecker.walk( 43 | node=self.astroid_tree, 44 | ): 45 | astroid_node_lineno = astroid_node.lineno if astroid_node.lineno is not None else 0 46 | astroid_node_col_offset = getattr(astroid_node, 'col_offset', 0) 47 | astroid_node_col_offset = astroid_node_col_offset if astroid_node_col_offset else 0 48 | astroid_node.position_in_tree = astroid_node_lineno * 10000 + astroid_node_col_offset 49 | 50 | node_inside_formatted_value = False 51 | current_parent = astroid_node.parent 52 | while current_parent: 53 | if isinstance(current_parent, astroid.FormattedValue): 54 | node_inside_formatted_value = True 55 | 56 | break 57 | 58 | current_parent = current_parent.parent 59 | 60 | if not node_inside_formatted_value: 61 | self.all_astroid_nodes.append(astroid_node) 62 | 63 | self.checkers = checkers.__checkers__ 64 | 65 | def run( 66 | self, 67 | ): 68 | for checker in self.checkers: 69 | yield from checker.check( 70 | filename=self.filename, 71 | lines=self.lines, 72 | tokens=self.tokens, 73 | start_position_to_token=self.start_position_to_token, 74 | ast_tree=self.ast_tree, 75 | astroid_tree=self.astroid_tree, 76 | all_astroid_nodes=self.all_astroid_nodes, 77 | ) 78 | 79 | @classmethod 80 | def parse_options( 81 | cls, 82 | options, 83 | ): 84 | if options.per_file_ignores: 85 | options.per_file_ignores += ' */__init__.py:F401' 86 | else: 87 | options.per_file_ignores = '*/__init__.py:F401' 88 | -------------------------------------------------------------------------------- /tests/test_string_interpolation.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | import tokenize 3 | import ast 4 | import io 5 | import unittest 6 | 7 | import flake8_intsights 8 | 9 | from . 
import texts 10 | 11 | 12 | class StringInterpolationTestCase( 13 | unittest.TestCase, 14 | ): 15 | def get_linting_errors( 16 | self, 17 | source_code, 18 | ): 19 | source_code = textwrap.dedent( 20 | text=source_code.lstrip(), 21 | ) 22 | file_tokens = list( 23 | tokenize.tokenize( 24 | readline=io.BytesIO( 25 | initial_bytes=source_code.encode('utf-8'), 26 | ).readline, 27 | ) 28 | ) 29 | tree = ast.parse(source_code) 30 | filename = 'test_string_interpolation' 31 | lines = io.StringIO(source_code).readlines() 32 | 33 | checker_obj = flake8_intsights.checker.Checker( 34 | tree=tree, 35 | filename=filename, 36 | lines=lines, 37 | file_tokens=file_tokens, 38 | ) 39 | checker_obj.checkers = [flake8_intsights.checkers.string_interpolation.Checker] 40 | linting_errors = list(checker_obj.run()) 41 | 42 | return linting_errors 43 | 44 | def test_case_1( 45 | self, 46 | ): 47 | self.assertEqual( 48 | first=self.get_linting_errors( 49 | source_code=texts.string_interpolation.string_interpolation_test_text_001, 50 | ), 51 | second=[ 52 | ( 53 | 2, 54 | 10, 55 | 'I098 use only fstring for string interpolation. format and % are forbidden', 56 | 'intsights_checker', 57 | ), 58 | ( 59 | 3, 60 | 10, 61 | 'I098 use only fstring for string interpolation. format and % are forbidden', 62 | 'intsights_checker', 63 | ), 64 | ( 65 | 4, 66 | 10, 67 | 'I098 use only fstring for string interpolation. format and % are forbidden', 68 | 'intsights_checker', 69 | ), 70 | ( 71 | 9, 72 | 11, 73 | 'I098 use only fstring for string interpolation. format and % are forbidden', 74 | 'intsights_checker', 75 | ), 76 | ( 77 | 14, 78 | 19, 79 | 'I098 use only fstring for string interpolation. format and % are forbidden', 80 | 'intsights_checker', 81 | ), 82 | ( 83 | 15, 84 | 22, 85 | 'I098 use only fstring for string interpolation. format and % are forbidden', 86 | 'intsights_checker', 87 | ), 88 | ], 89 | ) 90 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/naming.py: -------------------------------------------------------------------------------- 1 | import astroid 2 | 3 | from . 
import _checker 4 | 5 | 6 | class Checker( 7 | _checker.BaseChecker, 8 | ): 9 | @classmethod 10 | def check( 11 | cls, 12 | filename, 13 | lines, 14 | tokens, 15 | start_position_to_token, 16 | ast_tree, 17 | astroid_tree, 18 | all_astroid_nodes, 19 | ): 20 | for node in all_astroid_nodes: 21 | if isinstance(node, astroid.FunctionDef): 22 | yield from cls.check_functions( 23 | node=node, 24 | ) 25 | elif isinstance(node, astroid.ClassDef): 26 | yield from cls.check_classes( 27 | node=node, 28 | ) 29 | 30 | @classmethod 31 | def check_functions( 32 | cls, 33 | node, 34 | ): 35 | for arg in node.args.args: 36 | if not arg.name.islower(): 37 | yield from cls.error_yielder.yield_error( 38 | error_id='I097', 39 | line_number=arg.fromlineno, 40 | column_offset=arg.col_offset, 41 | ) 42 | 43 | if isinstance(node.parent, astroid.ClassDef): 44 | function_is_classmethod = False 45 | function_is_staticmethod = False 46 | 47 | if node.name == '__new__': 48 | function_is_classmethod = True 49 | 50 | for decorator_name in node.decoratornames(): 51 | if decorator_name == 'builtins.classmethod': 52 | function_is_classmethod = True 53 | 54 | break 55 | elif decorator_name == 'builtins.staticmethod': 56 | function_is_staticmethod = True 57 | 58 | break 59 | 60 | if function_is_staticmethod: 61 | return 62 | 63 | if not node.args.args: 64 | yield from cls.error_yielder.yield_error( 65 | error_id='I094', 66 | line_number=node.fromlineno, 67 | column_offset=node.col_offset, 68 | ) 69 | 70 | return 71 | 72 | first_arg = node.args.args[0] 73 | if function_is_classmethod: 74 | if first_arg.name != 'cls': 75 | yield from cls.error_yielder.yield_error( 76 | error_id='I094', 77 | line_number=first_arg.fromlineno, 78 | column_offset=first_arg.col_offset, 79 | ) 80 | else: 81 | if first_arg.name != 'self': 82 | yield from cls.error_yielder.yield_error( 83 | error_id='I094', 84 | line_number=first_arg.fromlineno, 85 | column_offset=first_arg.col_offset, 86 | ) 87 | 88 | @classmethod 89 | def check_classes( 90 | cls, 91 | node, 92 | ): 93 | if not node.name[0].isupper() or '_' in node.name: 94 | yield from cls.error_yielder.yield_error( 95 | error_id='I095', 96 | line_number=node.lineno, 97 | column_offset=node.col_offset, 98 | ) 99 | 100 | if 'unittest.TestCase' in node.basenames: 101 | if node.name == 'TestCase' or not node.name.endswith('TestCase'): 102 | yield from cls.error_yielder.yield_error( 103 | error_id='I096', 104 | line_number=node.lineno, 105 | column_offset=node.col_offset, 106 | ) 107 | -------------------------------------------------------------------------------- /tests/test_debugging.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | import tokenize 3 | import ast 4 | import io 5 | import unittest 6 | 7 | import flake8_intsights 8 | 9 | from . 
import texts 10 | 11 | 12 | class DebuggingTestCase( 13 | unittest.TestCase, 14 | ): 15 | def get_linting_errors( 16 | self, 17 | source_code, 18 | ): 19 | source_code = textwrap.dedent( 20 | text=source_code.lstrip(), 21 | ) 22 | file_tokens = list( 23 | tokenize.tokenize( 24 | readline=io.BytesIO( 25 | initial_bytes=source_code.encode('utf-8'), 26 | ).readline, 27 | ) 28 | ) 29 | tree = ast.parse(source_code) 30 | filename = 'test_debugging' 31 | lines = io.StringIO(source_code).readlines() 32 | 33 | checker_obj = flake8_intsights.checker.Checker( 34 | tree=tree, 35 | filename=filename, 36 | lines=lines, 37 | file_tokens=file_tokens, 38 | ) 39 | checker_obj.checkers = [flake8_intsights.checkers.debugging.Checker] 40 | linting_errors = list(checker_obj.run()) 41 | 42 | return linting_errors 43 | 44 | def test_case_1( 45 | self, 46 | ): 47 | self.assertEqual( 48 | first=self.get_linting_errors( 49 | source_code=texts.debugging.debugging_test_text_001, 50 | ), 51 | second=[], 52 | ) 53 | 54 | def test_case_2( 55 | self, 56 | ): 57 | self.assertEqual( 58 | first=self.get_linting_errors( 59 | source_code=texts.debugging.debugging_test_text_002, 60 | ), 61 | second=[ 62 | ( 63 | 1, 64 | 0, 65 | 'I092 never leave a debug code', 66 | 'intsights_checker', 67 | ), 68 | ], 69 | ) 70 | 71 | def test_case_3( 72 | self, 73 | ): 74 | self.assertEqual( 75 | first=self.get_linting_errors( 76 | source_code=texts.debugging.debugging_test_text_003, 77 | ), 78 | second=[ 79 | ( 80 | 1, 81 | 0, 82 | 'I092 never leave a debug code', 83 | 'intsights_checker', 84 | ), 85 | ], 86 | ) 87 | 88 | def test_case_4( 89 | self, 90 | ): 91 | self.assertEqual( 92 | first=self.get_linting_errors( 93 | source_code=texts.debugging.debugging_test_text_004, 94 | ), 95 | second=[ 96 | ( 97 | 1, 98 | 0, 99 | 'I092 never leave a debug code', 100 | 'intsights_checker', 101 | ), 102 | ], 103 | ) 104 | 105 | def test_case_5( 106 | self, 107 | ): 108 | self.assertEqual( 109 | first=self.get_linting_errors( 110 | source_code=texts.debugging.debugging_test_text_005, 111 | ), 112 | second=[], 113 | ) 114 | 115 | def test_case_6( 116 | self, 117 | ): 118 | self.assertEqual( 119 | first=self.get_linting_errors( 120 | source_code=texts.debugging.debugging_test_text_006, 121 | ), 122 | second=[ 123 | ( 124 | 1, 125 | 0, 126 | 'I092 never leave a debug code', 127 | 'intsights_checker', 128 | ), 129 | ], 130 | ) 131 | 132 | def test_case_7( 133 | self, 134 | ): 135 | self.assertEqual( 136 | first=self.get_linting_errors( 137 | source_code=texts.debugging.debugging_test_text_007, 138 | ), 139 | second=[], 140 | ) 141 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/_errors.py: -------------------------------------------------------------------------------- 1 | class ErrorYielder: 2 | checker_name = 'intsights_checker' 3 | errors = { 4 | 'I001': r'I001 at each python module, there should be a blank line at the end of the file', 5 | 'I002': r'I002 there should not be any blank lines at the top of a python module', 6 | 'I003': r'I003 no tabs indentation, only spaces', 7 | 'I004': r'I004 only 4 spaces indentation', 8 | 'I005': r'I005 inline comments are not allowed', 9 | 'I006': r'I006 indentation should be gradually added', 10 | 'I011': r'I011 only single quote is allowed in strings', 11 | 'I012': r'I012 duplicate definition', 12 | 'I021': r'I021 only one import module per line', 13 | 'I022': r'I022 never import * from module', 14 | 'I023': r'I023 never from import a module name, just 
relative from imports', 15 | 'I024': r'I024 never relative import from a module, import the whole module instead', 16 | 'I025': r'I025 never provide a module alias', 17 | 'I026': r'I026 relative imports must be declared after non-relative ones', 18 | 'I027': r'I027 standard imports and relative imports should be separated by one newline', 19 | 'I028': r'I028 last import must be spaced with two lines', 20 | 'I031': r'I031 control keywords right after special control definitions must not be spaced', 21 | 'I032': r'I032 control keywords must be divided by space all the time', 22 | 'I033': r'I033 control keywords right after special neighbour control definitions must not be spaced', 23 | 'I034': r'I034 except/finally/else must not be spaced', 24 | 'I041': r'I041 an empty list/dict must be declared with []/()/{}', 25 | 'I042': r'I042 each list entry must be in its own line', 26 | 'I043': r'I043 each list entry must end with comma', 27 | 'I044': r'I044 a tuple must be enclosed within () and be spaced with 1 new line', 28 | 'I045': r'I045 each subentry opener must be in its own line', 29 | 'I046': r'I046 dict key and value must be on the same line', 30 | 'I047': r'I047 dict keys must not be on the same line', 31 | 'I048': r'I048 dict keys must be identically indented', 32 | 'I049': r'I049 dict key type must be one of the following: a const, a name, or a dict unpack', 33 | 'I051': r'I051 class definition without inheritance should be declared only by "class Name:"', 34 | 'I052': r'I052 main class definition should start with a "class Name("', 35 | 'I053': r'I053 each base class should be separated by a new-line', 36 | 'I054': r'I054 each base class should be one indentation level above the main class', 37 | 'I055': r'I055 each base class definition should be ended with a comma', 38 | 'I056': r'I056 classes should never inherit from object', 39 | 'I061': r'I061 function definition with no arguments should be defined as: "def function_name():"', 40 | 'I062': r'I062 function definition with arguments should define the arguments starting by the next line', 41 | 'I063': r'I063 each function argument should be in a new line', 42 | 'I064': r'I064 each function argument should be in the same indentation level, 1 level above the function definition line', 43 | 'I065': r'I065 each function argument should be ended with a comma', 44 | 'I066': r'I066 there must not be any empty lines in the function definition', 45 | 'I071': r'I071 exception handler parameter name should be "exception"', 46 | 'I081': r'I081 function default parameter must not be mutable', 47 | 'I091': r'I091 never use a builtin name', 48 | 'I092': r'I092 never leave a debug code', 49 | 'I093': r'I093 do not break a line with backslashes', 50 | 'I094': r'I094 functions first arguments should be either self/cls whether it is a standard method or a classmethod correspondingly', 51 | 'I095': r'I095 class name should start with upper case char and must not include underscore', 52 | 'I096': r'I096 unittest TestCase class name should start with the tested module name and end with TestCase', 53 | 'I097': r'I097 function arguments should be lower case', 54 | 'I098': r'I098 use only fstring for string interpolation. 
format and % are forbidden', 55 | 'I099': r'I099 raising an exception should raise an object not a type', 56 | } 57 | 58 | @classmethod 59 | def yield_error( 60 | cls, 61 | error_id, 62 | line_number, 63 | column_offset, 64 | ): 65 | yield ( 66 | line_number, 67 | column_offset, 68 | cls.errors[error_id], 69 | cls.checker_name, 70 | ) 71 | -------------------------------------------------------------------------------- /tests/test_naming.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | import tokenize 3 | import ast 4 | import io 5 | import unittest 6 | 7 | import flake8_intsights 8 | 9 | from . import texts 10 | 11 | 12 | class NamingTestCase( 13 | unittest.TestCase, 14 | ): 15 | def get_linting_errors( 16 | self, 17 | source_code, 18 | ): 19 | source_code = textwrap.dedent( 20 | text=source_code.lstrip(), 21 | ) 22 | file_tokens = list( 23 | tokenize.tokenize( 24 | readline=io.BytesIO( 25 | initial_bytes=source_code.encode('utf-8'), 26 | ).readline, 27 | ) 28 | ) 29 | tree = ast.parse(source_code) 30 | filename = 'test_naming' 31 | lines = io.StringIO(source_code).readlines() 32 | 33 | checker_obj = flake8_intsights.checker.Checker( 34 | tree=tree, 35 | filename=filename, 36 | lines=lines, 37 | file_tokens=file_tokens, 38 | ) 39 | checker_obj.checkers = [flake8_intsights.checkers.naming.Checker] 40 | linting_errors = list(checker_obj.run()) 41 | 42 | return linting_errors 43 | 44 | def test_case_1( 45 | self, 46 | ): 47 | self.assertCountEqual( 48 | first=self.get_linting_errors( 49 | source_code=texts.naming.naming_test_text_001, 50 | ), 51 | second=[ 52 | ( 53 | 65, 54 | 18, 55 | 'I097 function arguments should be lower case', 56 | 'intsights_checker', 57 | ), 58 | ( 59 | 14, 60 | 4, 61 | 'I094 functions first arguments should be either self/cls whether it is a standard method or a classmethod correspondingly', 62 | 'intsights_checker', 63 | ), 64 | ( 65 | 17, 66 | 17, 67 | 'I094 functions first arguments should be either self/cls whether it is a standard method or a classmethod correspondingly', 68 | 'intsights_checker', 69 | ), 70 | ( 71 | 23, 72 | 17, 73 | 'I094 functions first arguments should be either self/cls whether it is a standard method or a classmethod correspondingly', 74 | 'intsights_checker', 75 | ), 76 | ( 77 | 27, 78 | 4, 79 | 'I094 functions first arguments should be either self/cls whether it is a standard method or a classmethod correspondingly', 80 | 'intsights_checker', 81 | ), 82 | ( 83 | 31, 84 | 17, 85 | 'I094 functions first arguments should be either self/cls whether it is a standard method or a classmethod correspondingly', 86 | 'intsights_checker', 87 | ), 88 | ( 89 | 39, 90 | 17, 91 | 'I094 functions first arguments should be either self/cls whether it is a standard method or a classmethod correspondingly', 92 | 'intsights_checker', 93 | ), 94 | ], 95 | ) 96 | 97 | def test_case_2( 98 | self, 99 | ): 100 | self.assertEqual( 101 | first=self.get_linting_errors( 102 | source_code=texts.naming.naming_test_text_002, 103 | ), 104 | second=[ 105 | ( 106 | 4, 107 | 0, 108 | 'I095 class name should start with upper case char and must not include underscore', 109 | 'intsights_checker', 110 | ), 111 | ( 112 | 7, 113 | 0, 114 | 'I095 class name should start with upper case char and must not include underscore', 115 | 'intsights_checker', 116 | ), 117 | ( 118 | 15, 119 | 0, 120 | 'I096 unittest TestCase class name should start with the tested module name and end with TestCase', 121 | 'intsights_checker', 122 
| ), 123 | ( 124 | 20, 125 | 0, 126 | 'I096 unittest TestCase class name should start with the tested module name and end with TestCase', 127 | 'intsights_checker', 128 | ), 129 | ], 130 | ) 131 | -------------------------------------------------------------------------------- /tests/test_blank_lines.py: -------------------------------------------------------------------------------- 1 | import tokenize 2 | import ast 3 | import io 4 | import unittest 5 | 6 | import flake8_intsights 7 | 8 | from . import texts 9 | 10 | 11 | class BlankLinesTestCase( 12 | unittest.TestCase, 13 | ): 14 | def get_linting_errors( 15 | self, 16 | source_code, 17 | ): 18 | file_tokens = list( 19 | tokenize.tokenize( 20 | readline=io.BytesIO( 21 | initial_bytes=source_code.encode('utf-8'), 22 | ).readline, 23 | ) 24 | ) 25 | tree = ast.parse(source_code) 26 | filename = 'test_blank_lines' 27 | lines = io.StringIO(source_code).readlines() 28 | 29 | checker_obj = flake8_intsights.checker.Checker( 30 | tree=tree, 31 | filename=filename, 32 | lines=lines, 33 | file_tokens=file_tokens, 34 | ) 35 | checker_obj.checkers = [flake8_intsights.checkers.blank_lines.Checker] 36 | linting_errors = list(checker_obj.run()) 37 | 38 | return linting_errors 39 | 40 | def test_case_1( 41 | self, 42 | ): 43 | self.assertEqual( 44 | first=self.get_linting_errors( 45 | source_code=texts.blank_lines.blank_lines_test_text_001, 46 | ), 47 | second=[], 48 | ) 49 | 50 | def test_case_2( 51 | self, 52 | ): 53 | self.assertEqual( 54 | first=self.get_linting_errors( 55 | source_code=texts.blank_lines.blank_lines_test_text_002, 56 | ), 57 | second=[ 58 | ( 59 | 2, 60 | 0, 61 | 'I001 at each python module, there should be a blank line at the end of the file', 62 | 'intsights_checker', 63 | ), 64 | ( 65 | 1, 66 | 0, 67 | 'I002 there should not be any blank lines at the top of a python module', 68 | 'intsights_checker', 69 | ), 70 | ], 71 | ) 72 | 73 | def test_case_3( 74 | self, 75 | ): 76 | self.assertEqual( 77 | first=self.get_linting_errors( 78 | source_code=texts.blank_lines.blank_lines_test_text_003, 79 | ), 80 | second=[ 81 | ( 82 | 3, 83 | 0, 84 | 'I001 at each python module, there should be a blank line at the end of the file', 85 | 'intsights_checker', 86 | ), 87 | ( 88 | 1, 89 | 0, 90 | 'I002 there should not be any blank lines at the top of a python module', 91 | 'intsights_checker', 92 | ), 93 | ], 94 | ) 95 | 96 | def test_case_4( 97 | self, 98 | ): 99 | self.assertEqual( 100 | first=self.get_linting_errors( 101 | source_code=texts.blank_lines.blank_lines_test_text_004, 102 | ), 103 | second=[ 104 | ( 105 | 1, 106 | 0, 107 | 'I001 at each python module, there should be a blank line at the end of the file', 108 | 'intsights_checker', 109 | ), 110 | ], 111 | ) 112 | 113 | def test_case_5( 114 | self, 115 | ): 116 | self.assertEqual( 117 | first=self.get_linting_errors( 118 | source_code=texts.blank_lines.blank_lines_test_text_005, 119 | ), 120 | second=[], 121 | ) 122 | 123 | def test_case_6( 124 | self, 125 | ): 126 | self.assertEqual( 127 | first=self.get_linting_errors( 128 | source_code=texts.blank_lines.blank_lines_test_text_006, 129 | ), 130 | second=[ 131 | ( 132 | 3, 133 | 0, 134 | 'I001 at each python module, there should be a blank line at the end of the file', 135 | 'intsights_checker', 136 | ), 137 | ], 138 | ) 139 | 140 | def test_case_7( 141 | self, 142 | ): 143 | self.assertEqual( 144 | first=self.get_linting_errors( 145 | source_code=texts.blank_lines.blank_lines_test_text_007, 146 | ), 147 | second=[ 148 | ( 149 | 1, 
150 | 0, 151 | 'I002 there should not be any blank lines at the top of a python module', 152 | 'intsights_checker', 153 | ), 154 | ], 155 | ) 156 | 157 | def test_case_8( 158 | self, 159 | ): 160 | self.assertEqual( 161 | first=self.get_linting_errors( 162 | source_code=texts.blank_lines.blank_lines_test_text_008, 163 | ), 164 | second=[ 165 | ( 166 | 1, 167 | 0, 168 | 'I002 there should not be any blank lines at the top of a python module', 169 | 'intsights_checker', 170 | ), 171 | ], 172 | ) 173 | -------------------------------------------------------------------------------- /tests/test_builtins.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | import tokenize 3 | import ast 4 | import io 5 | import unittest 6 | 7 | import flake8_intsights 8 | 9 | from . import texts 10 | 11 | 12 | class BuiltinsTestCase( 13 | unittest.TestCase, 14 | ): 15 | def get_linting_errors( 16 | self, 17 | source_code, 18 | ): 19 | source_code = textwrap.dedent( 20 | text=source_code.lstrip(), 21 | ) 22 | file_tokens = list( 23 | tokenize.tokenize( 24 | readline=io.BytesIO( 25 | initial_bytes=source_code.encode('utf-8'), 26 | ).readline, 27 | ) 28 | ) 29 | tree = ast.parse(source_code) 30 | filename = 'test_builtins' 31 | lines = io.StringIO(source_code).readlines() 32 | 33 | checker_obj = flake8_intsights.checker.Checker( 34 | tree=tree, 35 | filename=filename, 36 | lines=lines, 37 | file_tokens=file_tokens, 38 | ) 39 | checker_obj.checkers = [flake8_intsights.checkers.builtins.Checker] 40 | linting_errors = list(checker_obj.run()) 41 | 42 | return linting_errors 43 | 44 | def test_case_1( 45 | self, 46 | ): 47 | self.assertEqual( 48 | first=self.get_linting_errors( 49 | source_code=texts.builtins.builtins_test_text_001, 50 | ), 51 | second=[ 52 | ( 53 | 2, 54 | 0, 55 | 'I091 never use a builtin name', 56 | 'intsights_checker', 57 | ), 58 | ], 59 | ) 60 | 61 | def test_case_2( 62 | self, 63 | ): 64 | self.assertEqual( 65 | first=self.get_linting_errors( 66 | source_code=texts.builtins.builtins_test_text_002, 67 | ), 68 | second=[ 69 | ( 70 | 10, 71 | 4, 72 | 'I091 never use a builtin name', 73 | 'intsights_checker', 74 | ), 75 | ( 76 | 23, 77 | 4, 78 | 'I091 never use a builtin name', 79 | 'intsights_checker', 80 | ), 81 | ( 82 | 27, 83 | 0, 84 | 'I091 never use a builtin name', 85 | 'intsights_checker', 86 | ), 87 | ], 88 | ) 89 | 90 | def test_case_3( 91 | self, 92 | ): 93 | self.assertEqual( 94 | first=self.get_linting_errors( 95 | source_code=texts.builtins.builtins_test_text_003, 96 | ), 97 | second=[ 98 | ( 99 | 4, 100 | 4, 101 | 'I091 never use a builtin name', 102 | 'intsights_checker', 103 | ), 104 | ( 105 | 10, 106 | 7, 107 | 'I091 never use a builtin name', 108 | 'intsights_checker', 109 | ), 110 | ( 111 | 16, 112 | 4, 113 | 'I091 never use a builtin name', 114 | 'intsights_checker', 115 | ), 116 | ], 117 | ) 118 | 119 | def test_case_4( 120 | self, 121 | ): 122 | self.assertEqual( 123 | first=self.get_linting_errors( 124 | source_code=texts.builtins.builtins_test_text_004, 125 | ), 126 | second=[ 127 | ( 128 | 7, 129 | 15, 130 | 'I091 never use a builtin name', 131 | 'intsights_checker', 132 | ), 133 | ( 134 | 13, 135 | 19, 136 | 'I091 never use a builtin name', 137 | 'intsights_checker', 138 | ), 139 | ( 140 | 16, 141 | 42, 142 | 'I091 never use a builtin name', 143 | 'intsights_checker', 144 | ), 145 | ( 146 | 22, 147 | 25, 148 | 'I091 never use a builtin name', 149 | 'intsights_checker', 150 | ), 151 | ], 152 | ) 153 | 154 | def 
test_case_5( 155 | self, 156 | ): 157 | self.assertEqual( 158 | first=self.get_linting_errors( 159 | source_code=texts.builtins.builtins_test_text_005, 160 | ), 161 | second=[ 162 | ( 163 | 7, 164 | 8, 165 | 'I091 never use a builtin name', 166 | 'intsights_checker', 167 | ), 168 | ( 169 | 15, 170 | 18, 171 | 'I091 never use a builtin name', 172 | 'intsights_checker', 173 | ), 174 | ], 175 | ) 176 | 177 | def test_case_6( 178 | self, 179 | ): 180 | self.assertEqual( 181 | first=self.get_linting_errors( 182 | source_code=texts.builtins.builtins_test_text_006, 183 | ), 184 | second=[ 185 | ( 186 | 4, 187 | 0, 188 | 'I091 never use a builtin name', 189 | 'intsights_checker', 190 | ), 191 | ], 192 | ) 193 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/duplications.py: -------------------------------------------------------------------------------- 1 | import astroid 2 | 3 | from . import _checker 4 | 5 | 6 | class Checker( 7 | _checker.BaseChecker, 8 | ): 9 | @classmethod 10 | def check( 11 | cls, 12 | filename, 13 | lines, 14 | tokens, 15 | start_position_to_token, 16 | ast_tree, 17 | astroid_tree, 18 | all_astroid_nodes, 19 | ): 20 | yield from cls.check_no_duplicate_classes( 21 | all_astroid_nodes=all_astroid_nodes, 22 | ) 23 | 24 | yield from cls.check_no_duplicate_functions( 25 | all_astroid_nodes=all_astroid_nodes, 26 | ) 27 | 28 | for node in all_astroid_nodes: 29 | if isinstance(node, astroid.Dict): 30 | yield from cls.check_no_duplicate_dict_keys( 31 | node=node, 32 | ) 33 | elif isinstance(node, astroid.ClassDef): 34 | yield from cls.check_no_duplicate_methods( 35 | node=node, 36 | ) 37 | 38 | @classmethod 39 | def check_no_duplicate_dict_keys( 40 | cls, 41 | node, 42 | ): 43 | dict_keys = set() 44 | 45 | for dict_item in node.items: 46 | item_key, item_value = dict_item 47 | 48 | if isinstance(item_key, astroid.DictUnpack): 49 | item_repr = item_value 50 | else: 51 | item_repr = getattr(item_key, 'value', item_key.as_string()) 52 | 53 | if item_repr not in dict_keys: 54 | dict_keys.add(item_repr) 55 | else: 56 | yield from cls.error_yielder.yield_error( 57 | error_id='I012', 58 | line_number=item_key.lineno, 59 | column_offset=item_key.col_offset, 60 | ) 61 | 62 | @classmethod 63 | def check_no_duplicate_classes( 64 | cls, 65 | all_astroid_nodes, 66 | ): 67 | class_tuples = set() 68 | 69 | for node in all_astroid_nodes: 70 | if not isinstance(node, astroid.ClassDef): 71 | continue 72 | else: 73 | class_node = node 74 | 75 | class_tuple = ( 76 | class_node.parent, 77 | class_node.name, 78 | ) 79 | if class_tuple not in class_tuples: 80 | class_tuples.add(class_tuple) 81 | else: 82 | yield from cls.error_yielder.yield_error( 83 | error_id='I012', 84 | line_number=class_node.lineno, 85 | column_offset=class_node.col_offset, 86 | ) 87 | 88 | @classmethod 89 | def check_no_duplicate_functions( 90 | cls, 91 | all_astroid_nodes, 92 | ): 93 | function_tuples = set() 94 | 95 | for node in all_astroid_nodes: 96 | if not isinstance(node, astroid.FunctionDef): 97 | continue 98 | elif isinstance(node.parent, astroid.ClassDef): 99 | continue 100 | else: 101 | function_node = node 102 | 103 | function_tuple = ( 104 | function_node.parent, 105 | function_node.name, 106 | ) 107 | if function_tuple not in function_tuples: 108 | function_tuples.add(function_tuple) 109 | else: 110 | yield from cls.error_yielder.yield_error( 111 | error_id='I012', 112 | line_number=function_node.lineno, 113 | column_offset=function_node.col_offset, 114 | ) 
115 | 116 | @classmethod 117 | def check_no_duplicate_methods( 118 | cls, 119 | node, 120 | ): 121 | method_names = set() 122 | for node in node.body: 123 | if not isinstance(node, astroid.FunctionDef): 124 | continue 125 | else: 126 | method_node = node 127 | 128 | if method_node.decorators is not None: 129 | for decorator in method_node.decorators.nodes: 130 | is_setter_attribute = isinstance(decorator, astroid.Attribute) and decorator.attrname == 'setter' 131 | is_getter_property = isinstance(decorator, astroid.Name) and decorator.name == 'property' 132 | 133 | if is_setter_attribute: 134 | current_method_names = [ 135 | f'{method_node.name}.setter', 136 | ] 137 | elif is_getter_property: 138 | current_method_names = [ 139 | f'{method_node.name}.getter', 140 | ] 141 | else: 142 | current_method_names = [ 143 | f'{method_node.name}.setter', 144 | f'{method_node.name}.getter', 145 | method_node.name, 146 | ] 147 | else: 148 | current_method_names = [ 149 | f'{method_node.name}.setter', 150 | f'{method_node.name}.getter', 151 | method_node.name, 152 | ] 153 | 154 | for method_name in current_method_names: 155 | if method_name not in method_names: 156 | method_names.add(method_name) 157 | else: 158 | yield from cls.error_yielder.yield_error( 159 | error_id='I012', 160 | line_number=method_node.lineno, 161 | column_offset=method_node.col_offset, 162 | ) 163 | 164 | break 165 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/imports.py: -------------------------------------------------------------------------------- 1 | import astroid 2 | 3 | from . import _checker 4 | 5 | 6 | class Checker( 7 | _checker.BaseChecker, 8 | ): 9 | @classmethod 10 | def check( 11 | cls, 12 | filename, 13 | lines, 14 | tokens, 15 | start_position_to_token, 16 | ast_tree, 17 | astroid_tree, 18 | all_astroid_nodes, 19 | ): 20 | import_nodes = [ 21 | node 22 | for node in astroid_tree.body 23 | if isinstance(node, astroid.Import) 24 | ] 25 | from_import_nodes = [ 26 | node 27 | for node in astroid_tree.body 28 | if isinstance(node, astroid.ImportFrom) 29 | ] 30 | 31 | yield from cls.ensure_imports( 32 | imports=import_nodes, 33 | ) 34 | 35 | yield from cls.ensure_from_imports( 36 | from_imports=from_import_nodes, 37 | ) 38 | 39 | yield from cls.ensure_imports_order( 40 | imports=import_nodes, 41 | from_imports=from_import_nodes, 42 | ) 43 | 44 | yield from cls.ensure_last_import_spaced( 45 | lines=lines, 46 | imports=import_nodes, 47 | from_imports=from_import_nodes, 48 | ) 49 | 50 | @classmethod 51 | def ensure_imports_order( 52 | cls, 53 | imports, 54 | from_imports, 55 | ): 56 | imports_line_numbers = [ 57 | import_node.lineno 58 | for import_node in imports 59 | ] 60 | from_imports_line_numbers = [ 61 | from_import_node.lineno 62 | for from_import_node in from_imports 63 | ] 64 | 65 | if imports_line_numbers and from_imports_line_numbers: 66 | max_import_line_number = max(imports_line_numbers) 67 | min_from_import_line_number = min(from_imports_line_numbers) 68 | 69 | imports_correct_order = max_import_line_number < min_from_import_line_number 70 | imports_correct_separation = max_import_line_number + 2 == min_from_import_line_number 71 | 72 | if not imports_correct_order: 73 | yield from cls.error_yielder.yield_error( 74 | error_id='I026', 75 | line_number=min_from_import_line_number, 76 | column_offset=0, 77 | ) 78 | 79 | if imports_correct_order and not imports_correct_separation: 80 | yield from cls.error_yielder.yield_error( 81 | error_id='I027', 82 
| line_number=min_from_import_line_number, 83 | column_offset=0, 84 | ) 85 | 86 | @classmethod 87 | def ensure_last_import_spaced( 88 | cls, 89 | lines, 90 | imports, 91 | from_imports, 92 | ): 93 | imports_line_numbers = [ 94 | import_node.lineno 95 | for import_node in imports 96 | ] 97 | from_imports_line_numbers = [ 98 | from_import_node.lineno 99 | for from_import_node in from_imports 100 | ] 101 | 102 | if imports_line_numbers or from_imports_line_numbers: 103 | max_import_line_number = max(imports_line_numbers + from_imports_line_numbers) 104 | next_two_lines = lines[max_import_line_number:max_import_line_number + 2] 105 | 106 | for line in next_two_lines: 107 | if line.strip(): 108 | yield from cls.error_yielder.yield_error( 109 | error_id='I028', 110 | line_number=max_import_line_number, 111 | column_offset=0, 112 | ) 113 | 114 | @classmethod 115 | def ensure_imports( 116 | cls, 117 | imports, 118 | ): 119 | for import_node in imports: 120 | if len(import_node.names) > 1: 121 | yield from cls.error_yielder.yield_error( 122 | error_id='I021', 123 | line_number=import_node.lineno, 124 | column_offset=import_node.col_offset, 125 | ) 126 | 127 | if import_node.names[0][1] is not None: 128 | yield from cls.error_yielder.yield_error( 129 | error_id='I025', 130 | line_number=import_node.lineno, 131 | column_offset=import_node.col_offset, 132 | ) 133 | 134 | @classmethod 135 | def ensure_from_imports( 136 | cls, 137 | from_imports, 138 | ): 139 | for from_import_node in from_imports: 140 | if len(from_import_node.names) > 1: 141 | yield from cls.error_yielder.yield_error( 142 | error_id='I021', 143 | line_number=from_import_node.lineno, 144 | column_offset=from_import_node.col_offset, 145 | ) 146 | 147 | if len(from_import_node.names) == 1 and from_import_node.names[0][0] == '*': 148 | yield from cls.error_yielder.yield_error( 149 | error_id='I022', 150 | line_number=from_import_node.lineno, 151 | column_offset=from_import_node.col_offset, 152 | ) 153 | 154 | if from_import_node.modname != '': 155 | yield from cls.error_yielder.yield_error( 156 | error_id='I023', 157 | line_number=from_import_node.lineno, 158 | column_offset=from_import_node.col_offset, 159 | ) 160 | 161 | if from_import_node.level and from_import_node.modname != '': 162 | yield from cls.error_yielder.yield_error( 163 | error_id='I024', 164 | line_number=from_import_node.lineno, 165 | column_offset=from_import_node.col_offset, 166 | ) 167 | 168 | if from_import_node.names[0][1] is not None: 169 | yield from cls.error_yielder.yield_error( 170 | error_id='I025', 171 | line_number=from_import_node.lineno, 172 | column_offset=from_import_node.col_offset, 173 | ) 174 | -------------------------------------------------------------------------------- /tests/test_indents.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | import tokenize 3 | import ast 4 | import io 5 | import unittest 6 | 7 | import flake8_intsights 8 | 9 | from . 
import texts 10 | 11 | 12 | class IndentsTestCase( 13 | unittest.TestCase, 14 | ): 15 | def get_linting_errors( 16 | self, 17 | source_code, 18 | ): 19 | source_code = textwrap.dedent( 20 | text=source_code.lstrip(), 21 | ) 22 | file_tokens = list( 23 | tokenize.tokenize( 24 | readline=io.BytesIO( 25 | initial_bytes=source_code.encode('utf-8'), 26 | ).readline, 27 | ) 28 | ) 29 | tree = ast.parse(source_code) 30 | filename = 'test_indents' 31 | lines = io.StringIO(source_code).readlines() 32 | 33 | checker_obj = flake8_intsights.checker.Checker( 34 | tree=tree, 35 | filename=filename, 36 | lines=lines, 37 | file_tokens=file_tokens, 38 | ) 39 | checker_obj.checkers = [flake8_intsights.checkers.indents.Checker] 40 | linting_errors = list(checker_obj.run()) 41 | 42 | return linting_errors 43 | 44 | def test_case_1( 45 | self, 46 | ): 47 | self.assertEqual( 48 | first=self.get_linting_errors( 49 | source_code=texts.indents.indents_test_text_001, 50 | ), 51 | second=[], 52 | ) 53 | 54 | def test_case_2( 55 | self, 56 | ): 57 | self.assertEqual( 58 | first=self.get_linting_errors( 59 | source_code=texts.indents.indents_test_text_002, 60 | ), 61 | second=[ 62 | ( 63 | 2, 64 | 0, 65 | 'I004 only 4 spaces indentation', 66 | 'intsights_checker', 67 | ), 68 | ], 69 | ) 70 | 71 | def test_case_3( 72 | self, 73 | ): 74 | self.assertEqual( 75 | first=self.get_linting_errors( 76 | source_code=texts.indents.indents_test_text_003, 77 | ), 78 | second=[ 79 | ( 80 | 2, 81 | 0, 82 | 'I003 no tabs indentation, only spaces', 83 | 'intsights_checker', 84 | ), 85 | ], 86 | ) 87 | 88 | def test_case_4( 89 | self, 90 | ): 91 | self.assertEqual( 92 | first=self.get_linting_errors( 93 | source_code=texts.indents.indents_test_text_004, 94 | ), 95 | second=[], 96 | ) 97 | 98 | def test_case_5( 99 | self, 100 | ): 101 | self.assertEqual( 102 | first=self.get_linting_errors( 103 | source_code=texts.indents.indents_test_text_005, 104 | ), 105 | second=[ 106 | ( 107 | 2, 108 | 0, 109 | 'I006 indentation should be gradually added', 110 | 'intsights_checker', 111 | ), 112 | ], 113 | ) 114 | 115 | def test_case_6( 116 | self, 117 | ): 118 | self.assertEqual( 119 | first=self.get_linting_errors( 120 | source_code=texts.indents.indents_test_text_006, 121 | ), 122 | second=[ 123 | ( 124 | 2, 125 | 0, 126 | 'I006 indentation should be gradually added', 127 | 'intsights_checker', 128 | ), 129 | ], 130 | ) 131 | 132 | def test_case_7( 133 | self, 134 | ): 135 | self.assertEqual( 136 | first=self.get_linting_errors( 137 | source_code=texts.indents.indents_test_text_007, 138 | ), 139 | second=[ 140 | ( 141 | 4, 142 | 0, 143 | 'I006 indentation should be gradually added', 144 | 'intsights_checker', 145 | ), 146 | ], 147 | ) 148 | 149 | def test_case_8( 150 | self, 151 | ): 152 | self.assertEqual( 153 | first=self.get_linting_errors( 154 | source_code=texts.indents.indents_test_text_008, 155 | ), 156 | second=[ 157 | ( 158 | 3, 159 | 0, 160 | 'I006 indentation should be gradually added', 161 | 'intsights_checker', 162 | ), 163 | ], 164 | ) 165 | 166 | def test_case_9( 167 | self, 168 | ): 169 | self.assertEqual( 170 | first=self.get_linting_errors( 171 | source_code=texts.indents.indents_test_text_009, 172 | ), 173 | second=[], 174 | ) 175 | 176 | def test_case_10( 177 | self, 178 | ): 179 | self.assertEqual( 180 | first=self.get_linting_errors( 181 | source_code=texts.indents.indents_test_text_010, 182 | ), 183 | second=[], 184 | ) 185 | 186 | def test_case_11( 187 | self, 188 | ): 189 | self.assertEqual( 190 | 
first=self.get_linting_errors( 191 | source_code=texts.indents.indents_test_text_011, 192 | ), 193 | second=[ 194 | ( 195 | 3, 196 | 0, 197 | 'I006 indentation should be gradually added', 198 | 'intsights_checker', 199 | ), 200 | ], 201 | ) 202 | 203 | def test_case_12( 204 | self, 205 | ): 206 | self.assertEqual( 207 | first=self.get_linting_errors( 208 | source_code=texts.indents.indents_test_text_012, 209 | ), 210 | second=[], 211 | ) 212 | 213 | def test_case_13( 214 | self, 215 | ): 216 | self.assertEqual( 217 | first=self.get_linting_errors( 218 | source_code=texts.indents.indents_test_text_013, 219 | ), 220 | second=[], 221 | ) 222 | 223 | def test_case_14( 224 | self, 225 | ): 226 | self.assertEqual( 227 | first=self.get_linting_errors( 228 | source_code=texts.indents.indents_test_text_014, 229 | ), 230 | second=[], 231 | ) 232 | 233 | def test_case_15( 234 | self, 235 | ): 236 | self.assertEqual( 237 | first=self.get_linting_errors( 238 | source_code=texts.indents.indents_test_text_015, 239 | ), 240 | second=[], 241 | ) 242 | -------------------------------------------------------------------------------- /tests/test_duplications.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | import tokenize 3 | import ast 4 | import io 5 | import unittest 6 | 7 | import flake8_intsights 8 | 9 | from . import texts 10 | 11 | 12 | class DuplicationsTestCase( 13 | unittest.TestCase, 14 | ): 15 | def get_linting_errors( 16 | self, 17 | source_code, 18 | ): 19 | source_code = textwrap.dedent( 20 | text=source_code.lstrip(), 21 | ) 22 | file_tokens = list( 23 | tokenize.tokenize( 24 | readline=io.BytesIO( 25 | initial_bytes=source_code.encode('utf-8'), 26 | ).readline, 27 | ) 28 | ) 29 | tree = ast.parse(source_code) 30 | filename = 'test_duplications' 31 | lines = io.StringIO(source_code).readlines() 32 | 33 | checker_obj = flake8_intsights.checker.Checker( 34 | tree=tree, 35 | filename=filename, 36 | lines=lines, 37 | file_tokens=file_tokens, 38 | ) 39 | checker_obj.checkers = [flake8_intsights.checkers.duplications.Checker] 40 | linting_errors = list(checker_obj.run()) 41 | 42 | return linting_errors 43 | 44 | def test_case_1( 45 | self, 46 | ): 47 | self.assertEqual( 48 | first=self.get_linting_errors( 49 | source_code=texts.duplications.duplications_test_text_001, 50 | ), 51 | second=[], 52 | ) 53 | 54 | def test_case_2( 55 | self, 56 | ): 57 | self.assertEqual( 58 | first=self.get_linting_errors( 59 | source_code=texts.duplications.duplications_test_text_002, 60 | ), 61 | second=[ 62 | ( 63 | 4, 64 | 0, 65 | 'I012 duplicate definition', 66 | 'intsights_checker', 67 | ), 68 | ], 69 | ) 70 | 71 | def test_case_3( 72 | self, 73 | ): 74 | self.assertEqual( 75 | first=self.get_linting_errors( 76 | source_code=texts.duplications.duplications_test_text_003, 77 | ), 78 | second=[], 79 | ) 80 | 81 | def test_case_4( 82 | self, 83 | ): 84 | self.assertEqual( 85 | first=self.get_linting_errors( 86 | source_code=texts.duplications.duplications_test_text_004, 87 | ), 88 | second=[ 89 | ( 90 | 4, 91 | 0, 92 | 'I012 duplicate definition', 93 | 'intsights_checker', 94 | ), 95 | ], 96 | ) 97 | 98 | def test_case_5( 99 | self, 100 | ): 101 | self.assertEqual( 102 | first=self.get_linting_errors( 103 | source_code=texts.duplications.duplications_test_text_005, 104 | ), 105 | second=[], 106 | ) 107 | 108 | def test_case_6( 109 | self, 110 | ): 111 | self.assertEqual( 112 | first=self.get_linting_errors( 113 | 
source_code=texts.duplications.duplications_test_text_006, 114 | ), 115 | second=[ 116 | ( 117 | 5, 118 | 4, 119 | 'I012 duplicate definition', 120 | 'intsights_checker', 121 | ), 122 | ], 123 | ) 124 | 125 | def test_case_7( 126 | self, 127 | ): 128 | self.assertEqual( 129 | first=self.get_linting_errors( 130 | source_code=texts.duplications.duplications_test_text_007, 131 | ), 132 | second=[], 133 | ) 134 | 135 | def test_case_8( 136 | self, 137 | ): 138 | self.assertEqual( 139 | first=self.get_linting_errors( 140 | source_code=texts.duplications.duplications_test_text_008, 141 | ), 142 | second=[ 143 | ( 144 | 10, 145 | 4, 146 | 'I012 duplicate definition', 147 | 'intsights_checker', 148 | ), 149 | ], 150 | ) 151 | 152 | def test_case_9( 153 | self, 154 | ): 155 | self.assertEqual( 156 | first=self.get_linting_errors( 157 | source_code=texts.duplications.duplications_test_text_009, 158 | ), 159 | second=[], 160 | ) 161 | 162 | def test_case_10( 163 | self, 164 | ): 165 | self.assertEqual( 166 | first=self.get_linting_errors( 167 | source_code=texts.duplications.duplications_test_text_010, 168 | ), 169 | second=[ 170 | ( 171 | 5, 172 | 4, 173 | 'I012 duplicate definition', 174 | 'intsights_checker', 175 | ), 176 | ], 177 | ) 178 | 179 | def test_case_11( 180 | self, 181 | ): 182 | self.assertEqual( 183 | first=self.get_linting_errors( 184 | source_code=texts.duplications.duplications_test_text_011, 185 | ), 186 | second=[], 187 | ) 188 | 189 | def test_case_12( 190 | self, 191 | ): 192 | self.assertEqual( 193 | first=self.get_linting_errors( 194 | source_code=texts.duplications.duplications_test_text_012, 195 | ), 196 | second=[ 197 | ( 198 | 5, 199 | 4, 200 | 'I012 duplicate definition', 201 | 'intsights_checker', 202 | ), 203 | ], 204 | ) 205 | 206 | def test_case_13( 207 | self, 208 | ): 209 | self.assertEqual( 210 | first=self.get_linting_errors( 211 | source_code=texts.duplications.duplications_test_text_013, 212 | ), 213 | second=[], 214 | ) 215 | 216 | def test_case_14( 217 | self, 218 | ): 219 | self.assertEqual( 220 | first=self.get_linting_errors( 221 | source_code=texts.duplications.duplications_test_text_014, 222 | ), 223 | second=[ 224 | ( 225 | 3, 226 | 4, 227 | 'I012 duplicate definition', 228 | 'intsights_checker', 229 | ), 230 | ], 231 | ) 232 | 233 | def test_case_15( 234 | self, 235 | ): 236 | self.assertEqual( 237 | first=self.get_linting_errors( 238 | source_code=texts.duplications.duplications_test_text_015, 239 | ), 240 | second=[], 241 | ) 242 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/builtins.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import inspect 3 | import builtins 4 | 5 | from . 
import _checker 6 | 7 | 8 | class Checker( 9 | _checker.BaseChecker, 10 | ): 11 | BUILTINS = [ 12 | member[0] 13 | for member in inspect.getmembers(builtins) 14 | if member[0] not in [ 15 | '__name__', 16 | '__doc__', 17 | 'credits', 18 | '_', 19 | ] 20 | ] 21 | 22 | FUNCTION_NODES = ( 23 | ast.FunctionDef, 24 | ast.AsyncFunctionDef, 25 | ) 26 | 27 | FOR_NODES = ( 28 | ast.For, 29 | ast.AsyncFor, 30 | ) 31 | 32 | WITH_NODES = ( 33 | ast.With, 34 | ast.AsyncWith, 35 | ) 36 | 37 | IMPORT_NODES = ( 38 | ast.Import, 39 | ast.ImportFrom, 40 | ) 41 | 42 | ITERATOR_NODES = ( 43 | ast.Tuple, 44 | ast.List, 45 | ) 46 | 47 | @classmethod 48 | def check( 49 | cls, 50 | filename, 51 | lines, 52 | tokens, 53 | start_position_to_token, 54 | ast_tree, 55 | astroid_tree, 56 | all_astroid_nodes, 57 | ): 58 | for node in ast.walk(ast_tree): 59 | if isinstance(node, ast.Assign): 60 | yield from cls.check_assignment( 61 | node=node, 62 | ) 63 | elif isinstance(node, cls.FUNCTION_NODES): 64 | yield from cls.check_function_definition( 65 | node=node, 66 | ) 67 | elif isinstance(node, cls.FOR_NODES): 68 | yield from cls.check_for_loop( 69 | node=node, 70 | ) 71 | elif isinstance(node, cls.WITH_NODES): 72 | yield from cls.check_with( 73 | node=node, 74 | ) 75 | elif isinstance(node, ast.ListComp): 76 | yield from cls.check_list_comprehension( 77 | node=node, 78 | ) 79 | elif isinstance(node, ast.ClassDef): 80 | yield from cls.check_class( 81 | node=node, 82 | ) 83 | 84 | @classmethod 85 | def check_assignment( 86 | cls, 87 | node, 88 | ): 89 | if isinstance(node.parent, ast.ClassDef): 90 | return 91 | 92 | stack = list(node.targets) 93 | while stack: 94 | item = stack.pop() 95 | if isinstance(item, cls.ITERATOR_NODES): 96 | stack.extend(list(item.elts)) 97 | elif isinstance(item, ast.Name) and item.id in cls.BUILTINS: 98 | yield from cls.error_yielder.yield_error( 99 | error_id='I091', 100 | line_number=item.lineno, 101 | column_offset=item.col_offset, 102 | ) 103 | elif isinstance(item, ast.Starred) and item.value.id in cls.BUILTINS: 104 | yield from cls.error_yielder.yield_error( 105 | error_id='I091', 106 | line_number=item.lineno, 107 | column_offset=item.col_offset, 108 | ) 109 | 110 | @classmethod 111 | def check_function_definition( 112 | cls, 113 | node, 114 | ): 115 | if node.name in cls.BUILTINS and not isinstance(node.parent, ast.ClassDef): 116 | yield from cls.error_yielder.yield_error( 117 | error_id='I091', 118 | line_number=node.lineno, 119 | column_offset=node.col_offset, 120 | ) 121 | 122 | for arg in node.args.args: 123 | if arg.arg in cls.BUILTINS: 124 | yield from cls.error_yielder.yield_error( 125 | error_id='I091', 126 | line_number=arg.lineno, 127 | column_offset=arg.col_offset, 128 | ) 129 | 130 | @classmethod 131 | def check_for_loop( 132 | cls, 133 | node, 134 | ): 135 | stack = [ 136 | node.target, 137 | ] 138 | while stack: 139 | item = stack.pop() 140 | if isinstance(item, cls.ITERATOR_NODES): 141 | stack.extend(list(item.elts)) 142 | elif isinstance(item, ast.Name) and item.id in cls.BUILTINS: 143 | yield from cls.error_yielder.yield_error( 144 | error_id='I091', 145 | line_number=item.lineno, 146 | column_offset=item.col_offset, 147 | ) 148 | elif isinstance(item, ast.Starred) and item.value.id in cls.BUILTINS: 149 | yield from cls.error_yielder.yield_error( 150 | error_id='I091', 151 | line_number=item.lineno, 152 | column_offset=item.col_offset, 153 | ) 154 | 155 | @classmethod 156 | def check_with( 157 | cls, 158 | node, 159 | ): 160 | for item in node.items: 161 | var = 
item.optional_vars 162 | if isinstance(var, cls.ITERATOR_NODES): 163 | for element in var.elts: 164 | if isinstance(element, ast.Name) and element.id in cls.BUILTINS: 165 | yield from cls.error_yielder.yield_error( 166 | error_id='I091', 167 | line_number=element.lineno, 168 | column_offset=element.col_offset, 169 | ) 170 | elif isinstance(element, ast.Starred) and element.value.id in cls.BUILTINS: 171 | yield from cls.error_yielder.yield_error( 172 | error_id='I091', 173 | line_number=element.lineno, 174 | column_offset=element.col_offset, 175 | ) 176 | elif isinstance(var, ast.Name) and var.id in cls.BUILTINS: 177 | yield from cls.error_yielder.yield_error( 178 | error_id='I091', 179 | line_number=var.lineno, 180 | column_offset=var.col_offset, 181 | ) 182 | 183 | @classmethod 184 | def check_list_comprehension( 185 | cls, 186 | node, 187 | ): 188 | for generator in node.generators: 189 | if isinstance(generator.target, ast.Name) and generator.target.id in cls.BUILTINS: 190 | yield from cls.error_yielder.yield_error( 191 | error_id='I091', 192 | line_number=generator.target.lineno, 193 | column_offset=generator.target.col_offset, 194 | ) 195 | elif isinstance(generator.target, cls.ITERATOR_NODES): 196 | for tuple_element in generator.target.elts: 197 | if isinstance(tuple_element, ast.Name) and tuple_element.id in cls.BUILTINS: 198 | yield from cls.error_yielder.yield_error( 199 | error_id='I091', 200 | line_number=tuple_element.lineno, 201 | column_offset=tuple_element.col_offset, 202 | ) 203 | 204 | @classmethod 205 | def check_class( 206 | cls, 207 | node, 208 | ): 209 | if node.name in cls.BUILTINS: 210 | yield from cls.error_yielder.yield_error( 211 | error_id='I091', 212 | line_number=node.lineno, 213 | column_offset=node.col_offset, 214 | ) 215 | -------------------------------------------------------------------------------- /tests/test_single_quotes.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | import tokenize 3 | import ast 4 | import io 5 | import unittest 6 | 7 | import flake8_intsights 8 | 9 | from . 
import texts 10 | 11 | 12 | class SingleQuotesTestCase( 13 | unittest.TestCase, 14 | ): 15 | def get_linting_errors( 16 | self, 17 | source_code, 18 | ): 19 | source_code = textwrap.dedent( 20 | text=source_code.lstrip(), 21 | ) 22 | file_tokens = list( 23 | tokenize.tokenize( 24 | readline=io.BytesIO( 25 | initial_bytes=source_code.encode('utf-8'), 26 | ).readline, 27 | ) 28 | ) 29 | tree = ast.parse(source_code) 30 | filename = 'test_single_quotes' 31 | lines = io.StringIO(source_code).readlines() 32 | 33 | checker_obj = flake8_intsights.checker.Checker( 34 | tree=tree, 35 | filename=filename, 36 | lines=lines, 37 | file_tokens=file_tokens, 38 | ) 39 | checker_obj.checkers = [flake8_intsights.checkers.single_quotes.Checker] 40 | linting_errors = list(checker_obj.run()) 41 | 42 | return linting_errors 43 | 44 | def test_case_1( 45 | self, 46 | ): 47 | self.assertEqual( 48 | first=self.get_linting_errors( 49 | source_code=texts.single_quotes.single_quotes_test_text_001, 50 | ), 51 | second=[], 52 | ) 53 | 54 | def test_case_2( 55 | self, 56 | ): 57 | self.assertEqual( 58 | first=self.get_linting_errors( 59 | source_code=texts.single_quotes.single_quotes_test_text_002, 60 | ), 61 | second=[ 62 | ( 63 | 1, 64 | 6, 65 | 'I011 only single quote is allowed in strings', 66 | 'intsights_checker', 67 | ), 68 | ], 69 | ) 70 | 71 | def test_case_3( 72 | self, 73 | ): 74 | self.assertEqual( 75 | first=self.get_linting_errors( 76 | source_code=texts.single_quotes.single_quotes_test_text_003, 77 | ), 78 | second=[], 79 | ) 80 | 81 | def test_case_4( 82 | self, 83 | ): 84 | self.assertEqual( 85 | first=self.get_linting_errors( 86 | source_code=texts.single_quotes.single_quotes_test_text_004, 87 | ), 88 | second=[ 89 | ( 90 | 1, 91 | 9, 92 | 'I011 only single quote is allowed in strings', 93 | 'intsights_checker', 94 | ), 95 | ], 96 | ) 97 | 98 | def test_case_5( 99 | self, 100 | ): 101 | self.assertEqual( 102 | first=self.get_linting_errors( 103 | source_code=texts.single_quotes.single_quotes_test_text_005, 104 | ), 105 | second=[], 106 | ) 107 | 108 | def test_case_6( 109 | self, 110 | ): 111 | self.assertEqual( 112 | first=self.get_linting_errors( 113 | source_code=texts.single_quotes.single_quotes_test_text_006, 114 | ), 115 | second=[ 116 | ( 117 | 2, 118 | 8, 119 | 'I011 only single quote is allowed in strings', 120 | 'intsights_checker', 121 | ), 122 | ], 123 | ) 124 | 125 | def test_case_7( 126 | self, 127 | ): 128 | self.assertEqual( 129 | first=self.get_linting_errors( 130 | source_code=texts.single_quotes.single_quotes_test_text_007, 131 | ), 132 | second=[], 133 | ) 134 | 135 | def test_case_8( 136 | self, 137 | ): 138 | self.assertEqual( 139 | first=self.get_linting_errors( 140 | source_code=texts.single_quotes.single_quotes_test_text_008, 141 | ), 142 | second=[ 143 | ( 144 | 2, 145 | 8, 146 | 'I011 only single quote is allowed in strings', 147 | 'intsights_checker', 148 | ), 149 | ], 150 | ) 151 | 152 | def test_case_9( 153 | self, 154 | ): 155 | self.assertEqual( 156 | first=self.get_linting_errors( 157 | source_code=texts.single_quotes.single_quotes_test_text_009, 158 | ), 159 | second=[], 160 | ) 161 | 162 | def test_case_10( 163 | self, 164 | ): 165 | self.assertEqual( 166 | first=self.get_linting_errors( 167 | source_code=texts.single_quotes.single_quotes_test_text_010, 168 | ), 169 | second=[ 170 | ( 171 | 4, 172 | 4, 173 | 'I011 only single quote is allowed in strings', 174 | 'intsights_checker', 175 | ), 176 | ], 177 | ) 178 | 179 | def test_case_11( 180 | self, 181 | ): 182 | 
self.assertEqual( 183 | first=self.get_linting_errors( 184 | source_code=texts.single_quotes.single_quotes_test_text_011, 185 | ), 186 | second=[], 187 | ) 188 | 189 | def test_case_12( 190 | self, 191 | ): 192 | self.assertEqual( 193 | first=self.get_linting_errors( 194 | source_code=texts.single_quotes.single_quotes_test_text_012, 195 | ), 196 | second=[ 197 | ( 198 | 2, 199 | 4, 200 | 'I011 only single quote is allowed in strings', 201 | 'intsights_checker', 202 | ), 203 | ], 204 | ) 205 | 206 | def test_case_13( 207 | self, 208 | ): 209 | self.assertEqual( 210 | first=self.get_linting_errors( 211 | source_code=texts.single_quotes.single_quotes_test_text_013, 212 | ), 213 | second=[], 214 | ) 215 | 216 | def test_case_14( 217 | self, 218 | ): 219 | self.assertEqual( 220 | first=self.get_linting_errors( 221 | source_code=texts.single_quotes.single_quotes_test_text_014, 222 | ), 223 | second=[ 224 | ( 225 | 2, 226 | 4, 227 | 'I011 only single quote is allowed in strings', 228 | 'intsights_checker', 229 | ), 230 | ], 231 | ) 232 | 233 | def test_case_15( 234 | self, 235 | ): 236 | self.assertEqual( 237 | first=self.get_linting_errors( 238 | source_code=texts.single_quotes.single_quotes_test_text_015, 239 | ), 240 | second=[], 241 | ) 242 | 243 | def test_case_16( 244 | self, 245 | ): 246 | self.assertEqual( 247 | first=self.get_linting_errors( 248 | source_code=texts.single_quotes.single_quotes_test_text_016, 249 | ), 250 | second=[ 251 | ( 252 | 2, 253 | 4, 254 | 'I011 only single quote is allowed in strings', 255 | 'intsights_checker', 256 | ), 257 | ( 258 | 2, 259 | 11, 260 | 'I011 only single quote is allowed in strings', 261 | 'intsights_checker', 262 | ), 263 | ], 264 | ) 265 | -------------------------------------------------------------------------------- /tests/test_mutables.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | import tokenize 3 | import ast 4 | import io 5 | import unittest 6 | 7 | import flake8_intsights 8 | 9 | from . 
import texts 10 | 11 | 12 | class MutablesTestCase( 13 | unittest.TestCase, 14 | ): 15 | def get_linting_errors( 16 | self, 17 | source_code, 18 | ): 19 | source_code = textwrap.dedent( 20 | text=source_code.lstrip(), 21 | ) 22 | file_tokens = list( 23 | tokenize.tokenize( 24 | readline=io.BytesIO( 25 | initial_bytes=source_code.encode('utf-8'), 26 | ).readline, 27 | ) 28 | ) 29 | tree = ast.parse(source_code) 30 | filename = 'test_mutables' 31 | lines = io.StringIO(source_code).readlines() 32 | 33 | checker_obj = flake8_intsights.checker.Checker( 34 | tree=tree, 35 | filename=filename, 36 | lines=lines, 37 | file_tokens=file_tokens, 38 | ) 39 | checker_obj.checkers = [flake8_intsights.checkers.mutables.Checker] 40 | linting_errors = list(checker_obj.run()) 41 | 42 | return linting_errors 43 | 44 | def test_case_1( 45 | self, 46 | ): 47 | self.assertEqual( 48 | first=self.get_linting_errors( 49 | source_code=texts.mutables.mutables_test_text_001, 50 | ), 51 | second=[], 52 | ) 53 | 54 | def test_case_2( 55 | self, 56 | ): 57 | self.assertEqual( 58 | first=self.get_linting_errors( 59 | source_code=texts.mutables.mutables_test_text_002, 60 | ), 61 | second=[], 62 | ) 63 | 64 | def test_case_3( 65 | self, 66 | ): 67 | self.assertEqual( 68 | first=self.get_linting_errors( 69 | source_code=texts.mutables.mutables_test_text_003, 70 | ), 71 | second=[ 72 | ( 73 | 2, 74 | 10, 75 | 'I081 function default parameter must not be mutable', 76 | 'intsights_checker', 77 | ), 78 | ], 79 | ) 80 | 81 | def test_case_4( 82 | self, 83 | ): 84 | self.assertEqual( 85 | first=self.get_linting_errors( 86 | source_code=texts.mutables.mutables_test_text_004, 87 | ), 88 | second=[ 89 | ( 90 | 2, 91 | 10, 92 | 'I081 function default parameter must not be mutable', 93 | 'intsights_checker', 94 | ), 95 | ], 96 | ) 97 | 98 | def test_case_5( 99 | self, 100 | ): 101 | self.assertEqual( 102 | first=self.get_linting_errors( 103 | source_code=texts.mutables.mutables_test_text_005, 104 | ), 105 | second=[ 106 | ( 107 | 2, 108 | 10, 109 | 'I081 function default parameter must not be mutable', 110 | 'intsights_checker', 111 | ), 112 | ], 113 | ) 114 | 115 | def test_case_6( 116 | self, 117 | ): 118 | self.assertEqual( 119 | first=self.get_linting_errors( 120 | source_code=texts.mutables.mutables_test_text_006, 121 | ), 122 | second=[ 123 | ( 124 | 2, 125 | 10, 126 | 'I081 function default parameter must not be mutable', 127 | 'intsights_checker', 128 | ), 129 | ], 130 | ) 131 | 132 | def test_case_7( 133 | self, 134 | ): 135 | self.assertEqual( 136 | first=self.get_linting_errors( 137 | source_code=texts.mutables.mutables_test_text_007, 138 | ), 139 | second=[], 140 | ) 141 | 142 | def test_case_8( 143 | self, 144 | ): 145 | self.assertEqual( 146 | first=self.get_linting_errors( 147 | source_code=texts.mutables.mutables_test_text_008, 148 | ), 149 | second=[], 150 | ) 151 | 152 | def test_case_9( 153 | self, 154 | ): 155 | self.assertEqual( 156 | first=self.get_linting_errors( 157 | source_code=texts.mutables.mutables_test_text_009, 158 | ), 159 | second=[ 160 | ( 161 | 3, 162 | 14, 163 | 'I081 function default parameter must not be mutable', 164 | 'intsights_checker', 165 | ), 166 | ], 167 | ) 168 | 169 | def test_case_10( 170 | self, 171 | ): 172 | self.assertEqual( 173 | first=self.get_linting_errors( 174 | source_code=texts.mutables.mutables_test_text_010, 175 | ), 176 | second=[ 177 | ( 178 | 3, 179 | 14, 180 | 'I081 function default parameter must not be mutable', 181 | 'intsights_checker', 182 | ), 183 | ], 184 | ) 185 
| 186 | def test_case_11( 187 | self, 188 | ): 189 | self.assertEqual( 190 | first=self.get_linting_errors( 191 | source_code=texts.mutables.mutables_test_text_011, 192 | ), 193 | second=[ 194 | ( 195 | 3, 196 | 14, 197 | 'I081 function default parameter must not be mutable', 198 | 'intsights_checker', 199 | ), 200 | ], 201 | ) 202 | 203 | def test_case_12( 204 | self, 205 | ): 206 | self.assertEqual( 207 | first=self.get_linting_errors( 208 | source_code=texts.mutables.mutables_test_text_012, 209 | ), 210 | second=[ 211 | ( 212 | 3, 213 | 14, 214 | 'I081 function default parameter must not be mutable', 215 | 'intsights_checker', 216 | ), 217 | ], 218 | ) 219 | 220 | def test_case_13( 221 | self, 222 | ): 223 | self.assertEqual( 224 | first=self.get_linting_errors( 225 | source_code=texts.mutables.mutables_test_text_013, 226 | ), 227 | second=[], 228 | ) 229 | 230 | def test_case_14( 231 | self, 232 | ): 233 | self.assertEqual( 234 | first=self.get_linting_errors( 235 | source_code=texts.mutables.mutables_test_text_014, 236 | ), 237 | second=[], 238 | ) 239 | 240 | def test_case_15( 241 | self, 242 | ): 243 | self.assertEqual( 244 | first=self.get_linting_errors( 245 | source_code=texts.mutables.mutables_test_text_015, 246 | ), 247 | second=[ 248 | ( 249 | 4, 250 | 16, 251 | 'I081 function default parameter must not be mutable', 252 | 'intsights_checker', 253 | ), 254 | ], 255 | ) 256 | 257 | def test_case_16( 258 | self, 259 | ): 260 | self.assertEqual( 261 | first=self.get_linting_errors( 262 | source_code=texts.mutables.mutables_test_text_016, 263 | ), 264 | second=[ 265 | ( 266 | 3, 267 | 14, 268 | 'I081 function default parameter must not be mutable', 269 | 'intsights_checker', 270 | ), 271 | ( 272 | 4, 273 | 16, 274 | 'I081 function default parameter must not be mutable', 275 | 'intsights_checker', 276 | ), 277 | ], 278 | ) 279 | 280 | def test_case_17( 281 | self, 282 | ): 283 | self.assertEqual( 284 | first=self.get_linting_errors( 285 | source_code=texts.mutables.mutables_test_text_017, 286 | ), 287 | second=[ 288 | ( 289 | 2, 290 | 14, 291 | 'I081 function default parameter must not be mutable', 292 | 'intsights_checker', 293 | ), 294 | ( 295 | 4, 296 | 16, 297 | 'I081 function default parameter must not be mutable', 298 | 'intsights_checker', 299 | ), 300 | ], 301 | ) 302 | 303 | def test_case_18( 304 | self, 305 | ): 306 | self.assertEqual( 307 | first=self.get_linting_errors( 308 | source_code=texts.mutables.mutables_test_text_018, 309 | ), 310 | second=[ 311 | ( 312 | 3, 313 | 14, 314 | 'I081 function default parameter must not be mutable', 315 | 'intsights_checker', 316 | ), 317 | ], 318 | ) 319 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/spacing.py: -------------------------------------------------------------------------------- 1 | import astroid 2 | import tokenize 3 | import collections.abc 4 | 5 | from . 
import _checker 6 | 7 | 8 | class Checker( 9 | _checker.BaseChecker, 10 | ): 11 | NO_SPACE_CHILD_TYPES = ( 12 | astroid.ClassDef, 13 | astroid.FunctionDef, 14 | astroid.AsyncFunctionDef, 15 | astroid.For, 16 | astroid.AsyncFor, 17 | astroid.While, 18 | astroid.If, 19 | astroid.With, 20 | astroid.AsyncWith, 21 | astroid.TryExcept, 22 | astroid.TryFinally, 23 | astroid.ExceptHandler, 24 | ) 25 | 26 | NO_SPACE_BEFORE_TYPES = ( 27 | astroid.TryExcept, 28 | astroid.TryFinally, 29 | astroid.ExceptHandler, 30 | ) 31 | 32 | NO_SPACE_NEIGHBOUR_TYPES = ( 33 | astroid.Expr, 34 | ) 35 | 36 | NO_SPACE_NEIGHBOUR_TYPES_VALUES = ( 37 | astroid.Const, 38 | ) 39 | 40 | CONTROL_KEYWORDS = ( 41 | astroid.Return, 42 | astroid.Break, 43 | astroid.Continue, 44 | astroid.Yield, 45 | astroid.YieldFrom, 46 | astroid.Pass, 47 | astroid.Raise, 48 | ) 49 | 50 | @classmethod 51 | def check( 52 | cls, 53 | filename, 54 | lines, 55 | tokens, 56 | start_position_to_token, 57 | ast_tree, 58 | astroid_tree, 59 | all_astroid_nodes, 60 | ): 61 | for node in all_astroid_nodes: 62 | if isinstance(node, astroid.Arguments): 63 | continue 64 | 65 | yield from cls.check_no_space_below( 66 | node=node, 67 | lines=lines, 68 | ) 69 | 70 | yield from cls.check_right_after_else( 71 | node=node, 72 | lines=lines, 73 | ) 74 | 75 | yield from cls.check_one_separation_above( 76 | node=node, 77 | lines=lines, 78 | ) 79 | 80 | yield from cls.check_one_separation_below( 81 | all_astroid_nodes=all_astroid_nodes, 82 | node=node, 83 | lines=lines, 84 | ) 85 | 86 | yield from cls.check_no_space_above( 87 | lines=lines, 88 | tokens=tokens, 89 | ) 90 | 91 | @classmethod 92 | def check_no_space_below( 93 | cls, 94 | node, 95 | lines, 96 | ): 97 | if not isinstance(node, cls.NO_SPACE_CHILD_TYPES): 98 | return 99 | 100 | if not hasattr(node, 'body'): 101 | return 102 | 103 | if not isinstance(node.body, collections.abc.Iterable): 104 | return 105 | 106 | if getattr(node, 'doc', None) is not None: 107 | for current_line_number in range(node.lineno - 1, len(lines)): 108 | current_line = lines[current_line_number] 109 | if current_line.strip().endswith(':'): 110 | next_line = lines[current_line_number + 1] 111 | if next_line.strip() == '': 112 | yield from cls.error_yielder.yield_error( 113 | error_id='I031', 114 | line_number=current_line_number + 2, 115 | column_offset=0, 116 | ) 117 | 118 | return 119 | 120 | break 121 | 122 | if isinstance(node, astroid.TryFinally): 123 | first_child_node = node.finalbody[0] 124 | number_of_blank_lines_above = cls.number_of_blank_lines_above( 125 | lines=lines, 126 | node=first_child_node, 127 | ) 128 | if number_of_blank_lines_above != 0: 129 | yield from cls.error_yielder.yield_error( 130 | error_id='I031', 131 | line_number=first_child_node.lineno, 132 | column_offset=first_child_node.col_offset, 133 | ) 134 | else: 135 | first_child_node = None 136 | for child_node in cls.walk( 137 | node=node, 138 | ): 139 | if not hasattr(child_node, 'lineno'): 140 | continue 141 | 142 | if child_node in node.body: 143 | if isinstance(child_node, astroid.Expr): 144 | first_child_node = child_node.value 145 | else: 146 | first_child_node = child_node 147 | 148 | break 149 | 150 | if first_child_node: 151 | number_of_blank_lines_above = cls.number_of_blank_lines_above( 152 | lines=lines, 153 | node=first_child_node, 154 | ) 155 | if number_of_blank_lines_above != 0: 156 | yield from cls.error_yielder.yield_error( 157 | error_id='I031', 158 | line_number=first_child_node.lineno, 159 | column_offset=first_child_node.col_offset, 160 
| ) 161 | 162 | @classmethod 163 | def check_no_space_above( 164 | cls, 165 | lines, 166 | tokens, 167 | ): 168 | no_space_above_keywords = [ 169 | 'else', 170 | 'except', 171 | 'finally', 172 | ] 173 | 174 | for token in tokens: 175 | if token.type == tokenize.NAME and token.string in no_space_above_keywords: 176 | token.lineno = token.start[0] 177 | number_of_blank_lines_above = cls.number_of_blank_lines_above( 178 | lines=lines, 179 | node=token, 180 | ) 181 | if number_of_blank_lines_above != 0: 182 | yield from cls.error_yielder.yield_error( 183 | error_id='I034', 184 | line_number=token.start[0], 185 | column_offset=token.start[1], 186 | ) 187 | 188 | @classmethod 189 | def check_right_after_else( 190 | cls, 191 | node, 192 | lines, 193 | ): 194 | if not isinstance(node, astroid.If): 195 | return 196 | 197 | if not node.orelse: 198 | return 199 | 200 | if lines[node.orelse[0].lineno - 1].strip().startswith('elif '): 201 | else_nodes = node.orelse[0].orelse 202 | else: 203 | else_nodes = node.orelse 204 | 205 | child_nodes = [] 206 | for child_node in else_nodes: 207 | if not hasattr(child_node, 'lineno'): 208 | continue 209 | 210 | if isinstance(child_node, astroid.Expr): 211 | child_nodes.append(child_node.value) 212 | else: 213 | child_nodes.append(child_node) 214 | 215 | if child_nodes: 216 | number_of_blank_lines_above = cls.number_of_blank_lines_above( 217 | lines=lines, 218 | node=child_nodes[0], 219 | ) 220 | if number_of_blank_lines_above != 0: 221 | yield from cls.error_yielder.yield_error( 222 | error_id='I031', 223 | line_number=child_nodes[0].lineno, 224 | column_offset=child_nodes[0].col_offset, 225 | ) 226 | 227 | @classmethod 228 | def check_one_separation_above( 229 | cls, 230 | node, 231 | lines, 232 | ): 233 | if not isinstance(node, cls.CONTROL_KEYWORDS): 234 | return 235 | 236 | node_previous_sibling = node.previous_sibling() 237 | if node_previous_sibling is None: 238 | return 239 | 240 | if hasattr(node_previous_sibling, 'body'): 241 | node_previous_sibling = node_previous_sibling.last_child() 242 | 243 | if isinstance(node_previous_sibling, cls.NO_SPACE_CHILD_TYPES): 244 | return 245 | 246 | number_of_separation_lines = cls.number_of_separation_lines( 247 | lines=lines, 248 | first_node=node_previous_sibling, 249 | second_node=node, 250 | ) 251 | if number_of_separation_lines != 1: 252 | yield from cls.error_yielder.yield_error( 253 | error_id='I032', 254 | line_number=node.lineno, 255 | column_offset=node.col_offset, 256 | ) 257 | 258 | @classmethod 259 | def check_one_separation_below( 260 | cls, 261 | all_astroid_nodes, 262 | node, 263 | lines, 264 | ): 265 | if not isinstance(node, cls.CONTROL_KEYWORDS): 266 | return 267 | 268 | node_next_sibling = node.next_sibling() 269 | if node_next_sibling is None: 270 | node_next_sibling = cls.get_node_last_sub_node( 271 | node=node, 272 | ) 273 | next_node_in_tree = cls.get_next_node_in_tree( 274 | all_astroid_nodes=all_astroid_nodes, 275 | node=node_next_sibling, 276 | ) 277 | if not next_node_in_tree: 278 | return 279 | else: 280 | next_node_in_tree = node_next_sibling 281 | 282 | if next_node_in_tree.lineno == node.lineno: 283 | return 284 | 285 | if isinstance(next_node_in_tree, cls.NO_SPACE_BEFORE_TYPES): 286 | return 287 | 288 | line_above_next_sibiling = cls.get_line_above( 289 | lines=lines, 290 | node=next_node_in_tree, 291 | ) 292 | if line_above_next_sibiling.strip() in [ 293 | 'try:', 294 | 'except:', 295 | 'finally:', 296 | 'else:', 297 | ]: 298 | return 299 | 300 | number_of_blank_lines_above = 
cls.number_of_blank_lines_above( 301 | lines=lines, 302 | node=next_node_in_tree, 303 | ) 304 | 305 | next_node_line = lines[next_node_in_tree.lineno - 1] 306 | if next_node_line.strip().startswith('elif '): 307 | if number_of_blank_lines_above != 0: 308 | yield from cls.error_yielder.yield_error( 309 | error_id='I032', 310 | line_number=next_node_in_tree.lineno - 1, 311 | column_offset=0, 312 | ) 313 | else: 314 | if number_of_blank_lines_above == 0: 315 | yield from cls.error_yielder.yield_error( 316 | error_id='I032', 317 | line_number=next_node_in_tree.lineno, 318 | column_offset=next_node_in_tree.col_offset, 319 | ) 320 | -------------------------------------------------------------------------------- /flake8_intsights/checkers/_checker.py: -------------------------------------------------------------------------------- 1 | import collections 2 | import astroid 3 | import tokenize 4 | 5 | from . import _errors 6 | 7 | 8 | class BaseChecker: 9 | LIST_TYPES = { 10 | astroid.List, 11 | astroid.Tuple, 12 | astroid.Set, 13 | } 14 | LATE_OPENERS_TYPES = { 15 | astroid.Call, 16 | } 17 | LATE_CLOSERS_TYPES = { 18 | astroid.Tuple, 19 | } 20 | OPENERS = { 21 | '[', 22 | '(', 23 | '{', 24 | } 25 | CLOSERES = { 26 | ']', 27 | ')', 28 | '}', 29 | } 30 | 31 | error_yielder = _errors.ErrorYielder 32 | 33 | @classmethod 34 | def walk( 35 | cls, 36 | node, 37 | ): 38 | nodes = set() 39 | 40 | nodes_to_go_through = collections.deque( 41 | iterable=[ 42 | node, 43 | ], 44 | ) 45 | while nodes_to_go_through: 46 | node = nodes_to_go_through.popleft() 47 | 48 | for name in node._astroid_fields: 49 | field = getattr(node, name, None) 50 | if isinstance(field, astroid.node_classes.NodeNG): 51 | nodes_to_go_through.append(field) 52 | elif isinstance(field, list): 53 | for item in field: 54 | if isinstance(item, tuple): 55 | for sub_item in item: 56 | if isinstance(sub_item, astroid.node_classes.NodeNG): 57 | nodes_to_go_through.append(sub_item) 58 | elif isinstance(item, astroid.node_classes.NodeNG): 59 | nodes_to_go_through.append(item) 60 | 61 | if node not in nodes: 62 | yield node 63 | 64 | nodes.add(node) 65 | 66 | @classmethod 67 | def get_line_above( 68 | cls, 69 | lines, 70 | node, 71 | ): 72 | node_line_number = node.lineno 73 | 74 | if node_line_number > 1: 75 | return lines[node_line_number - 2] 76 | else: 77 | return None 78 | 79 | @classmethod 80 | def number_of_blank_lines_above( 81 | cls, 82 | lines, 83 | node, 84 | ): 85 | node_line_number = node.lineno 86 | 87 | number_of_blank_lines = 0 88 | for line in reversed(lines[:node_line_number - 1]): 89 | if line.strip() != '': 90 | break 91 | 92 | number_of_blank_lines += 1 93 | 94 | return number_of_blank_lines 95 | 96 | @classmethod 97 | def number_of_separation_lines( 98 | cls, 99 | lines, 100 | first_node, 101 | second_node, 102 | ): 103 | first_node_index = first_node.lineno - 1 104 | second_node_index = second_node.lineno - 1 105 | 106 | number_of_lines = 0 107 | for line_index in range( 108 | second_node_index - 1, 109 | first_node_index, 110 | -1, 111 | ): 112 | if lines[line_index].strip() == '': 113 | number_of_lines += 1 114 | else: 115 | break 116 | 117 | return number_of_lines 118 | 119 | @classmethod 120 | def get_previous_token_from_position( 121 | cls, 122 | lineno, 123 | col_offset, 124 | tokens, 125 | start_position_to_token, 126 | token_type, 127 | token_string, 128 | ): 129 | from_token_position = ( 130 | lineno, 131 | col_offset, 132 | ) 133 | 134 | current_token_index = start_position_to_token[from_token_position]['index'] 135 
| for token in reversed(tokens[:current_token_index]): 136 | if token.type == token_type and token.string == token_string: 137 | token_index = start_position_to_token[token.start]['index'] 138 | 139 | return { 140 | 'token': token, 141 | 'index': token_index, 142 | } 143 | 144 | @classmethod 145 | def get_next_token_from_position( 146 | cls, 147 | lineno, 148 | col_offset, 149 | tokens, 150 | start_position_to_token, 151 | token_type, 152 | token_string, 153 | ): 154 | from_token_position = ( 155 | lineno, 156 | col_offset, 157 | ) 158 | 159 | current_token_index = start_position_to_token[from_token_position]['index'] 160 | 161 | for token in tokens[current_token_index:]: 162 | if token.type == token_type and token.string == token_string: 163 | token_index = start_position_to_token[token.start]['index'] 164 | 165 | return { 166 | 'token': token, 167 | 'index': token_index, 168 | } 169 | 170 | @classmethod 171 | def get_token_by_position( 172 | cls, 173 | lineno, 174 | col_offset, 175 | tokens, 176 | ): 177 | for token in tokens: 178 | token_start_lineno = token.start[0] 179 | token_end_lineno = token.end[0] 180 | same_lines = token_start_lineno <= lineno and lineno <= token_end_lineno 181 | if not same_lines: 182 | continue 183 | 184 | token_start_col_offset = token.start[1] 185 | token_end_col_offset = token.end[1] 186 | same_cols = True 187 | if token_start_lineno == lineno: 188 | if token_start_col_offset > col_offset: 189 | same_cols = False 190 | 191 | if token_end_lineno == lineno: 192 | if token_end_col_offset < col_offset: 193 | same_cols = False 194 | 195 | if same_cols: 196 | return token 197 | 198 | return None 199 | 200 | @classmethod 201 | def get_node_opener_token( 202 | cls, 203 | node, 204 | tokens, 205 | start_position_to_token, 206 | ): 207 | node_start_lineno = node.lineno 208 | node_start_offset = node.col_offset 209 | node_opener_position = ( 210 | node_start_lineno, 211 | node_start_offset, 212 | ) 213 | 214 | if isinstance( 215 | node, 216 | ( 217 | astroid.List, 218 | astroid.Dict, 219 | astroid.Set, 220 | ) 221 | ): 222 | return start_position_to_token[node_opener_position] 223 | elif isinstance(node, astroid.Tuple): 224 | current_node_token_string = start_position_to_token[node_opener_position]['token'].string 225 | if current_node_token_string == '(': 226 | return start_position_to_token[node_opener_position] 227 | 228 | return cls.get_previous_token_from_position( 229 | lineno=node_start_lineno, 230 | col_offset=node_start_offset, 231 | tokens=tokens, 232 | start_position_to_token=start_position_to_token, 233 | token_type=tokenize.OP, 234 | token_string='(', 235 | ) 236 | elif isinstance( 237 | node, 238 | ( 239 | astroid.Call, 240 | astroid.FunctionDef, 241 | astroid.AsyncFunctionDef, 242 | ) 243 | ): 244 | return cls.get_next_token_from_position( 245 | lineno=node_start_lineno, 246 | col_offset=node_start_offset, 247 | tokens=tokens, 248 | start_position_to_token=start_position_to_token, 249 | token_type=tokenize.OP, 250 | token_string='(', 251 | ) 252 | 253 | @classmethod 254 | def get_node_closer_token( 255 | cls, 256 | node, 257 | tokens, 258 | start_position_to_token, 259 | ): 260 | opener_token = cls.get_node_opener_token( 261 | node=node, 262 | tokens=tokens, 263 | start_position_to_token=start_position_to_token, 264 | ) 265 | 266 | current_open_token_index = opener_token['index'] 267 | 268 | current_nesting_counter = 0 269 | for token in tokens[current_open_token_index:]: 270 | if token.type == tokenize.OP: 271 | if token.string in cls.OPENERS: 272 | 
current_nesting_counter += 1 273 | elif token.string in cls.CLOSERES: 274 | current_nesting_counter -= 1 275 | 276 | if current_nesting_counter == 0: 277 | return token 278 | else: 279 | return None 280 | 281 | @classmethod 282 | def get_line_leading_indentations( 283 | cls, 284 | line, 285 | ): 286 | number_of_leading_spaces = len(line) - len(line.lstrip(' ')) 287 | 288 | return number_of_leading_spaces / 4 289 | 290 | @classmethod 291 | def get_line_indentation_level( 292 | cls, 293 | line, 294 | ): 295 | spaces_from_start = 0 296 | 297 | for char in line: 298 | if char != ' ': 299 | break 300 | 301 | spaces_from_start += 1 302 | 303 | return spaces_from_start / 4 304 | 305 | @classmethod 306 | def get_node_last_sub_node( 307 | cls, 308 | node, 309 | ): 310 | node_children = list(node.get_children()) 311 | if not node_children: 312 | return node 313 | 314 | child_node = node_children[-1] 315 | child_node_children = list(child_node.get_children()) 316 | if child_node_children: 317 | return cls.get_node_last_sub_node( 318 | node=child_node, 319 | ) 320 | else: 321 | return child_node 322 | 323 | @classmethod 324 | def get_next_node_in_tree( 325 | cls, 326 | all_astroid_nodes, 327 | node, 328 | ): 329 | closest_node = None 330 | shortest_distance_between_nodes = None 331 | node_position_in_tree = getattr(node, 'lineno', 0) * 10000 + getattr(node, 'col_offset', 0) 332 | if node_position_in_tree == 0: 333 | return None 334 | 335 | for candidate_node in all_astroid_nodes: 336 | distance_between_nodes = candidate_node.position_in_tree - node_position_in_tree 337 | if distance_between_nodes <= 0: 338 | continue 339 | 340 | if shortest_distance_between_nodes is None or distance_between_nodes < shortest_distance_between_nodes: 341 | shortest_distance_between_nodes = distance_between_nodes 342 | closest_node = candidate_node 343 | 344 | return closest_node 345 | -------------------------------------------------------------------------------- /tests/texts/spacing.py: -------------------------------------------------------------------------------- 1 | spacing_test_text_001 = ''' 2 | def function(): 3 | return 1 4 | ''' 5 | spacing_test_text_002 = ''' 6 | def function(): 7 | 8 | return 1 9 | ''' 10 | spacing_test_text_003 = ''' 11 | def function(): 12 | arg = 1 13 | 14 | return 1 15 | ''' 16 | spacing_test_text_004 = ''' 17 | def function(): 18 | arg = 1 19 | return 1 20 | ''' 21 | spacing_test_text_005 = ''' 22 | def function(): 23 | arr = [ 24 | 1, 25 | ] 26 | 27 | return 1 28 | ''' 29 | spacing_test_text_006 = ''' 30 | def function(): 31 | arr = [ 32 | 1, 33 | ] 34 | return 1 35 | ''' 36 | spacing_test_text_007 = ''' 37 | def function(): 38 | if True: 39 | return 1 40 | ''' 41 | spacing_test_text_008 = ''' 42 | def function(): 43 | if True: 44 | 45 | return 1 46 | ''' 47 | spacing_test_text_009 = ''' 48 | def function(): 49 | while True: 50 | return 1 51 | ''' 52 | spacing_test_text_010 = ''' 53 | def function(): 54 | while True: 55 | 56 | return 1 57 | ''' 58 | spacing_test_text_011 = ''' 59 | def function(): 60 | if True: 61 | return 1 62 | else: 63 | return 2 64 | ''' 65 | spacing_test_text_012 = ''' 66 | def function(): 67 | if True: 68 | return 1 69 | else: 70 | 71 | return 2 72 | ''' 73 | spacing_test_text_013 = ''' 74 | def function(): 75 | if True: 76 | arg = 1 77 | 78 | return 1 79 | ''' 80 | spacing_test_text_014 = ''' 81 | def function(): 82 | if True: 83 | arg = 1 84 | return 1 85 | ''' 86 | spacing_test_text_015 = ''' 87 | def function(): 88 | try: 89 | arg = 10 90 | 91 | return 3 92 | 
except: 93 | return 4 94 | ''' 95 | spacing_test_text_016 = ''' 96 | def function(): 97 | try: 98 | arg = 10 99 | return 3 100 | except: 101 | return 4 102 | ''' 103 | spacing_test_text_017 = ''' 104 | def function(): 105 | try: 106 | return 3 107 | except: 108 | return 4 109 | ''' 110 | spacing_test_text_018 = ''' 111 | def function(): 112 | try: 113 | 114 | return 3 115 | except: 116 | return 4 117 | ''' 118 | spacing_test_text_019 = ''' 119 | def function(): 120 | try: 121 | return 3 122 | except: 123 | arg = 1 124 | 125 | return 4 126 | ''' 127 | spacing_test_text_020 = ''' 128 | def function(): 129 | try: 130 | return 3 131 | except: 132 | arg = 1 133 | return 4 134 | ''' 135 | spacing_test_text_021 = ''' 136 | def function(): 137 | try: 138 | return 3 139 | except: 140 | return 4 141 | finally: 142 | return 3 143 | ''' 144 | spacing_test_text_022 = ''' 145 | def function(): 146 | try: 147 | return 3 148 | except: 149 | 150 | return 4 151 | ''' 152 | spacing_test_text_023 = ''' 153 | def function(): 154 | if True: 155 | pass 156 | 157 | return 1 158 | ''' 159 | spacing_test_text_024 = ''' 160 | def function(): 161 | if True: 162 | pass 163 | return 1 164 | ''' 165 | spacing_test_text_025 = ''' 166 | def function(): 167 | """ 168 | """ 169 | return 1 170 | ''' 171 | spacing_test_text_026 = ''' 172 | def function(): 173 | """ 174 | """ 175 | 176 | return 1 177 | ''' 178 | spacing_test_text_027 = ''' 179 | def function(): 180 | """ 181 | """ 182 | arg = 1 183 | 184 | return 1 185 | ''' 186 | spacing_test_text_028 = ''' 187 | def function(): 188 | """ 189 | """ 190 | arg = 1 191 | return 1 192 | ''' 193 | spacing_test_text_029 = ''' 194 | def function(): 195 | arg = func( 196 | 1, 197 | ) 198 | 199 | return 1 200 | ''' 201 | spacing_test_text_030 = ''' 202 | def function(): 203 | arg = func( 204 | 1, 205 | ) 206 | return 1 207 | ''' 208 | spacing_test_text_031 = ''' 209 | def function(): 210 | arg = ( 211 | 1, 212 | ) 213 | 214 | return 1 215 | ''' 216 | spacing_test_text_032 = ''' 217 | def function(): 218 | arg = ( 219 | 1, 220 | ) 221 | return 1 222 | ''' 223 | spacing_test_text_033 = ''' 224 | def function(): 225 | yield ( 226 | 1, 227 | ) 228 | 229 | return 1 230 | ''' 231 | spacing_test_text_034 = ''' 232 | def function(): 233 | yield ( 234 | 1, 235 | ) 236 | return 1 237 | ''' 238 | spacing_test_text_035 = ''' 239 | def function(): 240 | arg = 1 241 | 242 | yield 1 243 | ''' 244 | spacing_test_text_036 = ''' 245 | def function(): 246 | arg = 1 247 | yield 1 248 | ''' 249 | spacing_test_text_037 = ''' 250 | def function(): 251 | arg = 1 252 | 253 | yield from test() 254 | ''' 255 | spacing_test_text_038 = ''' 256 | def function(): 257 | arg = 1 258 | yield from test() 259 | ''' 260 | spacing_test_text_039 = ''' 261 | def function(): 262 | yield from test() 263 | ''' 264 | spacing_test_text_040 = ''' 265 | def function(): 266 | 267 | yield from test() 268 | ''' 269 | spacing_test_text_041 = ''' 270 | def function(): 271 | yield 1 272 | ''' 273 | spacing_test_text_042 = ''' 274 | def function(): 275 | 276 | yield 1 277 | ''' 278 | spacing_test_text_043 = ''' 279 | def function(): 280 | if True: 281 | yield 1 282 | ''' 283 | spacing_test_text_044 = ''' 284 | def function(): 285 | if True: 286 | 287 | yield 1 288 | ''' 289 | spacing_test_text_045 = ''' 290 | def function(): 291 | if True: 292 | yield from 1 293 | ''' 294 | spacing_test_text_046 = ''' 295 | def function(): 296 | if True: 297 | 298 | yield from 1 299 | ''' 300 | spacing_test_text_047 = ''' 301 | def function(): 302 | if 
True: 303 | yield 1 304 | else: 305 | yield 1 306 | ''' 307 | spacing_test_text_048 = ''' 308 | def function(): 309 | if True: 310 | yield 1 311 | else: 312 | 313 | yield 1 314 | ''' 315 | spacing_test_text_049 = ''' 316 | def function(): 317 | if True: 318 | yield from 1 319 | else: 320 | yield from 1 321 | ''' 322 | spacing_test_text_050 = ''' 323 | def function(): 324 | if True: 325 | yield from 1 326 | else: 327 | 328 | yield from 1 329 | ''' 330 | spacing_test_text_051 = ''' 331 | def function( 332 | a, 333 | b, 334 | ): 335 | yield from 1 336 | ''' 337 | spacing_test_text_052 = ''' 338 | def function( 339 | a, 340 | b, 341 | ): 342 | 343 | yield from 1 344 | ''' 345 | spacing_test_text_053 = ''' 346 | def function( 347 | a, 348 | b, 349 | ): 350 | """ 351 | """ 352 | yield from 1 353 | ''' 354 | spacing_test_text_054 = ''' 355 | def function( 356 | a, 357 | b, 358 | ): 359 | """ 360 | """ 361 | 362 | yield from 1 363 | ''' 364 | spacing_test_text_055 = ''' 365 | def function(): 366 | if ( 367 | a == 2 368 | ): 369 | yield from 1 370 | ''' 371 | spacing_test_text_056 = ''' 372 | def function(): 373 | if ( 374 | a == 2 375 | ): 376 | 377 | yield from 1 378 | ''' 379 | spacing_test_text_057 = ''' 380 | def function(): 381 | if a == 2: 382 | yield 1 383 | 384 | a = 2 385 | ''' 386 | spacing_test_text_058 = ''' 387 | def function(): 388 | if a == 2: 389 | yield 1 390 | a = 2 391 | ''' 392 | spacing_test_text_059 = ''' 393 | def function(): 394 | if ( 395 | a == 2 396 | ): 397 | yield 1 398 | 399 | a = 2 400 | ''' 401 | spacing_test_text_060 = ''' 402 | def function(): 403 | if ( 404 | a == 2 405 | ): 406 | yield 1 407 | a = 2 408 | ''' 409 | spacing_test_text_061 = ''' 410 | def function(): 411 | if a == 2: 412 | yield 1 413 | 414 | a = 2 415 | ''' 416 | spacing_test_text_062 = ''' 417 | def function(): 418 | if a == 2: 419 | yield 1 420 | a = 2 421 | ''' 422 | spacing_test_text_063 = ''' 423 | def function(): 424 | if ( 425 | a == 2 426 | ): 427 | yield 1 428 | 429 | a = 2 430 | ''' 431 | spacing_test_text_064 = ''' 432 | def function(): 433 | if ( 434 | a == 2 435 | ): 436 | yield 1 437 | a = 2 438 | ''' 439 | spacing_test_text_065 = ''' 440 | def function(): 441 | if a == 2: 442 | yield from ( 443 | 1, 444 | 2, 445 | ) 446 | 447 | a = 2 448 | ''' 449 | spacing_test_text_066 = ''' 450 | def function(): 451 | if a == 2: 452 | yield from ( 453 | 1, 454 | 2, 455 | ) 456 | a = 2 457 | ''' 458 | spacing_test_text_067 = ''' 459 | def function(): 460 | if ( 461 | a == 2 462 | ): 463 | yield from ( 464 | 1, 465 | 2, 466 | ) 467 | 468 | a = 2 469 | ''' 470 | spacing_test_text_068 = ''' 471 | def function(): 472 | if ( 473 | a == 2 474 | ): 475 | yield from ( 476 | 1, 477 | 2, 478 | ) 479 | a = 2 480 | ''' 481 | spacing_test_text_069 = ''' 482 | def function(): 483 | if a == 2: 484 | yield from ( 485 | 1, 486 | 2, 487 | ) 488 | 489 | a = 2 490 | ''' 491 | spacing_test_text_070 = ''' 492 | def function(): 493 | if a == 2: 494 | yield from ( 495 | 1, 496 | 2, 497 | ) 498 | a = 2 499 | ''' 500 | spacing_test_text_071 = ''' 501 | def function(): 502 | if ( 503 | a == 2 504 | ): 505 | yield from ( 506 | 1, 507 | 2, 508 | ) 509 | 510 | a = 2 511 | ''' 512 | spacing_test_text_072 = ''' 513 | def function(): 514 | if ( 515 | a == 2 516 | ): 517 | yield from ( 518 | 1, 519 | 2, 520 | ) 521 | a = 2 522 | ''' 523 | spacing_test_text_073 = ''' 524 | def function(): 525 | if ( 526 | a == 2 527 | ): 528 | yield from ( 529 | 1, 530 | 2, 531 | ) 532 | elif a == 1: 533 | return 3 534 | ''' 535 | 
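# The fixtures below appear in near-identical pairs that differ only in the
# blank lines placed before elif/else branches or around control keywords,
# exercising the I031/I032 blank-line rules implemented by the checker above.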
spacing_test_text_074 = ''' 536 | def function(): 537 | if ( 538 | a == 2 539 | ): 540 | yield from ( 541 | 1, 542 | 2, 543 | ) 544 | 545 | elif a == 1: 546 | return 3 547 | ''' 548 | spacing_test_text_075 = ''' 549 | def function(): 550 | if ( 551 | a == 2 552 | ): 553 | yield 1 554 | elif a == 1: 555 | return 3 556 | ''' 557 | spacing_test_text_076 = ''' 558 | def function(): 559 | if ( 560 | a == 2 561 | ): 562 | yield 1 563 | 564 | elif a == 1: 565 | return 3 566 | ''' 567 | spacing_test_text_077 = ''' 568 | def function(): 569 | if ( 570 | a == 2 571 | ): 572 | yield from ( 573 | 1, 574 | 2, 575 | ) 576 | elif ( 577 | a == 1 578 | ): 579 | return 3 580 | ''' 581 | spacing_test_text_078 = ''' 582 | def function(): 583 | if ( 584 | a == 2 585 | ): 586 | yield from ( 587 | 1, 588 | 2, 589 | ) 590 | 591 | elif ( 592 | a == 1 593 | ): 594 | return 3 595 | ''' 596 | spacing_test_text_079 = ''' 597 | def function(): 598 | if ( 599 | a == 2 600 | ): 601 | yield 1 602 | elif ( 603 | a == 1 604 | ): 605 | return 3 606 | ''' 607 | spacing_test_text_080 = ''' 608 | def function(): 609 | if ( 610 | a == 2 611 | ): 612 | yield 1 613 | 614 | elif ( 615 | a == 1 616 | ): 617 | return 3 618 | ''' 619 | spacing_test_text_081 = ''' 620 | def function(): 621 | return 3 622 | 623 | 624 | def function_2(): 625 | pass 626 | ''' 627 | spacing_test_text_081 = ''' 628 | def function(): 629 | return 3 630 | 631 | 632 | def function_2(): 633 | pass 634 | ''' 635 | spacing_test_text_082 = ''' 636 | def function(): 637 | if True: 638 | return 639 | 640 | try: 641 | pass 642 | except: 643 | pass 644 | finally: 645 | pass 646 | ''' 647 | spacing_test_text_083 = ''' 648 | def function(): 649 | if True: 650 | return 651 | 652 | try: 653 | pass 654 | except: 655 | pass 656 | finally: 657 | 658 | pass 659 | ''' 660 | spacing_test_text_084 = ''' 661 | if True: 662 | pass 663 | 664 | else: 665 | pass 666 | ''' 667 | spacing_test_text_085 = ''' 668 | try: 669 | pass 670 | 671 | except: 672 | pass 673 | ''' 674 | spacing_test_text_086 = ''' 675 | try: 676 | pass 677 | 678 | finally: 679 | pass 680 | ''' 681 | spacing_test_text_087 = ''' 682 | class A: 683 | pass 684 | ''' 685 | spacing_test_text_088 = ''' 686 | class A: 687 | 688 | pass 689 | ''' 690 | spacing_test_text_089 = ''' 691 | class A( 692 | B, 693 | ): 694 | pass 695 | ''' 696 | spacing_test_text_090 = ''' 697 | class A( 698 | B, 699 | ): 700 | 701 | pass 702 | ''' 703 | spacing_test_text_091 = ''' 704 | class A: 705 | """ 706 | """ 707 | pass 708 | ''' 709 | spacing_test_text_092 = ''' 710 | class A: 711 | 712 | """ 713 | """ 714 | pass 715 | ''' 716 | spacing_test_text_093 = ''' 717 | class A( 718 | B, 719 | ): 720 | """ 721 | """ 722 | pass 723 | ''' 724 | spacing_test_text_094 = ''' 725 | class A( 726 | B, 727 | ): 728 | 729 | """ 730 | """ 731 | pass 732 | ''' 733 | spacing_test_text_095 = ''' 734 | def function(): 735 | \'\'\' 736 | Example: 737 | 738 | test 739 | \'\'\' 740 | return value 741 | ''' 742 | spacing_test_text_096 = ''' 743 | raise Exception( 744 | msg=f'({10})', 745 | ) 746 | 747 | return SomeObject( 748 | param=f'\\'{object.attr}\\'', 749 | ) 750 | ''' 751 | -------------------------------------------------------------------------------- /tests/texts/declerations.py: -------------------------------------------------------------------------------- 1 | declerations_test_text_001 = ''' 2 | list1 = [ 3 | 1, 4 | ] 5 | ''' 6 | declerations_test_text_002 = ''' 7 | list1 = [ 8 | 1, 9 | 2, 10 | ] 11 | ''' 12 | declerations_test_text_003 = ''' 13 | 
tuple1 = ( 14 | 1, 15 | ) 16 | ''' 17 | declerations_test_text_004 = ''' 18 | tuple1 = ( 19 | 1, 20 | 2, 21 | ) 22 | ''' 23 | declerations_test_text_005 = ''' 24 | set1 = { 25 | 1, 26 | } 27 | ''' 28 | declerations_test_text_006 = ''' 29 | set1 = { 30 | 1, 31 | 2, 32 | } 33 | ''' 34 | declerations_test_text_007 = ''' 35 | dict1 = { 36 | 'key': 1, 37 | } 38 | ''' 39 | declerations_test_text_008 = ''' 40 | dict1 = { 41 | 'key1': 1, 42 | 'key2': 2, 43 | } 44 | ''' 45 | declerations_test_text_009 = ''' 46 | return [ 47 | 1, 48 | ] 49 | ''' 50 | declerations_test_text_010 = ''' 51 | return [ 52 | 1, 53 | 2, 54 | ] 55 | ''' 56 | declerations_test_text_011 = ''' 57 | return ( 58 | 1, 59 | ) 60 | ''' 61 | declerations_test_text_012 = ''' 62 | return ( 63 | 1, 64 | 2, 65 | ) 66 | ''' 67 | declerations_test_text_013 = ''' 68 | return { 69 | 1, 70 | } 71 | ''' 72 | declerations_test_text_014 = ''' 73 | return { 74 | 1, 75 | 2, 76 | } 77 | ''' 78 | declerations_test_text_015 = ''' 79 | return { 80 | 'key': 1, 81 | } 82 | ''' 83 | declerations_test_text_016 = ''' 84 | return { 85 | 'key1': 1, 86 | 'key2': 2, 87 | } 88 | ''' 89 | declerations_test_text_017 = ''' 90 | yield [ 91 | 1, 92 | ] 93 | ''' 94 | declerations_test_text_018 = ''' 95 | yield [ 96 | 1, 97 | 2, 98 | ] 99 | ''' 100 | declerations_test_text_019 = ''' 101 | yield ( 102 | 1, 103 | ) 104 | ''' 105 | declerations_test_text_020 = ''' 106 | yield ( 107 | 1, 108 | 2, 109 | ) 110 | ''' 111 | declerations_test_text_021 = ''' 112 | yield { 113 | 1, 114 | } 115 | ''' 116 | declerations_test_text_022 = ''' 117 | yield { 118 | 1, 119 | 2, 120 | } 121 | ''' 122 | declerations_test_text_023 = ''' 123 | yield { 124 | 'key': 1, 125 | } 126 | ''' 127 | declerations_test_text_024 = ''' 128 | yield { 129 | 'key1': 1, 130 | 'key2': 2, 131 | } 132 | ''' 133 | declerations_test_text_025 = ''' 134 | list1 = [ 135 | [ 136 | 1, 137 | ], 138 | ] 139 | ''' 140 | declerations_test_text_026 = ''' 141 | list1 = [ 142 | [ 143 | 1, 144 | 2, 145 | ], 146 | ] 147 | ''' 148 | declerations_test_text_027 = ''' 149 | tuple1 = ( 150 | ( 151 | 1, 152 | ), 153 | ) 154 | ''' 155 | declerations_test_text_028 = ''' 156 | tuple1 = ( 157 | ( 158 | 1, 159 | 2, 160 | ), 161 | ) 162 | ''' 163 | declerations_test_text_029 = ''' 164 | set1 = { 165 | { 166 | 1, 167 | }, 168 | } 169 | ''' 170 | declerations_test_text_030 = ''' 171 | set1 = { 172 | { 173 | 1, 174 | 2, 175 | }, 176 | } 177 | ''' 178 | declerations_test_text_031 = ''' 179 | dict1 = { 180 | 'key': { 181 | 'key': 1, 182 | }, 183 | } 184 | ''' 185 | declerations_test_text_032 = ''' 186 | dict1 = { 187 | 'key1': { 188 | 'key1': 1, 189 | 'key2': 2, 190 | }, 191 | 'key2': { 192 | 'key1': 1, 193 | 'key2': 2, 194 | }, 195 | } 196 | ''' 197 | declerations_test_text_033 = ''' 198 | return [ 199 | [ 200 | 1, 201 | ], 202 | ] 203 | ''' 204 | declerations_test_text_034 = ''' 205 | return [ 206 | [ 207 | 1, 208 | 2, 209 | ], 210 | ] 211 | ''' 212 | declerations_test_text_035 = ''' 213 | return ( 214 | ( 215 | 1, 216 | ), 217 | ) 218 | ''' 219 | declerations_test_text_036 = ''' 220 | return ( 221 | ( 222 | 1, 223 | 2, 224 | ), 225 | ) 226 | ''' 227 | declerations_test_text_037 = ''' 228 | return { 229 | { 230 | 1, 231 | }, 232 | } 233 | ''' 234 | declerations_test_text_038 = ''' 235 | return { 236 | { 237 | 1, 238 | 2, 239 | }, 240 | } 241 | ''' 242 | declerations_test_text_039 = ''' 243 | return { 244 | 'key': { 245 | 'key': 1, 246 | }, 247 | } 248 | ''' 249 | declerations_test_text_040 = ''' 250 | return { 251 | 'key1': { 252 | 'key1': 
1, 253 | 'key2': 2, 254 | }, 255 | 'key2': { 256 | 'key1': 1, 257 | 'key2': 2, 258 | }, 259 | } 260 | ''' 261 | declerations_test_text_041 = ''' 262 | yield [ 263 | [ 264 | 1, 265 | ], 266 | ] 267 | ''' 268 | declerations_test_text_042 = ''' 269 | yield [ 270 | [ 271 | 1, 272 | 2, 273 | ], 274 | ] 275 | ''' 276 | declerations_test_text_043 = ''' 277 | yield ( 278 | ( 279 | 1, 280 | ), 281 | ) 282 | ''' 283 | declerations_test_text_044 = ''' 284 | yield ( 285 | ( 286 | 1, 287 | 2, 288 | ), 289 | ) 290 | ''' 291 | declerations_test_text_045 = ''' 292 | yield { 293 | { 294 | 1, 295 | }, 296 | } 297 | ''' 298 | declerations_test_text_046 = ''' 299 | yield { 300 | { 301 | 1, 302 | 2, 303 | }, 304 | } 305 | ''' 306 | declerations_test_text_047 = ''' 307 | yield { 308 | 'key': { 309 | 'key': 1, 310 | }, 311 | } 312 | ''' 313 | declerations_test_text_048 = ''' 314 | yield { 315 | 'key1': { 316 | 'key1': 1, 317 | 'key2': 2, 318 | }, 319 | 'key2': { 320 | 'key1': 1, 321 | 'key2': 2, 322 | }, 323 | } 324 | ''' 325 | declerations_test_text_049 = ''' 326 | list1 = [ 327 | [ 328 | 2, 329 | ], 330 | ] 331 | ''' 332 | declerations_test_text_050 = ''' 333 | list_1 = [ 334 | [ 335 | [ 336 | 2, 337 | ], 338 | ], 339 | ] 340 | ''' 341 | declerations_test_text_051 = ''' 342 | list_1 = [ 343 | ( 344 | 2, 345 | ), 346 | ] 347 | ''' 348 | declerations_test_text_052 = ''' 349 | list_1 = [ 350 | { 351 | 'key1': 'value1', 352 | }, 353 | ] 354 | ''' 355 | declerations_test_text_053 = ''' 356 | list_1 = [ 357 | call( 358 | param1, 359 | ), 360 | ] 361 | ''' 362 | declerations_test_text_054 = ''' 363 | entry_1, entry_2 = call() 364 | ''' 365 | declerations_test_text_055 = ''' 366 | ( 367 | entry_1, 368 | entry_2, 369 | ) = call() 370 | ''' 371 | declerations_test_text_056 = ''' 372 | [ 373 | 1 374 | for a, b in call() 375 | ] 376 | ''' 377 | declerations_test_text_057 = ''' 378 | { 379 | 'key': [ 380 | 'entry_1', 381 | 'entry_2', 382 | ] 383 | } 384 | ''' 385 | declerations_test_text_058 = ''' 386 | list_1 = [instance.attribute] 387 | ''' 388 | declerations_test_text_059 = ''' 389 | list_1 = [1] 390 | ''' 391 | declerations_test_text_060 = ''' 392 | list_1 = [test] 393 | ''' 394 | declerations_test_text_061 = ''' 395 | dict_1 = {} 396 | ''' 397 | declerations_test_text_062 = ''' 398 | list_1 = [term[1]] 399 | ''' 400 | declerations_test_text_063 = ''' 401 | test = { 402 | 'list_of_lists': [ 403 | [], 404 | ], 405 | } 406 | ''' 407 | declerations_test_text_064 = ''' 408 | class ClassName: 409 | pass 410 | ''' 411 | declerations_test_text_065 = ''' 412 | class ClassName( 413 | Class1, 414 | Class2, 415 | ): 416 | pass 417 | ''' 418 | declerations_test_text_066 = ''' 419 | class ClassName(): 420 | pass 421 | ''' 422 | declerations_test_text_067 = ''' 423 | class ClassName(Class1, Class2): 424 | pass 425 | ''' 426 | declerations_test_text_068 = ''' 427 | class ClassName( 428 | Class1, 429 | Class2 430 | ): 431 | pass 432 | ''' 433 | declerations_test_text_069 = ''' 434 | def function_name(): 435 | pass 436 | ''' 437 | declerations_test_text_070 = ''' 438 | def function_name( ): 439 | pass 440 | ''' 441 | declerations_test_text_071 = ''' 442 | def function_name( 443 | ): 444 | pass 445 | ''' 446 | declerations_test_text_072 = ''' 447 | def function_name( 448 | 449 | ): 450 | pass 451 | ''' 452 | declerations_test_text_073 = ''' 453 | def function_name( 454 | arg1, 455 | arg2, 456 | ): 457 | pass 458 | ''' 459 | declerations_test_text_074 = ''' 460 | def function_name( 461 | arg1, 462 | arg2 463 | ): 464 | pass 465 | ''' 466 
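# From this point on the fixtures move from literal declarations to function
# and class signatures, decorators, and except clauses, presumably exercising
# the same declaration-layout rules as the literal declarations above.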
| declerations_test_text_075 = ''' 467 | def function_name(arg1): 468 | pass 469 | ''' 470 | declerations_test_text_076 = ''' 471 | def function_name( 472 | arg1, arg2, 473 | ): 474 | pass 475 | ''' 476 | declerations_test_text_077 = ''' 477 | def function_name( 478 | arg1, 479 | arg2, 480 | ): 481 | pass 482 | ''' 483 | declerations_test_text_078 = ''' 484 | def function_name( 485 | arg1, 486 | **kwargs 487 | ): 488 | pass 489 | ''' 490 | declerations_test_text_079 = ''' 491 | class Class: 492 | def function_name_two( 493 | self, 494 | arg1, 495 | arg2, 496 | ): 497 | pass 498 | ''' 499 | declerations_test_text_080 = ''' 500 | class Class: 501 | @property 502 | def function_name_one( 503 | self, 504 | ): 505 | pass 506 | ''' 507 | declerations_test_text_081 = ''' 508 | def function_name( 509 | *args, 510 | **kwargs 511 | ): 512 | pass 513 | ''' 514 | declerations_test_text_082 = ''' 515 | class A: 516 | def b(): 517 | class B: 518 | pass 519 | ''' 520 | declerations_test_text_083 = ''' 521 | @decorator( 522 | param=1, 523 | ) 524 | def function_name( 525 | param_one, 526 | param_two, 527 | ): 528 | pass 529 | ''' 530 | declerations_test_text_084 = ''' 531 | class ClassA: 532 | def function_a(): 533 | pass 534 | 535 | class TestServerHandler( 536 | http.server.BaseHTTPRequestHandler, 537 | ): 538 | pass 539 | ''' 540 | declerations_test_text_085 = ''' 541 | def function( 542 | param_a, 543 | param_b=[ 544 | 'test', 545 | ], 546 | ): 547 | pass 548 | ''' 549 | declerations_test_text_086 = ''' 550 | @decorator 551 | class DecoratedClass( 552 | ClassBase, 553 | ): 554 | pass 555 | ''' 556 | declerations_test_text_087 = ''' 557 | class ClassName( 558 | object, 559 | ): 560 | pass 561 | ''' 562 | declerations_test_text_088 = ''' 563 | pixel[x,y] = 10 564 | ''' 565 | declerations_test_text_089 = ''' 566 | @decorator.one 567 | @decorator.two() 568 | class DecoratedClass: 569 | pass 570 | ''' 571 | declerations_test_text_090 = ''' 572 | @staticmethod 573 | def static_method(): 574 | pass 575 | ''' 576 | declerations_test_text_091 = ''' 577 | @decorator1 578 | @decorator2 579 | def static_method( 580 | param1, 581 | param2, 582 | ): 583 | pass 584 | ''' 585 | declerations_test_text_092 = ''' 586 | @decorator1( 587 | param=1, 588 | ) 589 | def method(): 590 | pass 591 | ''' 592 | declerations_test_text_093 = ''' 593 | try: 594 | pass 595 | except Exception: 596 | pass 597 | ''' 598 | declerations_test_text_094 = ''' 599 | try: 600 | pass 601 | except ( 602 | Exception1, 603 | Exception2, 604 | ): 605 | pass 606 | ''' 607 | declerations_test_text_095 = ''' 608 | try: 609 | pass 610 | except Exception as exception: 611 | pass 612 | ''' 613 | declerations_test_text_096 = ''' 614 | try: 615 | pass 616 | except ( 617 | Exception1, 618 | Exception2, 619 | ) as exception: 620 | pass 621 | ''' 622 | declerations_test_text_097 = ''' 623 | try: 624 | pass 625 | except Exception as e: 626 | pass 627 | ''' 628 | declerations_test_text_098 = ''' 629 | try: 630 | pass 631 | except ( 632 | Exception1, 633 | Exception2, 634 | ) as e: 635 | pass 636 | ''' 637 | declerations_test_text_099 = ''' 638 | dict1 = { 639 | 'key_one': 1, 'key_two': 2, 640 | } 641 | ''' 642 | declerations_test_text_100 = ''' 643 | dict1 = { 644 | 'key_one': 1, 645 | 'key_two': 2, 646 | } 647 | ''' 648 | declerations_test_text_101 = ''' 649 | dict1 = { 650 | 'key_one': 1, 651 | 'key_two': 2, 652 | } 653 | ''' 654 | declerations_test_text_102 = ''' 655 | dict1 = { 656 | 'key_one': 657 | 1, 658 | } 659 | ''' 660 | declerations_test_text_103 = 
''' 661 | dict_one = { 662 | 'list_comp': [ 663 | { 664 | 'key_one': 'value', 665 | } 666 | for i in range(5) 667 | ], 668 | 'dict_comp': { 669 | 'key_one': i 670 | for i in range(5) 671 | }, 672 | 'set_comp': { 673 | i 674 | for i in range(5) 675 | }, 676 | 'generator_comp': ( 677 | i 678 | for i in range(5) 679 | ), 680 | } 681 | ''' 682 | declerations_test_text_104 = ''' 683 | dict_one = { 684 | 'text_key': 'value', 685 | f'formatted_text_key': 'value', 686 | name_key: 'value', 687 | 1: 'value', 688 | dictionary['name']: 'value', 689 | object.attribute: 'value', 690 | } 691 | dict_two = { 692 | 'key_text_multiline': \'\'\' 693 | text 694 | \'\'\', 695 | 1: 'text', 696 | function( 697 | param=1, 698 | ): 'text', 699 | 'text'.format( 700 | param=1, 701 | ): 'text', 702 | 'long_text': ( 703 | 'first line' 704 | 'second line' 705 | ), 706 | **other_dict, 707 | } 708 | ''' 709 | declerations_test_text_105 = ''' 710 | async def function( 711 | param1, 712 | 713 | ): 714 | pass 715 | ''' 716 | declerations_test_text_106 = ''' 717 | def no_args_function(): 718 | pass 719 | def no_args_function() : 720 | pass 721 | def no_args_function (): 722 | pass 723 | def no_args_function( ): 724 | pass 725 | def no_args_function(): 726 | pass 727 | 728 | def no_args_function() -> None: 729 | pass 730 | def no_args_function() -> None : 731 | pass 732 | def no_args_function () -> None: 733 | pass 734 | def no_args_function( ) -> None: 735 | pass 736 | def no_args_function() -> None: 737 | pass 738 | ''' 739 | declerations_test_text_107 = ''' 740 | class Class: 741 | @decorator( 742 | param=1, 743 | ) 744 | async def function(): 745 | pass 746 | ''' 747 | declerations_test_text_108 = ''' 748 | list_a = [ 749 | \'\'\' 750 | multiline 751 | string 752 | \'\'\', 753 | \'\'\' 754 | multiline 755 | string 756 | \'\'\', 757 | ] 758 | ''' 759 | declerations_test_text_109 = ''' 760 | list_with_empty_tuple = [ 761 | (), 762 | ] 763 | ''' 764 | -------------------------------------------------------------------------------- /tests/test_imports.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | import tokenize 3 | import ast 4 | import io 5 | import unittest 6 | 7 | import flake8_intsights 8 | 9 | from . 
import texts 10 | 11 | 12 | class ImportsTestCase( 13 | unittest.TestCase, 14 | ): 15 | def get_linting_errors( 16 | self, 17 | source_code, 18 | ): 19 | source_code = textwrap.dedent( 20 | text=source_code.lstrip(), 21 | ) 22 | file_tokens = list( 23 | tokenize.tokenize( 24 | readline=io.BytesIO( 25 | initial_bytes=source_code.encode('utf-8'), 26 | ).readline, 27 | ) 28 | ) 29 | tree = ast.parse(source_code) 30 | filename = 'test_imports' 31 | lines = io.StringIO(source_code).readlines() 32 | 33 | checker_obj = flake8_intsights.checker.Checker( 34 | tree=tree, 35 | filename=filename, 36 | lines=lines, 37 | file_tokens=file_tokens, 38 | ) 39 | checker_obj.checkers = [flake8_intsights.checkers.imports.Checker] 40 | linting_errors = list(checker_obj.run()) 41 | 42 | return linting_errors 43 | 44 | def test_case_1( 45 | self, 46 | ): 47 | self.assertEqual( 48 | first=self.get_linting_errors( 49 | source_code=texts.imports.imports_test_text_001, 50 | ), 51 | second=[], 52 | ) 53 | 54 | def test_case_2( 55 | self, 56 | ): 57 | self.assertEqual( 58 | first=self.get_linting_errors( 59 | source_code=texts.imports.imports_test_text_002, 60 | ), 61 | second=[], 62 | ) 63 | 64 | def test_case_3( 65 | self, 66 | ): 67 | self.assertEqual( 68 | first=self.get_linting_errors( 69 | source_code=texts.imports.imports_test_text_003, 70 | ), 71 | second=[], 72 | ) 73 | 74 | def test_case_4( 75 | self, 76 | ): 77 | self.assertEqual( 78 | first=self.get_linting_errors( 79 | source_code=texts.imports.imports_test_text_004, 80 | ), 81 | second=[], 82 | ) 83 | 84 | def test_case_5( 85 | self, 86 | ): 87 | self.assertEqual( 88 | first=self.get_linting_errors( 89 | source_code=texts.imports.imports_test_text_005, 90 | ), 91 | second=[], 92 | ) 93 | 94 | def test_case_6( 95 | self, 96 | ): 97 | self.assertEqual( 98 | first=self.get_linting_errors( 99 | source_code=texts.imports.imports_test_text_006, 100 | ), 101 | second=[], 102 | ) 103 | 104 | def test_case_7( 105 | self, 106 | ): 107 | self.assertEqual( 108 | first=self.get_linting_errors( 109 | source_code=texts.imports.imports_test_text_007, 110 | ), 111 | second=[], 112 | ) 113 | 114 | def test_case_8( 115 | self, 116 | ): 117 | self.assertEqual( 118 | first=self.get_linting_errors( 119 | source_code=texts.imports.imports_test_text_008, 120 | ), 121 | second=[], 122 | ) 123 | 124 | def test_case_9( 125 | self, 126 | ): 127 | self.assertEqual( 128 | first=self.get_linting_errors( 129 | source_code=texts.imports.imports_test_text_009, 130 | ), 131 | second=[], 132 | ) 133 | 134 | def test_case_10( 135 | self, 136 | ): 137 | self.assertEqual( 138 | first=self.get_linting_errors( 139 | source_code=texts.imports.imports_test_text_010, 140 | ), 141 | second=[ 142 | ( 143 | 1, 144 | 0, 145 | 'I025 never provide a module alias', 146 | 'intsights_checker', 147 | ), 148 | ], 149 | ) 150 | 151 | def test_case_11( 152 | self, 153 | ): 154 | self.assertEqual( 155 | first=self.get_linting_errors( 156 | source_code=texts.imports.imports_test_text_011, 157 | ), 158 | second=[ 159 | ( 160 | 1, 161 | 0, 162 | 'I025 never provide a module alias', 163 | 'intsights_checker', 164 | ), 165 | ], 166 | ) 167 | 168 | def test_case_12( 169 | self, 170 | ): 171 | self.assertEqual( 172 | first=self.get_linting_errors( 173 | source_code=texts.imports.imports_test_text_012, 174 | ), 175 | second=[ 176 | ( 177 | 1, 178 | 0, 179 | 'I021 only one import module per line', 180 | 'intsights_checker', 181 | ), 182 | ], 183 | ) 184 | 185 | def test_case_13( 186 | self, 187 | ): 188 | 
self.assertEqual( 189 | first=self.get_linting_errors( 190 | source_code=texts.imports.imports_test_text_013, 191 | ), 192 | second=[ 193 | ( 194 | 1, 195 | 0, 196 | 'I021 only one import module per line', 197 | 'intsights_checker', 198 | ), 199 | ], 200 | ) 201 | 202 | def test_case_14( 203 | self, 204 | ): 205 | self.assertEqual( 206 | first=self.get_linting_errors( 207 | source_code=texts.imports.imports_test_text_014, 208 | ), 209 | second=[ 210 | ( 211 | 1, 212 | 0, 213 | 'I023 never from import a module name, just relative from imports', 214 | 'intsights_checker', 215 | ), 216 | ], 217 | ) 218 | 219 | def test_case_15( 220 | self, 221 | ): 222 | self.assertEqual( 223 | first=self.get_linting_errors( 224 | source_code=texts.imports.imports_test_text_015, 225 | ), 226 | second=[ 227 | ( 228 | 1, 229 | 0, 230 | 'I023 never from import a module name, just relative from imports', 231 | 'intsights_checker', 232 | ), 233 | ( 234 | 1, 235 | 0, 236 | 'I025 never provide a module alias', 237 | 'intsights_checker', 238 | ), 239 | ], 240 | ) 241 | 242 | def test_case_16( 243 | self, 244 | ): 245 | self.assertEqual( 246 | first=self.get_linting_errors( 247 | source_code=texts.imports.imports_test_text_016, 248 | ), 249 | second=[ 250 | ( 251 | 1, 252 | 0, 253 | 'I022 never import * from module', 254 | 'intsights_checker', 255 | ), 256 | ( 257 | 1, 258 | 0, 259 | 'I023 never from import a module name, just relative from imports', 260 | 'intsights_checker', 261 | ), 262 | ], 263 | ) 264 | 265 | def test_case_17( 266 | self, 267 | ): 268 | self.assertEqual( 269 | first=self.get_linting_errors( 270 | source_code=texts.imports.imports_test_text_017, 271 | ), 272 | second=[ 273 | ( 274 | 1, 275 | 0, 276 | 'I021 only one import module per line', 277 | 'intsights_checker', 278 | ), 279 | ( 280 | 1, 281 | 0, 282 | 'I023 never from import a module name, just relative from imports', 283 | 'intsights_checker', 284 | ), 285 | ], 286 | ) 287 | 288 | def test_case_18( 289 | self, 290 | ): 291 | self.assertEqual( 292 | first=self.get_linting_errors( 293 | source_code=texts.imports.imports_test_text_018, 294 | ), 295 | second=[ 296 | ( 297 | 1, 298 | 0, 299 | 'I023 never from import a module name, just relative from imports', 300 | 'intsights_checker', 301 | ), 302 | ( 303 | 1, 304 | 0, 305 | 'I024 never relative import from a module, import the whole module instead', 306 | 'intsights_checker', 307 | ), 308 | ], 309 | ) 310 | 311 | def test_case_19( 312 | self, 313 | ): 314 | self.assertEqual( 315 | first=self.get_linting_errors( 316 | source_code=texts.imports.imports_test_text_019, 317 | ), 318 | second=[ 319 | ( 320 | 1, 321 | 0, 322 | 'I021 only one import module per line', 323 | 'intsights_checker', 324 | ), 325 | ( 326 | 1, 327 | 0, 328 | 'I023 never from import a module name, just relative from imports', 329 | 'intsights_checker', 330 | ), 331 | ( 332 | 1, 333 | 0, 334 | 'I024 never relative import from a module, import the whole module instead', 335 | 'intsights_checker', 336 | ), 337 | ], 338 | ) 339 | 340 | def test_case_20( 341 | self, 342 | ): 343 | self.assertEqual( 344 | first=self.get_linting_errors( 345 | source_code=texts.imports.imports_test_text_020, 346 | ), 347 | second=[ 348 | ( 349 | 1, 350 | 0, 351 | 'I023 never from import a module name, just relative from imports', 352 | 'intsights_checker', 353 | ), 354 | ( 355 | 1, 356 | 0, 357 | 'I024 never relative import from a module, import the whole module instead', 358 | 'intsights_checker', 359 | ), 360 | ], 361 | ) 362 | 363 | def 
test_case_21( 364 | self, 365 | ): 366 | self.assertEqual( 367 | first=self.get_linting_errors( 368 | source_code=texts.imports.imports_test_text_021, 369 | ), 370 | second=[ 371 | ( 372 | 1, 373 | 0, 374 | 'I021 only one import module per line', 375 | 'intsights_checker', 376 | ), 377 | ( 378 | 1, 379 | 0, 380 | 'I023 never from import a module name, just relative from imports', 381 | 'intsights_checker', 382 | ), 383 | ( 384 | 1, 385 | 0, 386 | 'I024 never relative import from a module, import the whole module instead', 387 | 'intsights_checker', 388 | ), 389 | ], 390 | ) 391 | 392 | def test_case_22( 393 | self, 394 | ): 395 | self.assertEqual( 396 | first=self.get_linting_errors( 397 | source_code=texts.imports.imports_test_text_022, 398 | ), 399 | second=[ 400 | ( 401 | 1, 402 | 0, 403 | 'I025 never provide a module alias', 404 | 'intsights_checker', 405 | ), 406 | ], 407 | ) 408 | 409 | def test_case_23( 410 | self, 411 | ): 412 | self.assertEqual( 413 | first=self.get_linting_errors( 414 | source_code=texts.imports.imports_test_text_023, 415 | ), 416 | second=[ 417 | ( 418 | 1, 419 | 0, 420 | 'I022 never import * from module', 421 | 'intsights_checker', 422 | ), 423 | ( 424 | 1, 425 | 0, 426 | 'I023 never from import a module name, just relative from imports', 427 | 'intsights_checker', 428 | ), 429 | ( 430 | 1, 431 | 0, 432 | 'I024 never relative import from a module, import the whole module instead', 433 | 'intsights_checker', 434 | ), 435 | ], 436 | ) 437 | 438 | def test_case_24( 439 | self, 440 | ): 441 | self.assertEqual( 442 | first=self.get_linting_errors( 443 | source_code=texts.imports.imports_test_text_024, 444 | ), 445 | second=[ 446 | ( 447 | 2, 448 | 0, 449 | 'I027 standard imports and relative imports should be seperated by one newline', 450 | 'intsights_checker', 451 | ), 452 | ], 453 | ) 454 | 455 | def test_case_25( 456 | self, 457 | ): 458 | self.assertEqual( 459 | first=self.get_linting_errors( 460 | source_code=texts.imports.imports_test_text_025, 461 | ), 462 | second=[ 463 | ( 464 | 1, 465 | 0, 466 | 'I026 relative imports must be decleared after non-relative ones', 467 | 'intsights_checker', 468 | ), 469 | ], 470 | ) 471 | 472 | def test_case_26( 473 | self, 474 | ): 475 | self.assertEqual( 476 | first=self.get_linting_errors( 477 | source_code=texts.imports.imports_test_text_026, 478 | ), 479 | second=[], 480 | ) 481 | 482 | def test_case_27( 483 | self, 484 | ): 485 | self.assertEqual( 486 | first=self.get_linting_errors( 487 | source_code=texts.imports.imports_test_text_027, 488 | ), 489 | second=[], 490 | ) 491 | 492 | def test_case_28( 493 | self, 494 | ): 495 | self.assertEqual( 496 | first=self.get_linting_errors( 497 | source_code=texts.imports.imports_test_text_028, 498 | ), 499 | second=[], 500 | ) 501 | 502 | def test_case_29( 503 | self, 504 | ): 505 | self.assertEqual( 506 | first=self.get_linting_errors( 507 | source_code=texts.imports.imports_test_text_029, 508 | ), 509 | second=[ 510 | ( 511 | 1, 512 | 0, 513 | 'I027 last import must be spaced with two lines', 514 | 'intsights_checker', 515 | ), 516 | ], 517 | ) 518 | 519 | def test_case_30( 520 | self, 521 | ): 522 | self.assertEqual( 523 | first=self.get_linting_errors( 524 | source_code=texts.imports.imports_test_text_030, 525 | ), 526 | second=[ 527 | ( 528 | 1, 529 | 0, 530 | 'I027 last import must be spaced with two lines', 531 | 'intsights_checker', 532 | ), 533 | ], 534 | ) 535 | -------------------------------------------------------------------------------- /tests/test_checker.py: 
-------------------------------------------------------------------------------- 1 | import tokenize 2 | import astroid 3 | import io 4 | import unittest 5 | 6 | import flake8_intsights 7 | 8 | 9 | class CheckerTestCase( 10 | unittest.TestCase, 11 | ): 12 | @classmethod 13 | def setUp( 14 | cls, 15 | ): 16 | cls.checker = flake8_intsights.checkers._checker.BaseChecker() 17 | 18 | def test_number_of_blank_lines_above( 19 | self, 20 | ): 21 | source_code = ( 22 | 'line_1 = 1\n' 23 | 'line_2 = 2\n' 24 | 'line_3 = 3\n' 25 | ) 26 | 27 | parsed_source = astroid.parse(source_code) 28 | lines = source_code.split('\n') 29 | 30 | self.assertEqual( 31 | first=self.checker.number_of_blank_lines_above(lines, parsed_source.body[0]), 32 | second=0, 33 | ) 34 | self.assertEqual( 35 | first=self.checker.number_of_blank_lines_above(lines, parsed_source.body[1]), 36 | second=0, 37 | ) 38 | self.assertEqual( 39 | first=self.checker.number_of_blank_lines_above(lines, parsed_source.body[2]), 40 | second=0, 41 | ) 42 | 43 | source_code = ( 44 | 'line_1 = 1\n\n' 45 | 'line_2 = 2\n\n\n' 46 | 'line_3 = 3\n' 47 | ) 48 | 49 | parsed_source = astroid.parse(source_code) 50 | lines = source_code.split('\n') 51 | 52 | self.assertEqual( 53 | first=self.checker.number_of_blank_lines_above(lines, parsed_source.body[0]), 54 | second=0, 55 | ) 56 | self.assertEqual( 57 | first=self.checker.number_of_blank_lines_above(lines, parsed_source.body[1]), 58 | second=1, 59 | ) 60 | self.assertEqual( 61 | first=self.checker.number_of_blank_lines_above(lines, parsed_source.body[2]), 62 | second=2, 63 | ) 64 | 65 | def test_get_previous_token_from_position( 66 | self, 67 | ): 68 | source_code = ( 69 | 'tuple_1 = (\n' 70 | ' 1,\n' 71 | ' 2,\n' 72 | ')\n' 73 | ) 74 | 75 | tokens = list( 76 | tokenize.tokenize( 77 | readline=io.BytesIO( 78 | initial_bytes=source_code.encode('utf-8'), 79 | ).readline, 80 | ) 81 | ) 82 | start_position_to_token = { 83 | token.start: { 84 | 'token': token, 85 | 'index': index, 86 | } 87 | for index, token in enumerate(tokens, 0) 88 | } 89 | 90 | previous_token = self.checker.get_previous_token_from_position( 91 | lineno=2, 92 | col_offset=3, 93 | tokens=tokens, 94 | start_position_to_token=start_position_to_token, 95 | token_type=tokenize.OP, 96 | token_string='(', 97 | ) 98 | 99 | self.assertEqual( 100 | first=previous_token['token'].start, 101 | second=( 102 | 1, 103 | 10, 104 | ), 105 | ) 106 | 107 | source_code = ( 108 | 'list_1 = [\n' 109 | ' 1,\n' 110 | ' 2,\n' 111 | ']\n' 112 | ) 113 | 114 | tokens = list( 115 | tokenize.tokenize( 116 | readline=io.BytesIO( 117 | initial_bytes=source_code.encode('utf-8'), 118 | ).readline, 119 | ) 120 | ) 121 | start_position_to_token = { 122 | token.start: { 123 | 'token': token, 124 | 'index': index, 125 | } 126 | for index, token in enumerate(tokens, 0) 127 | } 128 | 129 | previous_token = self.checker.get_previous_token_from_position( 130 | lineno=2, 131 | col_offset=3, 132 | tokens=tokens, 133 | start_position_to_token=start_position_to_token, 134 | token_type=tokenize.OP, 135 | token_string='[', 136 | ) 137 | 138 | self.assertEqual( 139 | first=previous_token['token'].start, 140 | second=( 141 | 1, 142 | 9, 143 | ), 144 | ) 145 | 146 | source_code = ( 147 | 'set_1 = {\n' 148 | ' 1,\n' 149 | ' 2,\n' 150 | '}\n' 151 | ) 152 | 153 | tokens = list( 154 | tokenize.tokenize( 155 | readline=io.BytesIO( 156 | initial_bytes=source_code.encode('utf-8'), 157 | ).readline, 158 | ) 159 | ) 160 | start_position_to_token = { 161 | token.start: { 162 | 'token': token, 163 | 
'index': index,
164 |             }
165 |             for index, token in enumerate(tokens, 0)
166 |         }
167 | 
168 |         previous_token = self.checker.get_previous_token_from_position(
169 |             lineno=2,
170 |             col_offset=3,
171 |             tokens=tokens,
172 |             start_position_to_token=start_position_to_token,
173 |             token_type=tokenize.OP,
174 |             token_string='{',
175 |         )
176 | 
177 |         self.assertEqual(
178 |             first=previous_token['token'].start,
179 |             second=(
180 |                 1,
181 |                 8,
182 |             ),
183 |         )
184 | 
185 |         source_code = (
186 |             'dict_1 = {\n'
187 |             '    1: 1,\n'
188 |             '    2: 2,\n'
189 |             '}\n'
190 |         )
191 | 
192 |         tokens = list(
193 |             tokenize.tokenize(
194 |                 readline=io.BytesIO(
195 |                     initial_bytes=source_code.encode('utf-8'),
196 |                 ).readline,
197 |             )
198 |         )
199 |         start_position_to_token = {
200 |             token.start: {
201 |                 'token': token,
202 |                 'index': index,
203 |             }
204 |             for index, token in enumerate(tokens, 0)
205 |         }
206 | 
207 |         previous_token = self.checker.get_previous_token_from_position(
208 |             lineno=2,
209 |             col_offset=3,
210 |             tokens=tokens,
211 |             start_position_to_token=start_position_to_token,
212 |             token_type=tokenize.OP,
213 |             token_string='{',
214 |         )
215 | 
216 |         self.assertEqual(
217 |             first=previous_token['token'].start,
218 |             second=(
219 |                 1,
220 |                 9,
221 |             ),
222 |         )
223 | 
224 |     def test_get_next_token_from_position(
225 |         self,
226 |     ):
227 |         source_code = (
228 |             'tuple_1 = (\n'
229 |             '    1,\n'
230 |             '    2,\n'
231 |             ')\n'
232 |         )
233 | 
234 |         tokens = list(
235 |             tokenize.tokenize(
236 |                 readline=io.BytesIO(
237 |                     initial_bytes=source_code.encode('utf-8'),
238 |                 ).readline,
239 |             )
240 |         )
241 |         start_position_to_token = {
242 |             token.start: {
243 |                 'token': token,
244 |                 'index': index,
245 |             }
246 |             for index, token in enumerate(tokens, 0)
247 |         }
248 | 
249 |         next_token = self.checker.get_next_token_from_position(
250 |             lineno=2,
251 |             col_offset=3,
252 |             tokens=tokens,
253 |             start_position_to_token=start_position_to_token,
254 |             token_type=tokenize.OP,
255 |             token_string=')',
256 |         )
257 | 
258 |         self.assertEqual(
259 |             first=next_token['token'].start,
260 |             second=(
261 |                 4,
262 |                 0,
263 |             ),
264 |         )
265 | 
266 |         source_code = (
267 |             'list_1 = [\n'
268 |             '    1,\n'
269 |             '    2,\n'
270 |             ']\n'
271 |         )
272 | 
273 |         tokens = list(
274 |             tokenize.tokenize(
275 |                 readline=io.BytesIO(
276 |                     initial_bytes=source_code.encode('utf-8'),
277 |                 ).readline,
278 |             )
279 |         )
280 |         start_position_to_token = {
281 |             token.start: {
282 |                 'token': token,
283 |                 'index': index,
284 |             }
285 |             for index, token in enumerate(tokens, 0)
286 |         }
287 | 
288 |         next_token = self.checker.get_next_token_from_position(
289 |             lineno=2,
290 |             col_offset=3,
291 |             tokens=tokens,
292 |             start_position_to_token=start_position_to_token,
293 |             token_type=tokenize.OP,
294 |             token_string=']',
295 |         )
296 | 
297 |         self.assertEqual(
298 |             first=next_token['token'].start,
299 |             second=(
300 |                 4,
301 |                 0,
302 |             ),
303 |         )
304 | 
305 |         source_code = (
306 |             'set_1 = {\n'
307 |             '    1,\n'
308 |             '    2,\n'
309 |             '}\n'
310 |         )
311 | 
312 |         tokens = list(
313 |             tokenize.tokenize(
314 |                 readline=io.BytesIO(
315 |                     initial_bytes=source_code.encode('utf-8'),
316 |                 ).readline,
317 |             )
318 |         )
319 |         start_position_to_token = {
320 |             token.start: {
321 |                 'token': token,
322 |                 'index': index,
323 |             }
324 |             for index, token in enumerate(tokens, 0)
325 |         }
326 | 
327 |         next_token = self.checker.get_next_token_from_position(
328 |             lineno=2,
329 |             col_offset=3,
330 |             tokens=tokens,
331 |             start_position_to_token=start_position_to_token,
332 |             token_type=tokenize.OP,
333 |             token_string='}',
334 |         )
335 | 
336 |         self.assertEqual(
337 |             first=next_token['token'].start,
338 |             second=(
339 |                 4,
340 |                 0,
341 |             ),
342 |         )
343 | 
344 |         source_code = (
345 |             'dict_1 = {\n'
346 |             '    1: 1,\n'
347 |             '    2: 2,\n'
348 |             '}\n'
349 |         )
350 | 
351 |         tokens = list(
352 |             tokenize.tokenize(
353 |                 readline=io.BytesIO(
354 |                     initial_bytes=source_code.encode('utf-8'),
355 |                 ).readline,
356 |             )
357 |         )
358 |         start_position_to_token = {
359 |             token.start: {
360 |                 'token': token,
361 |                 'index': index,
362 |             }
363 |             for index, token in enumerate(tokens, 0)
364 |         }
365 | 
366 |         next_token = self.checker.get_next_token_from_position(
367 |             lineno=2,
368 |             col_offset=3,
369 |             tokens=tokens,
370 |             start_position_to_token=start_position_to_token,
371 |             token_type=tokenize.OP,
372 |             token_string='}',
373 |         )
374 | 
375 |         self.assertEqual(
376 |             first=next_token['token'].start,
377 |             second=(
378 |                 4,
379 |                 0,
380 |             ),
381 |         )
382 | 
383 |     def test_get_node_opener_token(
384 |         self,
385 |     ):
386 |         source_code = (
387 |             'tuple_1 = (\n'
388 |             '    1,\n'
389 |             '    2,\n'
390 |             ')\n'
391 |         )
392 | 
393 |         parsed_source = astroid.parse(source_code)
394 |         tokens = list(
395 |             tokenize.tokenize(
396 |                 readline=io.BytesIO(
397 |                     initial_bytes=source_code.encode('utf-8'),
398 |                 ).readline,
399 |             )
400 |         )
401 |         start_position_to_token = {
402 |             token.start: {
403 |                 'token': token,
404 |                 'index': index,
405 |             }
406 |             for index, token in enumerate(tokens, 0)
407 |         }
408 | 
409 |         node_opener_token = self.checker.get_node_opener_token(
410 |             node=parsed_source.body[0].value,
411 |             tokens=tokens,
412 |             start_position_to_token=start_position_to_token,
413 |         )
414 | 
415 |         self.assertEqual(
416 |             first=node_opener_token['token'].start,
417 |             second=(
418 |                 1,
419 |                 10,
420 |             ),
421 |         )
422 | 
423 |         source_code = (
424 |             'list_1 = [\n'
425 |             '    1,\n'
426 |             '    2,\n'
427 |             ']\n'
428 |         )
429 | 
430 |         parsed_source = astroid.parse(source_code)
431 |         tokens = list(
432 |             tokenize.tokenize(
433 |                 readline=io.BytesIO(
434 |                     initial_bytes=source_code.encode('utf-8'),
435 |                 ).readline,
436 |             )
437 |         )
438 |         start_position_to_token = {
439 |             token.start: {
440 |                 'token': token,
441 |                 'index': index,
442 |             }
443 |             for index, token in enumerate(tokens, 0)
444 |         }
445 | 
446 |         node_opener_token = self.checker.get_node_opener_token(
447 |             node=parsed_source.body[0].value,
448 |             tokens=tokens,
449 |             start_position_to_token=start_position_to_token,
450 |         )
451 | 
452 |         self.assertEqual(
453 |             first=node_opener_token['token'].start,
454 |             second=(
455 |                 1,
456 |                 9,
457 |             ),
458 |         )
459 | 
460 |         source_code = (
461 |             'set_1 = {\n'
462 |             '    1,\n'
463 |             '    2,\n'
464 |             '}\n'
465 |         )
466 | 
467 |         parsed_source = astroid.parse(source_code)
468 |         tokens = list(
469 |             tokenize.tokenize(
470 |                 readline=io.BytesIO(
471 |                     initial_bytes=source_code.encode('utf-8'),
472 |                 ).readline,
473 |             )
474 |         )
475 |         start_position_to_token = {
476 |             token.start: {
477 |                 'token': token,
478 |                 'index': index,
479 |             }
480 |             for index, token in enumerate(tokens, 0)
481 |         }
482 | 
483 |         node_opener_token = self.checker.get_node_opener_token(
484 |             node=parsed_source.body[0].value,
485 |             tokens=tokens,
486 |             start_position_to_token=start_position_to_token,
487 |         )
488 | 
489 |         self.assertEqual(
490 |             first=node_opener_token['token'].start,
491 |             second=(
492 |                 1,
493 |                 8,
494 |             ),
495 |         )
496 | 
497 |         source_code = (
498 |             'dict_1 = {\n'
499 |             '    1: 1,\n'
500 |             '    2: 2,\n'
501 |             '}\n'
502 |         )
503 | 
504 |         parsed_source = astroid.parse(source_code)
505 |         tokens = list(
506 |             tokenize.tokenize(
507 |                 readline=io.BytesIO(
508 |                     initial_bytes=source_code.encode('utf-8'),
509 |                 ).readline,
510 |             )
511 |         )
512 |         start_position_to_token = {
513 |             token.start: {
514 |                 'token': token,
515 |                 'index': index,
516 |             }
517 |             for index, token in enumerate(tokens, 0)
518 |         }
519 | 
520 |         node_opener_token = self.checker.get_node_opener_token(
521 |             node=parsed_source.body[0].value,
522 |             tokens=tokens,
523 |             start_position_to_token=start_position_to_token,
524 |         )
525 | 
526 |         self.assertEqual(
527 |             first=node_opener_token['token'].start,
528 |             second=(
529 |                 1,
530 |                 9,
531 |             ),
532 |         )
533 | 
534 |         source_code = (
535 |             'call(\n'
536 |             '    1,\n'
537 |             '    b=2,\n'
538 |             ')\n'
539 |         )
540 | 
541 |         parsed_source = astroid.parse(source_code)
542 |         tokens = list(
543 |             tokenize.tokenize(
544 |                 readline=io.BytesIO(
545 |                     initial_bytes=source_code.encode('utf-8'),
546 |                 ).readline,
547 |             )
548 |         )
549 |         start_position_to_token = {
550 |             token.start: {
551 |                 'token': token,
552 |                 'index': index,
553 |             }
554 |             for index, token in enumerate(tokens, 0)
555 |         }
556 | 
557 |         node_opener_token = self.checker.get_node_opener_token(
558 |             node=parsed_source.body[0].value,
559 |             tokens=tokens,
560 |             start_position_to_token=start_position_to_token,
561 |         )
562 | 
563 |         self.assertEqual(
564 |             first=node_opener_token['token'].start,
565 |             second=(
566 |                 1,
567 |                 4,
568 |             ),
569 |         )
570 | 
571 |     def test_get_node_closer_token(
572 |         self,
573 |     ):
574 |         source_code = (
575 |             'tuple_1 = (\n'
576 |             '    1,\n'
577 |             '    2,\n'
578 |             ')\n'
579 |         )
580 | 
581 |         parsed_source = astroid.parse(source_code)
582 |         tokens = list(
583 |             tokenize.tokenize(
584 |                 readline=io.BytesIO(
585 |                     initial_bytes=source_code.encode('utf-8'),
586 |                 ).readline,
587 |             )
588 |         )
589 |         start_position_to_token = {
590 |             token.start: {
591 |                 'token': token,
592 |                 'index': index,
593 |             }
594 |             for index, token in enumerate(tokens, 0)
595 |         }
596 | 
597 |         node_closer_token = self.checker.get_node_closer_token(
598 |             node=parsed_source.body[0].value,
599 |             tokens=tokens,
600 |             start_position_to_token=start_position_to_token,
601 |         )
602 | 
603 |         self.assertEqual(
604 |             first=node_closer_token.start,
605 |             second=(
606 |                 4,
607 |                 0,
608 |             ),
609 |         )
610 | 
611 |         source_code = (
612 |             'list_1 = [\n'
613 |             '    1,\n'
614 |             '    2,\n'
615 |             ']\n'
616 |         )
617 | 
618 |         parsed_source = astroid.parse(source_code)
619 |         tokens = list(
620 |             tokenize.tokenize(
621 |                 readline=io.BytesIO(
622 |                     initial_bytes=source_code.encode('utf-8'),
623 |                 ).readline,
624 |             )
625 |         )
626 |         start_position_to_token = {
627 |             token.start: {
628 |                 'token': token,
629 |                 'index': index,
630 |             }
631 |             for index, token in enumerate(tokens, 0)
632 |         }
633 | 
634 |         node_closer_token = self.checker.get_node_closer_token(
635 |             node=parsed_source.body[0].value,
636 |             tokens=tokens,
637 |             start_position_to_token=start_position_to_token,
638 |         )
639 | 
640 |         self.assertEqual(
641 |             first=node_closer_token.start,
642 |             second=(
643 |                 4,
644 |                 0,
645 |             ),
646 |         )
647 | 
648 |         source_code = (
649 |             'set_1 = {\n'
650 |             '    1,\n'
651 |             '    2,\n'
652 |             '}\n'
653 |         )
654 | 
655 |         parsed_source = astroid.parse(source_code)
656 |         tokens = list(
657 |             tokenize.tokenize(
658 |                 readline=io.BytesIO(
659 |                     initial_bytes=source_code.encode('utf-8'),
660 |                 ).readline,
661 |             )
662 |         )
663 |         start_position_to_token = {
664 |             token.start: {
665 |                 'token': token,
666 |                 'index': index,
667 |             }
668 |             for index, token in enumerate(tokens, 0)
669 |         }
670 | 
671 |         node_closer_token = self.checker.get_node_closer_token(
672 |             node=parsed_source.body[0].value,
673 |             tokens=tokens,
674 |             start_position_to_token=start_position_to_token,
675 |         )
676 | 
677 |         self.assertEqual(
678 |             first=node_closer_token.start,
679 |             second=(
680 |                 4,
681 |                 0,
682 |             ),
683 |         )
684 | 
685 |         source_code = (
686 |             'dict_1 = {\n'
687 |             '    1: 1,\n'
688 |             '    2: 2,\n'
689 |             '}\n'
690 |         )
691 | 
692 |         parsed_source = astroid.parse(source_code)
693 |         tokens = list(
694 |             tokenize.tokenize(
695 |                 readline=io.BytesIO(
696 |                     initial_bytes=source_code.encode('utf-8'),
697 |                 ).readline,
698 |             )
699 |         )
700 |         start_position_to_token = {
701 |             token.start: {
702 |                 'token': token,
703 |                 'index': index,
704 |             }
705 |             for index, token in enumerate(tokens, 0)
706 |         }
707 | 
708 |         node_closer_token = self.checker.get_node_closer_token(
709 |             node=parsed_source.body[0].value,
710 |             tokens=tokens,
711 |             start_position_to_token=start_position_to_token,
712 |         )
713 | 
714 |         self.assertEqual(
715 |             first=node_closer_token.start,
716 |             second=(
717 |                 4,
718 |                 0,
719 |             ),
720 |         )
721 | 
722 |         source_code = (
723 |             'call(\n'
724 |             '    1,\n'
725 |             '    b=2,\n'
726 |             ')\n'
727 |         )
728 | 
729 |         parsed_source = astroid.parse(source_code)
730 |         tokens = list(
731 |             tokenize.tokenize(
732 |                 readline=io.BytesIO(
733 |                     initial_bytes=source_code.encode('utf-8'),
734 |                 ).readline,
735 |             )
736 |         )
737 |         start_position_to_token = {
738 |             token.start: {
739 |                 'token': token,
740 |                 'index': index,
741 |             }
742 |             for index, token in enumerate(tokens, 0)
743 |         }
744 | 
745 |         node_closer_token = self.checker.get_node_closer_token(
746 |             node=parsed_source.body[0].value,
747 |             tokens=tokens,
748 |             start_position_to_token=start_position_to_token,
749 |         )
750 | 
751 |         self.assertEqual(
752 |             first=node_closer_token.start,
753 |             second=(
754 |                 4,
755 |                 0,
756 |             ),
757 |         )
758 | 
--------------------------------------------------------------------------------