├── .editorconfig ├── .git-blame-ignore-revs ├── .github ├── actions │ └── setup │ │ └── action.yml └── workflows │ ├── publish-docs.yml │ └── tox.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .pre-commit-hooks.yaml ├── CHANGELOG.md ├── LICENSE.txt ├── Makefile ├── README.md ├── bin ├── bnf_to_cnf │ ├── bnf_to_cnf │ │ ├── __init__.py │ │ ├── driver.py │ │ ├── functools.py │ │ ├── node.py │ │ ├── parser.py │ │ ├── translator.py │ │ └── validate.py │ ├── poetry.lock │ ├── pyproject.toml │ └── tests │ │ ├── test_node.py │ │ ├── test_parse.py │ │ ├── test_translate.py │ │ ├── test_validation.py │ │ └── utils.py ├── doc_extract │ ├── doc_extract │ │ ├── __init__.py │ │ ├── driver.py │ │ ├── extract.py │ │ └── repository.py │ ├── mypy.ini │ ├── poetry.lock │ ├── pyproject.toml │ └── static │ │ ├── .gitignore │ │ ├── Makefile │ │ ├── elm-package.json │ │ ├── elm.json │ │ ├── index.html │ │ ├── src │ │ └── Main.elm │ │ └── styles.css └── sphinx_polyversion │ ├── pyproject.toml │ └── sphinx_polyversion.py ├── darglint2 ├── __init__.py ├── __main__.py ├── analysis │ ├── __init__.py │ ├── abstract_callable_visitor.py │ ├── analysis_helpers.py │ ├── analysis_visitor.py │ ├── argument_visitor.py │ ├── assert_visitor.py │ ├── function_and_method_visitor.py │ ├── function_scoped_visitor.py │ ├── raise_visitor.py │ ├── return_visitor.py │ ├── variable_visitor.py │ └── yield_visitor.py ├── config.py ├── custom_assert.py ├── docstring │ ├── __init__.py │ ├── base.py │ ├── docstring.py │ ├── google.py │ ├── numpy.py │ ├── sections.py │ ├── sphinx.py │ └── style.py ├── driver.py ├── error_report.py ├── errors.py ├── flake8_entry.py ├── function_description.py ├── integrity_checker.py ├── lex.py ├── node.py ├── parse │ ├── __init__.py │ ├── combinator.py │ ├── cyk.py │ ├── google.py │ ├── grammar.py │ ├── grammars │ │ ├── Makefile │ │ ├── __init__.py │ │ ├── google_arguments_section.bnf │ │ ├── google_arguments_section.py │ │ ├── google_base.bnf │ │ ├── google_long_description.bnf │ │ ├── google_long_description.py │ │ ├── google_raises_section.bnf │ │ ├── google_raises_section.py │ │ ├── google_returns_section.bnf │ │ ├── google_returns_section.py │ │ ├── google_returns_section_without_type.bnf │ │ ├── google_returns_section_without_type.py │ │ ├── google_short_description.bnf │ │ ├── google_short_description.py │ │ ├── google_types.bnf │ │ ├── google_yields_section.bnf │ │ ├── google_yields_section.py │ │ ├── google_yields_section_without_type.bnf │ │ ├── google_yields_section_without_type.py │ │ ├── noqa_statement.bnf │ │ ├── numpy.bnf │ │ ├── numpy_arguments_section.bnf │ │ ├── numpy_arguments_section.py │ │ ├── numpy_long_description.bnf │ │ ├── numpy_long_description.py │ │ ├── numpy_other_arguments_section.bnf │ │ ├── numpy_other_arguments_section.py │ │ ├── numpy_raises_section.bnf │ │ ├── numpy_raises_section.py │ │ ├── numpy_receives_section.bnf │ │ ├── numpy_receives_section.py │ │ ├── numpy_returns_section.bnf │ │ ├── numpy_returns_section.py │ │ ├── numpy_short_description.bnf │ │ ├── numpy_short_description.py │ │ ├── numpy_warns_section.bnf │ │ ├── numpy_warns_section.py │ │ ├── numpy_yields_section.bnf │ │ ├── numpy_yields_section.py │ │ ├── sphinx_argument_type_section.bnf │ │ ├── sphinx_argument_type_section.py │ │ ├── sphinx_arguments_section.bnf │ │ ├── sphinx_arguments_section.py │ │ ├── sphinx_base.bnf │ │ ├── sphinx_base.py │ │ ├── sphinx_long_description.bnf │ │ ├── sphinx_long_description.py │ │ ├── sphinx_raises_section.bnf │ │ ├── sphinx_raises_section.py │ │ ├── 
sphinx_return_type_section.bnf │ │ ├── sphinx_return_type_section.py │ │ ├── sphinx_returns_section.bnf │ │ ├── sphinx_returns_section.py │ │ ├── sphinx_short_description.bnf │ │ ├── sphinx_short_description.py │ │ ├── sphinx_variable_type_section.bnf │ │ ├── sphinx_variable_type_section.py │ │ ├── sphinx_variables_section.bnf │ │ ├── sphinx_variables_section.py │ │ ├── sphinx_yield_type_section.bnf │ │ ├── sphinx_yield_type_section.py │ │ ├── sphinx_yields_section.bnf │ │ ├── sphinx_yields_section.py │ │ └── terminals.bnf │ ├── identifiers.py │ ├── long_description.py │ ├── numpy.py │ └── sphinx.py ├── peaker.py ├── strictness.py ├── token.py └── utils.py ├── docker-build ├── Dockerfile.test36 ├── Dockerfile.test37 ├── Dockerfile.test38 ├── Dockerfile.test39 └── Makefile ├── docs ├── Makefile ├── make.bat ├── poster.pdf ├── poster.tex └── source │ ├── .gitignore │ ├── _polyversion │ ├── static │ │ └── .nojekyll │ └── templates │ │ └── index.html │ ├── _static │ └── css │ │ ├── ext-links.css │ │ └── version-selector.css │ ├── _templates │ ├── components │ │ └── edit-this-page.html │ ├── sidebar │ │ └── brand.html │ └── versioning.html │ ├── changelog.rst │ ├── conf.py │ ├── index.rst │ ├── license.rst │ └── readme.rst ├── integration_tests ├── __init__.py ├── analysis_tests.py ├── compatibility.py ├── end_to_end.py ├── files │ ├── example-ascii.py │ ├── example-latin1.py │ ├── example-utf8.py │ ├── google_example.py │ ├── missing_arg_type.py │ ├── numpy_example.py │ ├── problematic.py │ ├── sphinx_example.py │ ├── strictness_example.py │ └── two_spaces.py ├── goldens.py ├── grammar_size.py ├── max_golden_profile.py ├── performance.py ├── sources.py └── test_flake8.py ├── poetry.lock ├── pyproject.toml ├── tests ├── .pydocstyle.ini ├── __init__.py ├── conftest.py ├── sphinx_docstrings.py ├── test_abstract_callable_visitor.py ├── test_analysis_visitor.py ├── test_argument_visitor.py ├── test_assert_visitor.py ├── test_config.py ├── test_custom_assert.py ├── test_cyk.py ├── test_docstring.py ├── test_error.py ├── test_error_report.py ├── test_function_and_method_visitor.py ├── test_function_description.py ├── test_function_scoped_visitor.py ├── test_generated_grammar.py ├── test_grammar.py ├── test_identifiers.py ├── test_integrity_checker.py ├── test_lex.py ├── test_long_description_parse.py ├── test_new_google_parser.py ├── test_node.py ├── test_numpy_parser.py ├── test_parser.py ├── test_parser_combinator.py ├── test_peaker.py ├── test_raise_visitor.py ├── test_returns_visitor.py ├── test_sphinx_parser.py ├── test_variable_visitor.py ├── test_yield_visitor.py └── utils.py └── tox.ini /.editorconfig: -------------------------------------------------------------------------------- 1 | # https://editorconfig.org 2 | 3 | root = true 4 | 5 | [*] 6 | charset = utf-8 7 | indent_style = space 8 | indent_size = 4 9 | end_of_line = lf 10 | insert_final_newline = true 11 | trim_trailing_whitespace = true 12 | 13 | [Makefile] 14 | indent_style = tab 15 | 16 | [*.{yml,yaml}] 17 | indent_style = space 18 | indent_size = 2 19 | -------------------------------------------------------------------------------- /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Run pre-commit formatters on repository. 2 | 0477eebb9ea39f886c092321ded777c8a1937d90 3 | 4 | # Convert type comments to type annotations. 
5 | 012995361d0c7608e50db9421e2df2afc8d9b2ab 6 | -------------------------------------------------------------------------------- /.github/actions/setup/action.yml: -------------------------------------------------------------------------------- 1 | name: Setup 2 | description: Sets up python and poetry 3 | 4 | # note: 5 | # this is a local composite action 6 | # documentation: https://docs.github.com/en/actions/creating-actions/creating-a-composite-action 7 | # code example: https://github.com/GuillaumeFalourd/poc-github-actions/blob/main/.github/actions/local-action/action.yaml 8 | 9 | inputs: 10 | setup-pre-commit: 11 | description: Whether pre-commit shall be setup, too 12 | required: false 13 | default: "false" 14 | install-options: 15 | description: Additional arguments to pass to `poetry install`. 16 | required: false 17 | default: "" 18 | python-version: 19 | description: "The python version to install" 20 | required: false 21 | default: "3.10" 22 | cache-python: 23 | description: Whether to cache poetry venvs 24 | required: false 25 | default: "false" 26 | 27 | runs: 28 | using: "composite" 29 | steps: 30 | # Python + Poetry + Caching 31 | # See https://github.com/actions/setup-python/blob/main/docs/advanced-usage.md#caching- 32 | - name: Install poetry 33 | run: pipx install poetry 34 | shell: bash 35 | 36 | - name: setup python 37 | uses: actions/setup-python@v4 38 | if: inputs.cache-python == 'true' 39 | with: 40 | python-version: ${{ inputs.python-version }} 41 | cache: "poetry" 42 | 43 | - name: setup python 44 | uses: actions/setup-python@v4 45 | if: inputs.cache-python != 'true' 46 | with: 47 | python-version: ${{ inputs.python-version }} 48 | 49 | # list cached envs 50 | - name: Cached envs 51 | shell: bash 52 | run: ls $(poetry config virtualenvs.path) 53 | if: inputs.cache-python == 'true' 54 | 55 | # Install deps 56 | - name: Install project dependencies 57 | run: | 58 | poetry install ${{ inputs.install-options }} 59 | shell: bash 60 | 61 | # Pre-commit 62 | - name: Install pre-commit 63 | if: ${{ inputs.setup-pre-commit == 'true' }} 64 | run: pipx install pre-commit 65 | shell: bash 66 | - name: Set PY variable 67 | if: ${{ inputs.setup-pre-commit == 'true' }} 68 | run: echo "PY=$(python -VV | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV 69 | shell: bash 70 | - name: Configure Caching for Pre-Commit 71 | if: ${{ inputs.setup-pre-commit == 'true' }} 72 | uses: actions/cache@v3 73 | with: 74 | path: ~/.cache/pre-commit 75 | key: pre-commit|${{ env.PY }}|${{ hashFiles('.pre-commit-config.yaml') }} 76 | -------------------------------------------------------------------------------- /.github/workflows/publish-docs.yml: -------------------------------------------------------------------------------- 1 | # Builds multiple versions of the Docs using the polyversion script 2 | name: Publish Docs 3 | 4 | on: 5 | push: 6 | branches: 7 | - "master" 8 | 9 | workflow_dispatch: 10 | inputs: 11 | branch-regex: 12 | description: Python regex matching the branches to build the docs for 13 | type: string 14 | tag-regex: 15 | description: Python regex matching the tags to build the docs for 16 | type: string 17 | 18 | # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages 19 | permissions: 20 | contents: read 21 | pages: write 22 | id-token: write 23 | 24 | # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. 25 | # We may cancel in-progress runs as their results would be overidden any ways. 
26 | concurrency: 27 | group: "pages" 28 | cancel-in-progress: true 29 | 30 | jobs: 31 | publish-docs: 32 | runs-on: ubuntu-latest 33 | environment: 34 | name: github-pages 35 | url: ${{ steps.deployment.outputs.page_url }} 36 | steps: 37 | # Checkout current ref to get the current version of the build script 38 | - name: Checkout 39 | uses: actions/checkout@v3 40 | 41 | # Install poetry 42 | - name: Install poetry 43 | run: pipx install poetry 44 | shell: bash 45 | 46 | # Load cache 47 | - name: Determine poetry venv location 48 | run: echo venv_path="$(poetry config virtualenvs.path)" >> $GITHUB_ENV 49 | 50 | - name: Retrieve cache 51 | uses: actions/cache/restore@v3 52 | id: cache-restore 53 | with: 54 | path: ${{ env.venv_path }} 55 | key: "nocache" 56 | restore-keys: publish-docs|poetry| 57 | 58 | # Setup python + poetry 59 | - name: Setup build deps 60 | uses: ./.github/actions/setup 61 | with: 62 | install-options: --only polyversion --sync --no-root 63 | cache-python: false 64 | 65 | # Configure pages provides deployment URL 66 | - name: Setup Pages 67 | uses: actions/configure-pages@v3 68 | 69 | # Prepare 70 | - name: Fetch tags and branches 71 | run: git fetch -t origin 72 | 73 | - name: Retrieve CPU core count 74 | shell: python 75 | continue-on-error: true 76 | run: | 77 | import os 78 | with open(os.getenv('GITHUB_ENV'), 'a') as f: 79 | f.write('cpu_cores=' + str(len(os.sched_getaffinity(0)))) 80 | 81 | # Set default values for inputs that may be provided via workflow_dispatch 82 | - name: Process args 83 | run: | 84 | branch_regex='${{ inputs.branch-regex }}' 85 | tag_regex='${{ inputs.tag-regex }}' 86 | if [ -z "$tag_regex" ]; then tag_regex='v\d+.\d+.\d+'; fi 87 | if [ -z "$branch_regex" ]; then branch_regex='master'; fi 88 | echo branch_regex="$branch_regex" >> "$GITHUB_ENV" 89 | echo tag_regex="$tag_regex" >> "$GITHUB_ENV" 90 | 91 | # Build in a reproducible location (allows caching) 92 | - name: Build using `sphinx_polyversion` 93 | run: > 94 | poetry run sphinx_polyversion 95 | -a -v 96 | docs/source 97 | build 98 | --temp_dir tmp 99 | --poetry-groups docs 100 | --tag_regex "${{ env.tag_regex }}" 101 | --branch_regex "${{ env.branch_regex }}" 102 | 103 | # Upload cache 104 | - name: Hash venv dir 105 | shell: bash 106 | run: | 107 | echo 'cache_key=publish-docs|poetry|'"$(find ${{ env.venv_path }} -type f -exec sha256sum {} \; | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV 108 | 109 | - name: Upload new cache 110 | uses: actions/cache/save@v3 111 | continue-on-error: true 112 | if: steps.cache-restore.outputs.cache-matched-key != env.cache_key 113 | with: 114 | path: ${{ env.venv_path }} 115 | key: ${{ env.cache_key }} 116 | 117 | # Upload built docs 118 | - name: Upload build artifact 119 | uses: actions/upload-pages-artifact@v1 120 | with: 121 | path: "build" 122 | retention-days: 7 123 | 124 | # Deploy uploaded artifact 125 | - name: Deploy to GitHub Pages 126 | id: deployment 127 | uses: actions/deploy-pages@v2 128 | -------------------------------------------------------------------------------- /.github/workflows/tox.yml: -------------------------------------------------------------------------------- 1 | # From: 2 | # https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#running-tests-with-tox 3 | # 4 | # Adapted original `darglint/.travis.yml`, see 5 | # https://github.com/akaihola/darglint2/blob/abc26b768cd7135d848223ba53f68323593c33d5/.travis.yml 6 | 7 | name: Run Pytest using Tox 8 | 9 | on: [push, pull_request] 10 | 11 | 
jobs: 12 | build: 13 | runs-on: ${{ matrix.os }} 14 | strategy: 15 | matrix: 16 | os: ["ubuntu-latest"] 17 | python: ["3.7", "3.8", "3.9", "3.10", "3.11"] 18 | 19 | steps: 20 | - uses: actions/checkout@v3 21 | - name: Setup Python 22 | uses: actions/setup-python@v4 23 | with: 24 | python-version: ${{ matrix.python }} 25 | - name: Install tox and any other packages 26 | run: pip install pytest tox 27 | - name: Run tox 28 | # Run tox using the version of Python in `PATH` 29 | run: tox -e py 30 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | .static_storage/ 56 | .media/ 57 | local_settings.py 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | _data/ 106 | 107 | .pytest_cache/ 108 | 109 | TODO.md 110 | 111 | # INTEGRATION TESTS 112 | # 113 | # Integration tests require some files which don't properly belong 114 | # in the repository. So, they'll be excluded here. 115 | 116 | # Individual docstrings to parse, and the attributes they're expected 117 | # to have. 118 | integration_tests/goldens.json 119 | 120 | # The largest of the above docstrings. 121 | integration_tests/max_golden.json 122 | 123 | # The cached result of the latest test run, if we want to rerun the 124 | # test just to see the results. 125 | .performance_testrun 126 | .performance_module_testrun 127 | 128 | # When tinkering with performance, this will give an idea of how the 129 | # performance has changed over time. 130 | .performance_history 131 | 132 | # Cloned whole repos to test against. Ideally, these will be quite 133 | # large, so it doesn't make sense to include them. 
134 | integration_tests/repos/ 135 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: f71fa2c1f9cf5cb705f73dffe4b21f7c61470ba9 # frozen: v4.4.0 4 | hooks: 5 | # check file system problems 6 | - id: check-case-conflict 7 | - id: check-symlinks 8 | - id: destroyed-symlinks 9 | 10 | # unify whitespace and line ending 11 | - id: trailing-whitespace 12 | args: [--markdown-linebreak-ext=md] 13 | - id: end-of-file-fixer 14 | - id: mixed-line-ending 15 | 16 | # sort requirements.txt files 17 | - id: requirements-txt-fixer 18 | 19 | # check more 20 | - id: check-yaml 21 | - id: check-toml 22 | - id: check-xml 23 | - id: check-executables-have-shebangs 24 | - id: check-merge-conflict 25 | 26 | - repo: https://github.com/python-poetry/poetry 27 | rev: "7fe4678f92dcab48a35169db6567cbb3f5a7e3ef" # frozen: 1.4.0 28 | hooks: 29 | - id: poetry-check 30 | - id: poetry-lock 31 | args: ["--no-update"] 32 | 33 | - repo: https://github.com/floatingpurr/sync_with_poetry 34 | rev: 6fd6194c7bbd629b18a7bcd7a4a38a57f128b451 # 0.4.0 35 | hooks: 36 | - id: sync_with_poetry 37 | files: ^(\.pre-commit-config\.yaml|poetry\.lock)$ 38 | pass_filenames: false 39 | args: 40 | - "poetry.lock" 41 | 42 | - repo: https://github.com/pycqa/isort 43 | rev: dbf82f2dd09ae41d9355bcd7ab69187a19e6bf2f # frozen: 5.12.0 44 | hooks: 45 | - id: isort 46 | name: isort (python) 47 | 48 | - repo: https://github.com/psf/black 49 | rev: b0d1fba7ac3be53c71fb0d3211d911e629f8aecb # frozen: 23.1.0 50 | hooks: 51 | - id: black 52 | 53 | - repo: https://github.com/pre-commit/mirrors-prettier 54 | rev: "6f3cb139ef36133b6f903b97facc57b07cef57c9" # frozen: v3.0.0-alpha.6 55 | hooks: 56 | - id: prettier 57 | exclude: ^docs/source/(_polyversion/|_)templates/ 58 | 59 | - repo: https://github.com/abravalheri/validate-pyproject 60 | rev: cc27459d0f843a9333dd181e7451a8d041591a25 # frozen: v0.12.1 61 | hooks: 62 | - id: validate-pyproject 63 | files: pyproject.toml$ 64 | 65 | # linters: flake8, mypy, pydocstyle,darglint2 66 | - repo: https://github.com/pycqa/flake8 67 | rev: "3.9.2" # frozen: 6.0.0 68 | hooks: 69 | - id: flake8 70 | 71 | - repo: https://github.com/akaihola/darglint2 72 | rev: 7d5a99253ae80435824463c9f36256e870041258 # frozen: v1.8.2 73 | hooks: 74 | - id: darglint2 75 | exclude: ^(docs|tests|integration_tests) 76 | 77 | # configuration for the pre-commit.ci bot 78 | # only relevant when actually using the bot 79 | ci: 80 | autofix_commit_msg: | 81 | [pre-commit.ci] auto fixes from pre-commit hooks 82 | 83 | for more information, see https://pre-commit.ci, https://pre-commit.com and 84 | the `.pre-commit-config.yaml` file in this repository. 85 | 86 | autofix_prs: true # default 87 | autoupdate_branch: "develop" 88 | autoupdate_commit_msg: | 89 | [pre-commit.ci] Autoupdate pre-commit hook versions. 90 | 91 | for more information, see https://pre-commit.ci, https://pre-commit.com and 92 | the `.pre-commit-config.yaml` file in this repository. 93 | 94 | submodules: false # default 95 | -------------------------------------------------------------------------------- /.pre-commit-hooks.yaml: -------------------------------------------------------------------------------- 1 | - id: darglint2 2 | name: darglint2 3 | description: "`darglint2` is a tool for linting python docstrings." 
4 | entry: darglint2 5 | language: python 6 | types: [python] 7 | require_serial: true 8 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Terrence Reilly 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | MOCK_VERSION ?= v1.5 2 | .PHONY: docs 3 | docs: 4 | MOCK_VERSION=$(MOCK_VERSION) poetry run sphinx-build -a -v docs/source docs/build 5 | 6 | .PHONY: docs-polyversion 7 | docs-polyversion: 8 | poetry run sphinx_polyversion -a -v docs/source docs/build --poetry-groups docs 9 | 10 | .PHONY: lint 11 | lint: 12 | poetry run pre-commit run --all-files 13 | 14 | .PHONY: test 15 | test: 16 | poetry run tox 17 | 18 | .PHONY: clean 19 | clean: 20 | rm -R -f docs/build 21 | -------------------------------------------------------------------------------- /bin/bnf_to_cnf/bnf_to_cnf/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akaihola/darglint2/72c51e90c187610c52fd5076b4c28e1202ece84f/bin/bnf_to_cnf/bnf_to_cnf/__init__.py -------------------------------------------------------------------------------- /bin/bnf_to_cnf/bnf_to_cnf/driver.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from pathlib import Path 3 | from typing import Dict, Iterator, Optional, Union 4 | 5 | from .node import Node 6 | from .parser import Parser 7 | from .translator import Translator 8 | from .validate import Validator 9 | 10 | parser = argparse.ArgumentParser(description="Convert BNF grammar to CNF") 11 | parser.add_argument( 12 | "file", 13 | type=Path, 14 | help=("The file to read the grammar from."), 15 | ) 16 | parser.add_argument( 17 | "-f", 18 | "--format", 19 | choices=["cyk", "py"], 20 | default="py", 21 | nargs="?", 22 | type=str, 23 | help=( 24 | 'The output format. Can be either "cyk" or "py". "cyk" ' 25 | "outputs the file in CYK format, as a .cyk file. Py " 26 | "generates a grammar which can be read by darglint2." 
27 | ), 28 | ) 29 | parser.add_argument( 30 | "-o", "--output", nargs=1, type=str, default=None, help=("The output file.") 31 | ) 32 | 33 | 34 | class Driver(object): 35 | def __init__(self): 36 | self.data: Optional[str] = None 37 | self.parser = Parser() 38 | self.validator = Validator() 39 | self.translator = Translator() 40 | self.tree: Optional[Node] = None 41 | 42 | def read(self, filename: Union[str, Path]) -> "Driver": 43 | with open(filename, "r") as fin: 44 | self.data = fin.read() 45 | return self 46 | 47 | def parse(self) -> "Driver": 48 | self.tree = self.parser.parse(self.data) 49 | return self 50 | 51 | def translate(self) -> "Driver": 52 | self.translator.translate(self.tree) 53 | return self 54 | 55 | def validate(self) -> "Driver": 56 | self.validator.validate(self.tree) 57 | return self 58 | 59 | def write(self, _format: str) -> str: 60 | assert self.tree is not None 61 | if _format == "cyk": 62 | return str(self.tree) 63 | elif _format == "py": 64 | return self.tree.to_python() 65 | else: 66 | raise Exception(f"Unrecognized format type {_format}") 67 | 68 | def get_imports(self) -> Iterator[str]: 69 | assert self.tree is not None 70 | for _import in self.tree.filter(Node.is_import): 71 | assert _import.value is not None 72 | yield _import.value 73 | 74 | def merge(self, driver: "Driver"): 75 | """Merge in the grammar at the given filename with this grammar. 76 | 77 | Args: 78 | driver: Another driver to merge into this one. 79 | 80 | """ 81 | assert self.tree is not None 82 | assert driver.tree is not None 83 | self.tree.merge(driver.tree) 84 | 85 | 86 | def load_script(filepath: Path, cache: Dict[str, Driver] = None): 87 | """Recursively load a script, parsing it and adding dependencies. 88 | 89 | Args: 90 | filepath: The path of the file to open. 91 | cache: A cache to avoid duplicate work. 92 | 93 | Returns: 94 | The fully parsed grammar. 95 | 96 | Raises: 97 | ValueError: If `filepath` is in `cache` already. 98 | 99 | """ 100 | if cache is None: 101 | cache = {} 102 | 103 | filepath = filepath.resolve() 104 | filepath_str = str(filepath) 105 | 106 | if filepath_str in cache: 107 | raise ValueError(f"File {filepath_str} was already imported.") 108 | 109 | driver = Driver().read(filepath).parse() 110 | cache[filepath_str] = driver 111 | directory = filepath.parent 112 | 113 | # We know that merging doesn't introduce new imports, 114 | # so it's safe to immediately merge subgrammars. 115 | for imported_file in driver.get_imports(): 116 | imported_path = (directory / imported_file).resolve() 117 | if str(imported_path) not in cache: 118 | # We skip already imported scripts, to avoid 119 | # having multiple copies of the productions. 
120 | subdriver = load_script(imported_path, cache) 121 | driver.merge(subdriver) 122 | 123 | return driver 124 | 125 | 126 | def main(): 127 | args = parser.parse_args() 128 | driver = load_script(args.file) 129 | translated = driver.translate().validate().write(args.format) 130 | 131 | if args.output: 132 | with open(args.output[0], "w") as fout: 133 | fout.write(translated) 134 | else: 135 | print(translated) 136 | 137 | 138 | if __name__ == "__main__": 139 | main() 140 | -------------------------------------------------------------------------------- /bin/bnf_to_cnf/bnf_to_cnf/functools.py: -------------------------------------------------------------------------------- 1 | from typing import Callable, Iterator, TypeVar 2 | 3 | 4 | def exists(it: Iterator) -> bool: 5 | try: 6 | next(it) 7 | except StopIteration: 8 | return False 9 | return True 10 | 11 | 12 | T = TypeVar("T") 13 | 14 | 15 | def and_(*args: Callable[[T], bool]) -> Callable[[T], bool]: 16 | def _inner(x: T) -> bool: 17 | for fn in args: 18 | if not fn(x): 19 | return False 20 | return True 21 | 22 | return _inner 23 | 24 | 25 | def or_(*args: Callable[[T], bool]) -> Callable[[T], bool]: 26 | def _inner(x: T) -> bool: 27 | for fn in args: 28 | if fn(x): 29 | return True 30 | return False 31 | 32 | return _inner 33 | 34 | 35 | def not_(fn: Callable[[T], bool]) -> Callable[[T], bool]: 36 | def _inner(x: T) -> bool: 37 | return not fn(x) 38 | 39 | return _inner 40 | -------------------------------------------------------------------------------- /bin/bnf_to_cnf/bnf_to_cnf/parser.py: -------------------------------------------------------------------------------- 1 | from lark import Lark 2 | 3 | from .node import Node, NodeType 4 | 5 | 6 | class Parser(object): 7 | grammar = r""" 8 | start: grammar 9 | 10 | grammar: imports? external_imports? name? start_expression? production+ 11 | 12 | production: annotations? symbol _OPER expression 13 | _OPER: "::=" 14 | 15 | expression: sequence (_BAR sequence)* 16 | _BAR: "|" 17 | sequence: probability? annotations? (symbol | TERMINAL) (_WHITESPACE (symbol | TERMINAL))* 18 | TERMINAL: "\"" (LETTER | ESCAPED | NUMBER | "_" | "-" | ":")+ "\"" 19 | | "ε" 20 | ESCAPED: "\\" ("." | "," | "*" | "^" | "(" 21 | | ")" | "+" | "-" | "/" | "\"" 22 | | " " | "]" | "[" | "|") 23 | 24 | probability: NUMBER+ 25 | 26 | start_expression: _START symbol 27 | _START: "start:" 28 | 29 | name: _GRAMMAR NAME 30 | NAME: LETTER+ 31 | _GRAMMAR: "Grammar:" 32 | 33 | external_imports: external_import+ 34 | external_import: _FROM FILENAME _IMPORT _LP items _RP 35 | _FROM: "from" 36 | _LP: "(" 37 | _RP: ")" 38 | items: ITEM ","? 39 | | ITEM "," items 40 | ITEM: /\w+/ 41 | 42 | imports: import+ 43 | import: _IMPORT FILENAME 44 | FILENAME: /(\w|\\|\.|-|_)+/ 45 | _IMPORT: "import" 46 | 47 | annotations: annotation+ 48 | annotation: _AT IDENT 49 | _AT: "@" 50 | 51 | symbol: _LB IDENT _RB 52 | _LB: "<" 53 | _RB: ">" 54 | IDENT: LETTER (LETTER | NUMBER | "_" | "-")* 55 | 56 | %import common.LETTER 57 | %import common.NUMBER 58 | 59 | _COMMENT: /#[^\n]*/ 60 | %ignore _COMMENT 61 | 62 | _WHITESPACE: (" " | "\n" | "\t")+ 63 | %ignore _WHITESPACE 64 | """ # noqa: E501 65 | 66 | def __init__(self): 67 | self.delegate = Lark(self.grammar) 68 | 69 | def parse(self, value: str) -> Node: 70 | tree = self.delegate.parse(value) 71 | return Node.from_lark_tree(tree) 72 | 73 | def parse_production(self, value: str) -> Node: 74 | """Parse just an production. 75 | 76 | Args: 77 | value: The string to parse. 
78 | 79 | Raises: 80 | Exception: If there is more than a single production in the 81 | value. 82 | 83 | Returns: 84 | A node which is the head of the production (not the grammar). 85 | 86 | """ 87 | if "\n" in value: 88 | raise Exception( 89 | "There should only be a single production, but " "a newline is present." 90 | ) 91 | grammar = self.parse(value) 92 | if grammar.children[0].node_type == NodeType.PRODUCTION: 93 | production = grammar.children[0] 94 | else: 95 | production = grammar.children[1] 96 | grammar.children = list() 97 | return production 98 | -------------------------------------------------------------------------------- /bin/bnf_to_cnf/bnf_to_cnf/validate.py: -------------------------------------------------------------------------------- 1 | """A utility to convert from Backus-Naur form to Chomsky Normal Form. 2 | 3 | Backus-Naur Form (BNF) is a more natural way to encode the grammars for 4 | docstrings. However, the CYK parsing algorithm (in its simplest form) 5 | requires Chomsky Normal Form (CNF). This utility, intended for use during 6 | development, converts BNF grammars to CNF grammars, using the algorithm 7 | outlined in the Wikipedia page on CNF: 8 | https://en.wikipedia.org/wiki/Chomsky_normal_form. 9 | 10 | The general plan here is to parse the BNF, manipulate the tree, then 11 | produce the CNF of the grammar from the modified tree. 12 | 13 | """ 14 | import re 15 | 16 | from .node import Node, NodeType 17 | 18 | 19 | class ValidationError(Exception): 20 | pass 21 | 22 | 23 | class Validator(object): 24 | """Validates a CNF production. 25 | 26 | For now, because it's simple, this validator operates 27 | on a string representation of the production. In the 28 | future, we should probably move it to validate the tree. 29 | 30 | """ 31 | 32 | def __init__(self, raise_exception: bool = False): 33 | """Create a new Validator. 34 | 35 | Args: 36 | raise_exception: If False, this validation returns 37 | whether the production is valid or not. If true, 38 | then it raises an exception with an explanation of 39 | why. 40 | 41 | """ 42 | self.raise_exception = raise_exception 43 | 44 | def _wrap(self, reason: str) -> bool: 45 | if self.raise_exception: 46 | raise ValidationError(reason) 47 | return False 48 | 49 | def _validate_sequence(self, sequence: Node) -> bool: 50 | assert ( 51 | sequence.node_type == NodeType.SEQUENCE 52 | ), f'"{sequence}" is not a Sequence' 53 | assert len(sequence.children) > 0 54 | if len(sequence.children) == 1: 55 | if sequence.children[0].node_type != NodeType.TERMINAL: 56 | return self._wrap( 57 | f'"{sequence}" has only one node: it should be terminal.' 58 | ) 59 | elif len(sequence.children) == 2: 60 | if not all([x.node_type == NodeType.SYMBOL for x in sequence.children]): 61 | return self._wrap( 62 | f'"{sequence}" has two nodes: they should ' f"be non-terminals." 63 | ) 64 | else: 65 | return self._wrap(f'"{sequence}" has more than 2 tokens on the RHS.') 66 | return True 67 | 68 | def _validate_production(self, production: Node) -> bool: 69 | assert production.node_type == NodeType.PRODUCTION 70 | for sequence in production.children[1].children: 71 | if not self._validate_sequence(sequence): 72 | return False 73 | return True 74 | 75 | def _validate_import(self, _import: Node) -> bool: 76 | assert _import.node_type == NodeType.IMPORT 77 | 78 | # There should be a value, if there is an import.
79 | return _import.value is not None 80 | 81 | def _validate_name(self, name: Node) -> bool: 82 | name_pattern = re.compile(r"\w+") 83 | if name.value is None: 84 | return False 85 | return name_pattern.fullmatch(name.value) is not None 86 | 87 | def validate(self, grammar: Node) -> bool: 88 | """Validate that the given production is in CNF. 89 | 90 | Args: 91 | grammar: The grammar to validate. 92 | 93 | Throws: 94 | ValidationError: If the grammar was not valid 95 | and raise_exception is True. 96 | 97 | Returns: 98 | Whether the grammar is valid or not. 99 | 100 | """ 101 | assert grammar.node_type == NodeType.GRAMMAR 102 | 103 | for _import in grammar.filter(Node.is_import): 104 | if not self._validate_import(_import): 105 | return False 106 | for name in grammar.filter(Node.is_name): 107 | if not self._validate_name(name): 108 | return False 109 | for production in grammar.filter(Node.is_production): 110 | if not self._validate_production(production): 111 | return False 112 | return True 113 | -------------------------------------------------------------------------------- /bin/bnf_to_cnf/poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "lark-parser" 5 | version = "0.7.8" 6 | description = "a modern parsing library" 7 | category = "main" 8 | optional = false 9 | python-versions = "*" 10 | files = [ 11 | {file = "lark-parser-0.7.8.tar.gz", hash = "sha256:26215ebb157e6fb2ee74319aa4445b9f3b7e456e26be215ce19fdaaa901c20a4"}, 12 | ] 13 | 14 | [metadata] 15 | lock-version = "2.0" 16 | python-versions = "^3.7" 17 | content-hash = "0bff61d1f953365d970582fe159520cd3ecc5844d289e27a9c1bcd930c197946" 18 | -------------------------------------------------------------------------------- /bin/bnf_to_cnf/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["poetry-core>=1.0.0"] 3 | build-backend = "poetry.core.masonry.api" 4 | 5 | [tool.poetry] 6 | name = "bnf_to_cnf" 7 | version = "0.1.0" 8 | description = "Converts BNF notation to CNF, as a python class." 
9 | authors = ["Terrence Reilly "] 10 | license = "MIT" 11 | repository = "https://github.com/akaihola/darglint2" 12 | keywords = ["grammar", "bnf", "cnf"] 13 | classifiers = [ 14 | "Development Status :: 2 - Pre-Alpha", 15 | "Topic :: Software Development :: Code Generators", 16 | "Topic :: Software Development :: Compilers", 17 | "License :: OSI Approved :: MIT License", 18 | "Programming Language :: Python :: 3.7", 19 | "Intended Audience :: Developers", 20 | ] 21 | 22 | [tool.poetry.scripts] 23 | bnf_to_cnf = "bnf_to_cnf.driver:main" 24 | 25 | [tool.poetry.dependencies] 26 | python = "^3.7" 27 | lark-parser = "^0.7.8" 28 | -------------------------------------------------------------------------------- /bin/bnf_to_cnf/tests/test_validation.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from bnf_to_cnf.parser import Parser 4 | from bnf_to_cnf.validate import Validator 5 | 6 | 7 | class ValidateCnfTests(TestCase): 8 | def setUp(self): 9 | self.parser = Parser() 10 | 11 | def _p(self, grammar): 12 | return self.parser.parse(grammar) 13 | 14 | def test_terminal_production(self): 15 | """Make sure a terminal production passes.""" 16 | valid_productions = [ 17 | ' ::= ":"', 18 | ' ::= "Args"', 19 | ' ::= "\\"\\"\\""', 20 | # ' ::= "."', 21 | ] 22 | for production in valid_productions: 23 | self.assertTrue( 24 | Validator().validate(self._p(production)), 25 | '"{}" should be valid'.format(production), 26 | ) 27 | 28 | def test_escaped_characters_okay(self): 29 | """Make sure that special characters are okay if escaped.""" 30 | for c in ", +*()[]|": 31 | self.assertTrue( 32 | Validator().validate(self._p(' ::= "A\\{}B"'.format(c))), 33 | 'Escaping "{}" should allow it.'.format(c), 34 | ) 35 | -------------------------------------------------------------------------------- /bin/bnf_to_cnf/tests/utils.py: -------------------------------------------------------------------------------- 1 | import random 2 | import string 3 | 4 | 5 | def random_string(min_length: int = 1, max_length: int = 20) -> str: 6 | ret = "" 7 | for i in range(random.randint(min_length, max_length)): 8 | ret += random.choice(string.ascii_letters) 9 | return ret 10 | -------------------------------------------------------------------------------- /bin/doc_extract/doc_extract/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akaihola/darglint2/72c51e90c187610c52fd5076b4c28e1202ece84f/bin/doc_extract/doc_extract/__init__.py -------------------------------------------------------------------------------- /bin/doc_extract/doc_extract/driver.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import json 3 | import random 4 | import sys 5 | from typing import Any, Dict, List 6 | 7 | from .extract import extract 8 | from .repository import Repository 9 | 10 | parser = argparse.ArgumentParser(description="Extract docstrings from git repos.") 11 | parser.add_argument( 12 | "-s", 13 | "--source", 14 | nargs="?", 15 | type=str, 16 | default="", 17 | help=("The file to get repositories from. If missing, " "reads from stdin."), 18 | ) 19 | parser.add_argument( 20 | "-o", 21 | "--output", 22 | nargs="?", 23 | default="", 24 | help=( 25 | "The file to place the extracted docstrings into, in " 26 | "JSON format. If missing, prints to stdout."
27 | ), 28 | ) 29 | parser.add_argument( 30 | "-v", "--version", action="store_true", help=("Get the version number.") 31 | ) 32 | parser.add_argument( 33 | "--shuffle", 34 | action="store_true", 35 | help=("Shuffle the resulting docstrings."), 36 | ) 37 | 38 | 39 | version = "0.0.1" 40 | 41 | 42 | class Driver(object): 43 | def __init__( 44 | self, 45 | source: str = "", 46 | output: str = "", 47 | shuffle: bool = False, 48 | ): 49 | self.source = source 50 | self.output = output 51 | self.shuffle = shuffle 52 | 53 | def run(self) -> None: 54 | if self.source: 55 | try: 56 | with open(self.source, "r") as fin: 57 | paths = [x.strip() for x in fin.readlines()] 58 | except Exception as ex: 59 | print(f"Failed to open sourcefile {ex}") 60 | else: 61 | paths = [x.strip() for x in sys.stdin.readlines()] 62 | 63 | contents: List[Dict[str, Any]] = list() 64 | 65 | for path in paths: 66 | try: 67 | repo = Repository(path) 68 | for filename in repo.python_files: 69 | for docstring in extract(repo.python_files[filename]): 70 | if not docstring: 71 | continue 72 | contents.append( 73 | { 74 | "filename": filename, 75 | "docstring": docstring, 76 | "repository": path, 77 | "type": "NUMPY", 78 | "metadata": { 79 | "arguments": [], 80 | "raises": [], 81 | "variables": [], 82 | "sections": [], 83 | "noqas": [], 84 | }, 85 | } 86 | ) 87 | except Exception as ex: 88 | print(f"Unable to read from {path}: {ex}", file=sys.stderr) 89 | 90 | if self.shuffle: 91 | random.shuffle(contents) 92 | 93 | try: 94 | with open(self.output, "w") if self.output else sys.stdout as fout: 95 | json.dump(contents, fout) 96 | except Exception as ex: 97 | print(f"Unable to write to output {ex}") 98 | 99 | 100 | def main(): 101 | args = parser.parse_args() 102 | if args.version: 103 | print(version) 104 | return 105 | try: 106 | Driver( 107 | source=args.source, 108 | output=args.output, 109 | shuffle=args.shuffle, 110 | ).run() 111 | except Exception as ex: 112 | print(f"Encountered error during execution: {ex}") 113 | 114 | 115 | if __name__ == "__main__": 116 | main() 117 | -------------------------------------------------------------------------------- /bin/doc_extract/doc_extract/extract.py: -------------------------------------------------------------------------------- 1 | """Functions for extracting docstrings from a file.""" 2 | 3 | import ast 4 | 5 | from darglint2.function_description import get_function_descriptions 6 | 7 | 8 | def extract(contents: str): 9 | """Extract all docstrings from the given file. 10 | 11 | Args: 12 | contents: The contents of the python file. 13 | 14 | Yields: 15 | The docstrings from the function descriptions. 
16 | 17 | """ 18 | tree = ast.parse(contents) 19 | for function_description in get_function_descriptions(tree): 20 | yield function_description.docstring 21 | -------------------------------------------------------------------------------- /bin/doc_extract/doc_extract/repository.py: -------------------------------------------------------------------------------- 1 | """Containts the repository class representing a git repo.""" 2 | 3 | import os 4 | import subprocess 5 | import tempfile 6 | from typing import Dict, Iterator 7 | 8 | 9 | class Repository(object): 10 | """Represents a git repository containing a python project.""" 11 | 12 | def __init__(self, path: str) -> None: 13 | self.files: Dict[str, str] = dict() 14 | try: 15 | with tempfile.TemporaryDirectory() as tempdir: 16 | subprocess.run(["git", "clone", path, tempdir]) 17 | for filename in self._get_files(tempdir): 18 | try: 19 | with open(filename, "r") as fin: 20 | self.files[filename] = fin.read() 21 | except Exception as ex: 22 | print(f"Unable to read from file {filename}: {ex}") 23 | except Exception as ex: 24 | print(f"Unable to create temporary directory: {ex}") 25 | 26 | def _get_files(self, tempdir: str) -> Iterator[str]: 27 | for dirpath, _, filenames in os.walk(tempdir): 28 | for filename in filenames: 29 | if filename.endswith(".py"): 30 | yield os.path.join(dirpath, filename) 31 | 32 | @property 33 | def python_files(self) -> Dict[str, str]: 34 | return self.files 35 | -------------------------------------------------------------------------------- /bin/doc_extract/mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | ignore_missing_imports = True 3 | -------------------------------------------------------------------------------- /bin/doc_extract/poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "darglint2" 5 | version = "1.8.2" 6 | description = "A utility for ensuring docstrings stay up to date with the source code." 7 | category = "main" 8 | optional = false 9 | python-versions = "^3.7" 10 | files = [] 11 | develop = false 12 | 13 | [package.source] 14 | type = "directory" 15 | url = "../.." 16 | 17 | [metadata] 18 | lock-version = "2.0" 19 | python-versions = "^3.7" 20 | content-hash = "5dc1b58b18dbd186d4f1c3c7fa9a4110281b1d16f329c10d668ffc27b44b97e0" 21 | -------------------------------------------------------------------------------- /bin/doc_extract/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["poetry-core>=1.0.0"] 3 | build-backend = "poetry.core.masonry.api" 4 | 5 | [tool.poetry] 6 | name = "doc_extract" 7 | version = "0.0.1" 8 | description = "Extracts docstrings from a repo and allows annotating them." 
9 | authors = ["Terrence Reilly "] 10 | license = "MIT" 11 | repository = "https://github.com/akaihola/darglint2" 12 | keywords = ["utils", "documentation"] 13 | include = ["doc_extract", "static"] 14 | classifiers = [ 15 | "Development Status :: 2 - Pre-Alpha", 16 | "License :: OSI pproved :: MIT License", 17 | "Programming Language :: Python :: 3.7", 18 | "Intended Audience :: Developers", 19 | ] 20 | 21 | [tool.poetry.scripts] 22 | doc_extract = "doc_extract.driver:main" 23 | 24 | [tool.poetry.dependencies] 25 | python = "^3.7" 26 | darglint2 = {path = "../.."} 27 | -------------------------------------------------------------------------------- /bin/doc_extract/static/.gitignore: -------------------------------------------------------------------------------- 1 | main.js 2 | elm-stuff/ 3 | -------------------------------------------------------------------------------- /bin/doc_extract/static/Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: all 2 | all: 3 | elm make --debug src/Main.elm --output=main.js 4 | -------------------------------------------------------------------------------- /bin/doc_extract/static/elm-package.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.0.0", 3 | "summary": "helpful summary of your project, less than 80 characters", 4 | "repository": "https://github.com/user/project.git", 5 | "license": "BSD3", 6 | "source-directories": ["."], 7 | "exposed-modules": [], 8 | "dependencies": { 9 | "elm-lang/core": "5.1.1 <= v < 6.0.0", 10 | "elm-lang/html": "2.0.0 <= v < 3.0.0" 11 | }, 12 | "elm-version": "0.18.0 <= v < 0.19.0" 13 | } 14 | -------------------------------------------------------------------------------- /bin/doc_extract/static/elm.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "application", 3 | "source-directories": ["src"], 4 | "elm-version": "0.19.1", 5 | "dependencies": { 6 | "direct": { 7 | "elm/browser": "1.0.1", 8 | "elm/core": "1.0.2", 9 | "elm/file": "1.0.5", 10 | "elm/html": "1.0.0", 11 | "elm/json": "1.1.3" 12 | }, 13 | "indirect": { 14 | "elm/bytes": "1.0.8", 15 | "elm/time": "1.0.0", 16 | "elm/url": "1.0.0", 17 | "elm/virtual-dom": "1.0.2" 18 | } 19 | }, 20 | "test-dependencies": { 21 | "direct": {}, 22 | "indirect": {} 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /bin/doc_extract/static/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 15 | 16 | 17 | 18 |
19 | 20 | 21 | -------------------------------------------------------------------------------- /bin/doc_extract/static/styles.css: -------------------------------------------------------------------------------- 1 | .main-content { 2 | width: 800px; 3 | height: 580px; 4 | 5 | display: flex; 6 | flex-direction: column; 7 | } 8 | 9 | .actions { 10 | display: flex; 11 | flex-direction: row; 12 | justify-content: space-between; 13 | } 14 | 15 | .actions .default-entry { 16 | cursor: pointer; 17 | margin-top: auto; 18 | } 19 | 20 | .docstring-container { 21 | display: flex; 22 | flex-direction: row; 23 | height: 100%; 24 | } 25 | 26 | .docstring-container .docstring { 27 | margin: auto; 28 | display: flex; 29 | flex-direction: column; 30 | flex: auto; 31 | } 32 | 33 | .docstring-container .docstring .word { 34 | margin-right: 0.5em; 35 | -webkit-user-select: none; /* Safari */ 36 | -moz-user-select: none; /* Firefox */ 37 | -ms-user-select: none; /* IE10+/Edge */ 38 | user-select: none; /* Standard */ 39 | } 40 | 41 | .docstring-container .docstring .line { 42 | display: flex; 43 | flex-direction: row; 44 | min-height: 1em; 45 | } 46 | 47 | .docstring-container .docstring .indent { 48 | width: 4em; 49 | height: 1px; 50 | display: inline-block; 51 | } 52 | 53 | .docstring-container .metadata-container { 54 | margin: 20px 0 20px 20px; 55 | } 56 | 57 | .docstring-container .metadata { 58 | display: flex; 59 | flex-direction: column; 60 | justify-content: space-around; 61 | } 62 | 63 | .docstring-container .metadata .dropbox { 64 | display: flex; 65 | flex-direction: column; 66 | box-shadow: inset 0px 0px 1px 1px rgba(0, 0, 0, 0.3); 67 | width: 200px; 68 | min-height: 100px; 69 | margin: 4px; 70 | overflow-y: scroll; 71 | } 72 | 73 | .selected { 74 | background-color: rgba(0, 256, 0, 0.1); 75 | } 76 | 77 | .docstring-container .metadata .dropbox .section-title { 78 | margin-left: auto; 79 | margin-right: auto; 80 | font-style: italic; 81 | cursor: pointer; 82 | } 83 | 84 | .selector { 85 | display: flex; 86 | flex-direction: row; 87 | justify-content: space-around; 88 | margin-top: auto; 89 | } 90 | 91 | .selector .page-number { 92 | display: flex; 93 | flex-direction: row; 94 | } 95 | 96 | .selector input { 97 | -webkit-appearance: none; 98 | -moz-appearance: textfield; 99 | margin: 0; 100 | width: 25px; 101 | } 102 | 103 | .metadata { 104 | display: flex; 105 | flex-direction: column; 106 | } 107 | 108 | .metadatum { 109 | cursor: pointer; 110 | } 111 | -------------------------------------------------------------------------------- /bin/sphinx_polyversion/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["poetry-core>=1.0.0"] 3 | build-backend = "poetry.core.masonry.api" 4 | 5 | [tool.poetry] 6 | name = "sphinx_polyversion" 7 | version = "0.0.1" 8 | description = "Build multiple versions of your sphinx docs into one website" 9 | authors = ["yfprojects "] 10 | license = "MIT" 11 | repository = "https://github.com/akaihola/darglint2" 12 | keywords = ["utils", "documentation", "sphinx"] 13 | classifiers = [ 14 | "Development Status :: 2 - Pre-Alpha", 15 | "License :: OSI pproved :: MIT License", 16 | "Programming Language :: Python :: 3.7", 17 | "Intended Audience :: Developers", 18 | ] 19 | 20 | 21 | [tool.poetry.dependencies] 22 | python = ">=3.7" 23 | jinja2 = "^3" 24 | 25 | [tool.poetry.scripts] 26 | sphinx_polyversion = "sphinx_polyversion:run" 27 | 
-------------------------------------------------------------------------------- /darglint2/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "1.8.2" 2 | -------------------------------------------------------------------------------- /darglint2/__main__.py: -------------------------------------------------------------------------------- 1 | from .driver import main 2 | 3 | main() 4 | -------------------------------------------------------------------------------- /darglint2/analysis/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akaihola/darglint2/72c51e90c187610c52fd5076b4c28e1202ece84f/darglint2/analysis/__init__.py -------------------------------------------------------------------------------- /darglint2/analysis/abstract_callable_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | 3 | from .analysis_helpers import _has_decorator 4 | 5 | 6 | class AbstractCallableVisitor(ast.NodeVisitor): 7 | def __init__(self, *args, **kwargs): 8 | super().__init__(*args, **kwargs) 9 | 10 | self.is_abstract = None 11 | 12 | def _is_docstring(self, node: ast.AST) -> bool: 13 | return isinstance(node, ast.Expr) and ( 14 | (isinstance(node.value, ast.Constant) and isinstance(node.value.value, str)) 15 | or (isinstance(node.value, ast.Str)) # Python < 3.8 16 | ) 17 | 18 | def _is_ellipsis(self, node: ast.AST) -> bool: 19 | return isinstance(node, ast.Expr) and ( 20 | (isinstance(node.value, ast.Constant) and node.value.value is Ellipsis) 21 | or (isinstance(node.value, ast.Ellipsis)) # Python < 3.8 22 | ) 23 | 24 | def _is_raise_NotImplementedException(self, node: ast.AST) -> bool: 25 | return isinstance(node, ast.Raise) and ( 26 | (isinstance(node.exc, ast.Name) and node.exc.id == "NotImplementedError") 27 | or ( 28 | isinstance(node.exc, ast.Call) 29 | and isinstance(node.exc.func, ast.Name) 30 | and node.exc.func.id == "NotImplementedError" 31 | ) 32 | ) 33 | 34 | def _is_return_NotImplemented(self, node: ast.AST) -> bool: 35 | return ( 36 | isinstance(node, ast.Return) 37 | and isinstance(node.value, ast.Name) 38 | and node.value.id == "NotImplemented" 39 | ) 40 | 41 | def analyze_pure_abstract(self, node: ast.AST) -> bool: 42 | assert isinstance( 43 | node, (ast.FunctionDef, ast.AsyncFunctionDef) 44 | ), "Assuming this analysis is only called on functions" 45 | 46 | if not _has_decorator(node, "abstractmethod"): 47 | return False 48 | 49 | children = len(node.body) 50 | 51 | # maximum docstring and one statement 52 | if children > 2: 53 | return False 54 | 55 | if children == 2: 56 | if not self._is_docstring(node.body[0]): 57 | return False 58 | 59 | statement = node.body[1] 60 | else: 61 | statement = node.body[0] 62 | 63 | if ( 64 | isinstance(statement, ast.Pass) 65 | or self._is_ellipsis(statement) 66 | or self._is_raise_NotImplementedException(statement) 67 | or self._is_return_NotImplemented(statement) 68 | or (children == 1 and self._is_docstring(statement)) 69 | ): 70 | return True 71 | 72 | return False 73 | 74 | def visit_FunctionDef(self, node: ast.FunctionDef) -> ast.AST: 75 | self.is_abstract = self.analyze_pure_abstract(node) 76 | return self.generic_visit(node) 77 | 78 | def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> ast.AST: 79 | self.is_abstract = self.analyze_pure_abstract(node) 80 | return self.generic_visit(node) 81 | 
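The AbstractCallableVisitor above only records whether a callable is "pure abstract": a function carrying a bare @abstractmethod decorator whose body is nothing more than a docstring, pass, an ellipsis, raise NotImplementedError, or return NotImplemented. The following is a minimal sketch of driving it on its own; in the project it is normally mixed into AnalysisVisitor, so the standalone usage and the sample source below are illustrative assumptions rather than repository code.

import ast

from darglint2.analysis.abstract_callable_visitor import AbstractCallableVisitor

# Hypothetical sample source; it is only parsed, never executed.
source = '''
from abc import abstractmethod

@abstractmethod
def area(self) -> float:
    """Return the area of the shape."""
    ...
'''

module = ast.parse(source)
function = module.body[1]  # the FunctionDef node (body[0] is the import)
visitor = AbstractCallableVisitor()
visitor.visit(function)
# The decorator list contains a bare "abstractmethod" name and the body is
# only a docstring plus an ellipsis, so the visitor reports True.
print(visitor.is_abstract)

Note that _has_decorator only matches plain name decorators, so a qualified @abc.abstractmethod would not be recognized by this sketch.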
-------------------------------------------------------------------------------- /darglint2/analysis/analysis_helpers.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from typing import Iterable, Union 3 | 4 | 5 | def _has_decorator( 6 | function: Union[ast.FunctionDef, ast.AsyncFunctionDef], 7 | decorators: Union[str, Iterable[str]], 8 | ) -> bool: 9 | if isinstance(decorators, str): 10 | decorators = (decorators,) 11 | 12 | for decorator in function.decorator_list: 13 | # Attributes (setters and getters) won't have an id. 14 | if isinstance(decorator, ast.Name) and decorator.id in decorators: 15 | return True 16 | return False 17 | -------------------------------------------------------------------------------- /darglint2/analysis/analysis_visitor.py: -------------------------------------------------------------------------------- 1 | from .abstract_callable_visitor import AbstractCallableVisitor 2 | from .argument_visitor import ArgumentVisitor 3 | from .assert_visitor import AssertVisitor 4 | from .function_scoped_visitor import FunctionScopedVisitorMixin 5 | from .raise_visitor import RaiseVisitor 6 | from .return_visitor import ReturnVisitor 7 | from .variable_visitor import VariableVisitor 8 | from .yield_visitor import YieldVisitor 9 | 10 | 11 | # ATTENTION: FunctionScopedVisitorMixin needs to be first, 12 | # otherwise it is not able to stop descending into wrapped 13 | # functions. 14 | class AnalysisVisitor( 15 | FunctionScopedVisitorMixin, 16 | AbstractCallableVisitor, 17 | RaiseVisitor, 18 | YieldVisitor, 19 | ArgumentVisitor, 20 | VariableVisitor, 21 | ReturnVisitor, 22 | AssertVisitor, 23 | ): 24 | """Finds attributes which should be part of the function signature.""" 25 | 26 | pass 27 | -------------------------------------------------------------------------------- /darglint2/analysis/argument_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from typing import Any, Dict, List 3 | 4 | 5 | class ArgumentVisitor(ast.NodeVisitor): 6 | """Reports which arguments a function contains.""" 7 | 8 | def __init__(self, *args: List[Any], **kwargs: Dict[str, Any]) -> None: 9 | # https://github.com/python/mypy/issues/5887 10 | super(ArgumentVisitor, self).__init__(*args, **kwargs) # type: ignore 11 | 12 | # The arguments found in the function. 13 | self.arguments: List[str] = list() 14 | self.types: List[str] = list() 15 | 16 | def add_arg_by_name(self, name, arg): 17 | self.arguments.append(name) 18 | if arg.annotation is not None and hasattr(arg.annotation, "id"): 19 | self.types.append(arg.annotation.id) 20 | else: 21 | self.types.append(None) 22 | 23 | def visit_arguments(self, node: ast.arguments) -> ast.AST: 24 | if hasattr(node, "posonlyargs"): 25 | for arg in node.posonlyargs: 26 | self.add_arg_by_name(arg.arg, arg) 27 | 28 | for arg in node.args: 29 | self.add_arg_by_name(arg.arg, arg) 30 | 31 | for arg in node.kwonlyargs: 32 | self.add_arg_by_name(arg.arg, arg) 33 | 34 | # Handle single-star arguments. 
35 | if node.vararg is not None: 36 | name = "*" + node.vararg.arg 37 | self.add_arg_by_name(name, node.vararg) 38 | 39 | if node.kwarg is not None: 40 | name = "**" + node.kwarg.arg 41 | self.add_arg_by_name(name, node.kwarg) 42 | return self.generic_visit(node) 43 | -------------------------------------------------------------------------------- /darglint2/analysis/assert_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from typing import Any, List 3 | 4 | 5 | class AssertVisitor(ast.NodeVisitor): 6 | def __init__(self, *args: Any, **kwargs: Any) -> None: 7 | # Allow the raise visitor to be used in a mixin. 8 | # TODO: https://github.com/python/mypy/issues/4001 9 | super(AssertVisitor, self).__init__(*args, **kwargs) # type: ignore 10 | 11 | self.asserts: List[ast.Assert] = list() 12 | 13 | def visit_Assert(self, node: ast.Assert) -> ast.AST: 14 | self.asserts.append(node) 15 | return self.generic_visit(node) 16 | -------------------------------------------------------------------------------- /darglint2/analysis/function_and_method_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from typing import List, Set, Union 3 | 4 | from .analysis_helpers import _has_decorator 5 | 6 | 7 | class FunctionAndMethodVisitor(ast.NodeVisitor): 8 | def __init__(self) -> None: 9 | self.callables: Set[Union[ast.FunctionDef, ast.AsyncFunctionDef]] = set() 10 | self._methods: Set[Union[ast.FunctionDef, ast.AsyncFunctionDef]] = set() 11 | self._properties: Set[Union[ast.FunctionDef, ast.AsyncFunctionDef]] = set() 12 | 13 | @property 14 | def functions(self) -> List[Union[ast.FunctionDef, ast.AsyncFunctionDef]]: 15 | return list(self.callables - self._methods - self._properties) 16 | 17 | @property 18 | def methods(self) -> List[Union[ast.FunctionDef, ast.AsyncFunctionDef]]: 19 | return list(self._methods) 20 | 21 | @property 22 | def properties(self) -> List[Union[ast.FunctionDef, ast.AsyncFunctionDef]]: 23 | return list(self._properties) 24 | 25 | def visit_ClassDef(self, node: ast.ClassDef) -> ast.AST: 26 | for item in node.body: 27 | if isinstance(item, ast.FunctionDef) or isinstance( 28 | item, ast.AsyncFunctionDef 29 | ): 30 | if _has_decorator(item, "property"): 31 | self._properties.add(item) 32 | else: 33 | self._methods.add(item) 34 | return self.generic_visit(node) 35 | 36 | def visit_FunctionDef(self, node: ast.FunctionDef) -> ast.AST: 37 | self.callables.add(node) 38 | return self.generic_visit(node) 39 | 40 | def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> ast.AST: 41 | self.callables.add(node) 42 | return self.generic_visit(node) 43 | -------------------------------------------------------------------------------- /darglint2/analysis/function_scoped_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from typing import Any 3 | 4 | 5 | class FunctionScopedVisitorMixin(ast.NodeVisitor): 6 | """A visitor which is scoped to a single function. 7 | 8 | This visitor assumes that its `visit` method is called 9 | on a `ast.FunctionDef`, and it will not examine nested 10 | functions. 11 | 12 | """ 13 | 14 | def __init__(self, *args: Any, **kwargs: Any) -> None: 15 | # TODO: https://github.com/python/mypy/issues/4001 16 | super(FunctionScopedVisitorMixin, self).__init__(*args, **kwargs) # type: ignore # noqa: E501 17 | 18 | # Whether we have passed the initial `FunctionDef` node. 
19 | self.in_function = False 20 | 21 | def visit_Lambda(self, node: ast.Lambda) -> ast.AST: 22 | if not self.in_function: 23 | self.in_function = True 24 | return getattr(super(), "visit_Lambda", super().generic_visit)(node) 25 | else: 26 | # Return a synthetic Pass node, to make type checking happy 27 | # (and to not violate the contract.) Since it has no children, 28 | # it will effectively stop the visit. 29 | return ast.Pass() 30 | 31 | def visit_FunctionDef(self, node: ast.FunctionDef) -> ast.AST: 32 | if not self.in_function: 33 | self.in_function = True 34 | return getattr(super(), "visit_FunctionDef", super().generic_visit)(node) 35 | else: 36 | return ast.Pass() 37 | 38 | def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> ast.AST: 39 | if not self.in_function: 40 | self.in_function = True 41 | return getattr(super(), "visit_AsyncFunctionDef", super().generic_visit)( 42 | node 43 | ) 44 | else: 45 | return ast.Pass() 46 | -------------------------------------------------------------------------------- /darglint2/analysis/return_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from typing import Any, List, Optional 3 | 4 | 5 | class ReturnVisitor(ast.NodeVisitor): 6 | """A visitor which checks for *returns* nodes.""" 7 | 8 | def __init__(self, *args: Any, **kwargs: Any) -> None: 9 | # TODO: https://github.com/python/mypy/issues/4001 10 | super(ReturnVisitor, self).__init__(*args, **kwargs) # type: ignore 11 | 12 | # A list of the return nodes encountered. 13 | self.returns: List[Optional[ast.Return]] = list() 14 | self.return_types: List[Optional[ast.AST]] = list() 15 | 16 | def visit_Return(self, node: ast.Return) -> ast.AST: 17 | self.returns.append(node) 18 | return self.generic_visit(node) 19 | -------------------------------------------------------------------------------- /darglint2/analysis/variable_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from typing import Any, List 3 | 4 | 5 | class VariableVisitor(ast.NodeVisitor): 6 | def __init__(self, *args: Any, **kwargs: Any) -> None: 7 | # TODO: https://github.com/python/mypy/issues/4001 8 | super(VariableVisitor, self).__init__(*args, **kwargs) # type: ignore 9 | self.variables: List[ast.Name] = list() 10 | 11 | def visit_Name(self, node: ast.Name) -> ast.AST: 12 | # Only gather names during assignment. Others are unnecessary, 13 | # and could be from a different context. 14 | if hasattr(node, "ctx") and isinstance(node.ctx, ast.Store): 15 | self.variables.append(node) 16 | return self.generic_visit(node) 17 | -------------------------------------------------------------------------------- /darglint2/analysis/yield_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from typing import Any, List, Union 3 | 4 | 5 | class YieldVisitor(ast.NodeVisitor): 6 | """A visitor which checks for *returns* nodes.""" 7 | 8 | def __init__(self, *args: Any, **kwargs: Any) -> None: 9 | # TODO: https://github.com/python/mypy/issues/4001 10 | super(YieldVisitor, self).__init__(*args, **kwargs) # type: ignore 11 | 12 | # A list of the return nodes encountered. 
13 | self.yields: List[Union[ast.Yield, ast.YieldFrom]] = list() 14 | 15 | def visit_Yield(self, node: ast.Yield) -> ast.AST: 16 | self.yields.append(node) 17 | return self.generic_visit(node) 18 | 19 | def visit_YieldFrom(self, node: ast.YieldFrom) -> ast.AST: 20 | self.yields.append(node) 21 | return self.generic_visit(node) 22 | -------------------------------------------------------------------------------- /darglint2/custom_assert.py: -------------------------------------------------------------------------------- 1 | """Defines a custom assert function for darglint2.""" 2 | 3 | from typing import Any, Optional 4 | 5 | from .config import AssertStyle, get_config, get_logger 6 | 7 | 8 | def Assert(expr: Any, message: Optional[str]) -> None: 9 | """Asserts that the given expression is true. 10 | 11 | Args: 12 | expr: The expression to evaluate. It will be 13 | interpreted as a boolean. 14 | message: A message describing the expectation of 15 | this assertion, describing the error encountered, 16 | or some other debugging information. 17 | 18 | Raises: 19 | AssertionError: If darglint2 is configured to raise 20 | on failed assertions, otherwise logs. 21 | 22 | """ 23 | if expr: 24 | return 25 | 26 | style = get_config().assert_style 27 | if style == AssertStyle.RAISE: 28 | raise AssertionError(message) 29 | elif style == AssertStyle.LOG: 30 | logger = get_logger() 31 | logger.error(message or "Failed assertion") 32 | -------------------------------------------------------------------------------- /darglint2/docstring/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akaihola/darglint2/72c51e90c187610c52fd5076b4c28e1202ece84f/darglint2/docstring/__init__.py -------------------------------------------------------------------------------- /darglint2/docstring/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import Callable, ClassVar, Dict, Iterable, List, Optional, Tuple, Union 3 | 4 | from ..strictness import Strictness 5 | from .sections import Sections 6 | 7 | 8 | class BaseDocstring(ABC): 9 | """The interface for a docstring object which can be used with checkers. 10 | 11 | Unfortunately, slight differences in the parsers result 12 | in kind of inconsistent values for different nodes. (Sometimes 13 | there are blank lines and such.) For that reason, we include some 14 | tests to try to make the output from the below three methods as 15 | consistent as possible. 16 | 17 | """ 18 | 19 | supported_sections: ClassVar[Tuple[Sections, ...]] = tuple(Sections) 20 | 21 | @abstractmethod 22 | def get_section(self, section: Sections) -> Optional[str]: 23 | """Get an entire section of the docstring. 24 | 25 | Args: 26 | section: The section to get. 27 | 28 | Raises: 29 | Exception: If the section type is unsupported. 30 | 31 | Returns: 32 | A string representing the section as a contiguous piece, 33 | or None if the section was not there. 34 | 35 | # noqa: I202 36 | # noqa: I402 37 | 38 | """ 39 | pass 40 | 41 | @abstractmethod 42 | def get_types(self, section: Sections) -> Optional[Union[str, List[Optional[str]]]]: 43 | """Get the type of the section, or of the items in the section. 44 | 45 | Args: 46 | section: The section whose type or types we are getting. 47 | If the section is a returns section, for example, we get 48 | the return type (or none if not specified). 
If it is an 49 | arguments section, we get the types for each item. 50 | 51 | Raises: 52 | Exception: If the given section does not have a type. 53 | (For example, the short description.) 54 | 55 | Returns: 56 | The type or types of the section, or None if it's not 57 | defined. 58 | 59 | # noqa: I202 60 | # noqa: I402 61 | 62 | """ 63 | pass 64 | 65 | @abstractmethod 66 | def get_items(self, section: Sections) -> Optional[List[str]]: 67 | """Get the item names in the section. 68 | 69 | Args: 70 | section: The section whose item names we are getting. 71 | 72 | Raises: 73 | Exception: If the given section is not composed of 74 | items (e.g. a returns section, or a long description.) 75 | 76 | Returns: 77 | The items in the section (or None of the section is 78 | not defined.) 79 | 80 | # noqa: I202 81 | # noqa: I402 82 | 83 | """ 84 | pass 85 | 86 | @abstractmethod 87 | def get_style_errors(self) -> Iterable[Tuple[Callable, Tuple[int, int]]]: 88 | """Get any style errors annotated on the tree. 89 | 90 | Yields: 91 | Instances of DarglintErrors for style issues. 92 | 93 | # noqa: I302 94 | 95 | """ 96 | pass 97 | 98 | @abstractmethod 99 | def get_noqas(self) -> Dict[str, List[str]]: 100 | pass 101 | 102 | @abstractmethod 103 | def get_line_numbers(self, node_type: str) -> Optional[Tuple[int, int]]: 104 | pass 105 | 106 | @abstractmethod 107 | def get_line_numbers_for_value( 108 | self, node_type: str, value: str 109 | ) -> Optional[Tuple[int, int]]: 110 | pass 111 | 112 | @property 113 | @abstractmethod 114 | def ignore_all(self) -> bool: 115 | pass 116 | 117 | def satisfies_strictness(self, strictness): 118 | # type(Strictness) -> bool 119 | """Return true if the docstring has no more than the min strictness. 120 | 121 | Args: 122 | strictness: The minimum amount of strictness which should 123 | be present in the docstring. 124 | 125 | Returns: 126 | True if there is no more than the minimum amount of strictness. 127 | 128 | """ 129 | sections = { 130 | section for section in self.supported_sections if self.get_section(section) 131 | } 132 | if strictness == Strictness.SHORT_DESCRIPTION: 133 | return sections == {Sections.SHORT_DESCRIPTION} 134 | elif strictness == Strictness.LONG_DESCRIPTION: 135 | return sections in ( 136 | {Sections.SHORT_DESCRIPTION}, 137 | # Shouldn't be possible, but if it is in the future, then 138 | # we should allow this. 139 | {Sections.LONG_DESCRIPTION}, 140 | {Sections.SHORT_DESCRIPTION, Sections.LONG_DESCRIPTION}, 141 | ) 142 | else: 143 | return False 144 | -------------------------------------------------------------------------------- /darglint2/docstring/docstring.py: -------------------------------------------------------------------------------- 1 | from . 
import google, numpy, sphinx 2 | from .base import BaseDocstring 3 | 4 | 5 | class Docstring(object): 6 | """A factory method for creating docstrings.""" 7 | 8 | @staticmethod 9 | def from_google(root: str) -> BaseDocstring: 10 | return google.Docstring(root) 11 | 12 | @staticmethod 13 | def from_sphinx(root, config: str = None) -> BaseDocstring: 14 | return sphinx.Docstring(root) 15 | 16 | @staticmethod 17 | def from_numpy(root, config: str = None) -> BaseDocstring: 18 | return numpy.Docstring(root) 19 | -------------------------------------------------------------------------------- /darglint2/docstring/sections.py: -------------------------------------------------------------------------------- 1 | import enum 2 | 3 | 4 | class Sections(enum.Enum): 5 | SHORT_DESCRIPTION = 0 6 | LONG_DESCRIPTION = 1 7 | ARGUMENTS_SECTION = 2 8 | RAISES_SECTION = 4 9 | YIELDS_SECTION = 6 10 | RETURNS_SECTION = 8 11 | VARIABLES_SECTION = 10 12 | NOQAS = 13 13 | -------------------------------------------------------------------------------- /darglint2/docstring/style.py: -------------------------------------------------------------------------------- 1 | import enum 2 | 3 | 4 | class DocstringStyle(enum.Enum): 5 | GOOGLE = 0 6 | SPHINX = 1 7 | NUMPY = 2 8 | 9 | @classmethod 10 | def from_string(cls, style): 11 | style = style.lower().strip() 12 | if style == "google": 13 | return cls.GOOGLE 14 | if style == "sphinx": 15 | return cls.SPHINX 16 | if style == "numpy": 17 | return cls.NUMPY 18 | 19 | raise Exception( 20 | 'Unrecognized style "{}". Should be one of {}'.format( 21 | style, [x.name for x in DocstringStyle] 22 | ) 23 | ) 24 | -------------------------------------------------------------------------------- /darglint2/error_report.py: -------------------------------------------------------------------------------- 1 | """The error reporting classes.""" 2 | 3 | import ast # noqa 4 | from collections import OrderedDict 5 | from typing import Dict, Iterator, List, Tuple, Union 6 | 7 | from darglint2.config import get_config # noqa 8 | 9 | from .errors import DarglintError # noqa 10 | from .function_description import get_line_number_from_function 11 | 12 | 13 | class ErrorReport(object): 14 | """Reports the errors for the given run.""" 15 | 16 | def __init__( 17 | self, 18 | errors: List[DarglintError], 19 | filename: str, 20 | verbosity: int = 2, 21 | ) -> None: 22 | """Create a new error report. 23 | 24 | Args: 25 | errors: A list of DarglintError instances. 26 | filename: The name of the file the error came from. 27 | verbosity: A number in the set, {1, 2}, representing low 28 | and high verbosity. 29 | 30 | """ 31 | self.filename = filename 32 | self.verbosity = verbosity 33 | self.errors = errors 34 | self.error_dict = self._group_errors_by_function() 35 | 36 | def _sort(self) -> None: 37 | self.errors.sort(key=lambda x: x.function.lineno) 38 | 39 | def _group_errors_by_function( 40 | self, 41 | ) -> Dict[Union[ast.FunctionDef, ast.AsyncFunctionDef], List[DarglintError]]: 42 | # noqa: E501 43 | """Sort the current errors by function, and put into an OrderedDict. 44 | 45 | Returns: 46 | An ordered dictionary of functions and their errors. 47 | 48 | """ 49 | self._sort() 50 | error_dict: Dict = OrderedDict() 51 | current = None # The current function 52 | for error in self.errors: 53 | if current != error.function: 54 | current = error.function 55 | error_dict[current] = list() 56 | error_dict[current].append(error) 57 | 58 | # Sort all of the errors returned by the function 59 | # alphabetically. 
60 | for key in error_dict: 61 | error_dict[key].sort(key=lambda x: x.message() or "") 62 | 63 | # Sort all of the errors returned by the key 64 | # by the line numbers. 65 | for key in error_dict: 66 | error_dict[key].sort(key=lambda x: x.line_numbers or (0, 0)) 67 | 68 | return error_dict 69 | 70 | def _get_error_description(self, error: DarglintError) -> str: 71 | """Get the error description. 72 | 73 | Args: 74 | error: The error to describe. 75 | 76 | Returns: 77 | A string representing the error. 78 | 79 | """ 80 | line_number = get_line_number_from_function(error.function) 81 | if hasattr(error.function, "decorator_list") and error.function.decorator_list: 82 | line_number += len(error.function.decorator_list) 83 | if error.line_numbers: 84 | line_number += error.line_numbers[0] + 1 85 | return get_config().message_template.format( 86 | msg_id=error.error_code, 87 | msg=error.message(verbosity=self.verbosity), 88 | path=self.filename, 89 | obj=error.function.name, 90 | line=line_number, # error.function.lineno, 91 | ) 92 | 93 | def __str__(self) -> str: 94 | """Return a string representation of this error report. 95 | 96 | Returns: 97 | A string representation of this error report. 98 | 99 | """ 100 | if len(self.errors) == 0: 101 | return "" 102 | ret = list() 103 | for function in self.error_dict: 104 | for error in self.error_dict[function]: 105 | ret.append(self._get_error_description(error)) 106 | return "\n".join(ret) 107 | 108 | def flake8_report(self) -> Iterator[Tuple[int, int, str]]: 109 | # line, col, message 110 | for function in self.error_dict: 111 | for error in self.error_dict[function]: 112 | # TODO: Shouldn't get_line_number (here and above) return 113 | # the correct line number? Why do we have to handle decorators 114 | # here? 115 | line_number = get_line_number_from_function(error.function) 116 | if ( 117 | hasattr(error.function, "decorator_list") 118 | and error.function.decorator_list 119 | ): 120 | line_number += len(error.function.decorator_list) 121 | if error.line_numbers: 122 | line_number += error.line_numbers[0] + 1 123 | else: 124 | line_number += 1 125 | # TODO: Do we need verbosity here? 126 | message = "{} {}".format( 127 | error.error_code, 128 | error.message(self.verbosity), 129 | ) 130 | yield (line_number, 0, message) 131 | -------------------------------------------------------------------------------- /darglint2/flake8_entry.py: -------------------------------------------------------------------------------- 1 | """The entry point for flake8.""" 2 | 3 | from typing import Iterator, Tuple 4 | 5 | from . import __version__ 6 | from .config import get_config 7 | from .docstring.style import DocstringStyle 8 | from .function_description import get_function_descriptions 9 | from .integrity_checker import IntegrityChecker 10 | from .strictness import Strictness 11 | 12 | 13 | class DarglintChecker(object): 14 | name = "flake8-darglint2" 15 | version = __version__ 16 | config = get_config() 17 | 18 | def __init__(self, tree, filename): 19 | self.tree = tree 20 | self.filename = filename 21 | self.verbosity = 2 22 | 23 | def run(self) -> Iterator[Tuple[int, int, str, type]]: 24 | if "*" in self.config.ignore: 25 | return 26 | 27 | # Remember the last line number, so that if there is an 28 | # exception raised by Darglint2, we can at least give a decent 29 | # idea of where it was raised. 
30 | last_line = 1 31 | try: 32 | functions = get_function_descriptions(self.tree) 33 | checker = IntegrityChecker( 34 | raise_errors=False, 35 | ) 36 | checker.config = self.config 37 | for function in functions: 38 | checker.run_checks(function) 39 | 40 | error_report = checker.get_error_report(self.verbosity, self.filename) 41 | for line, col, msg in error_report.flake8_report(): 42 | last_line = line 43 | yield (line, col, msg, type(self)) 44 | 45 | except Exception as ex: 46 | yield ( 47 | last_line, 48 | 0, 49 | "DAR000: Unexpected exception in darglint2: " + str(ex), 50 | type(self), 51 | ) 52 | 53 | @classmethod 54 | def add_options(cls, option_manager): 55 | defaults = cls.config 56 | 57 | option_manager.add_option( 58 | "--docstring-style", 59 | default=defaults.style.name, 60 | parse_from_config=True, 61 | help="Docstring style to use for Darglint2", 62 | ) 63 | 64 | option_manager.add_option( 65 | "--strictness", 66 | default=defaults.strictness.name, 67 | parse_from_config=True, 68 | help="Strictness level to use for Darglint2", 69 | ) 70 | 71 | option_manager.add_option( 72 | "--darglint2-ignore-regex", 73 | type=str, 74 | parse_from_config=True, 75 | help=( 76 | "Methods/function names matching this regex will be skipped " 77 | "by Darglint2 during analysis." 78 | ), 79 | ) 80 | 81 | @classmethod 82 | def parse_options(cls, options): 83 | cls.config.style = DocstringStyle.from_string(options.docstring_style) 84 | cls.config.strictness = Strictness.from_string(options.strictness) 85 | cls.config.ignore_regex = options.darglint2_ignore_regex 86 | -------------------------------------------------------------------------------- /darglint2/parse/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akaihola/darglint2/72c51e90c187610c52fd5076b4c28e1202ece84f/darglint2/parse/__init__.py -------------------------------------------------------------------------------- /darglint2/parse/combinator.py: -------------------------------------------------------------------------------- 1 | r"""A parser combinator. 2 | 3 | This combinator first parses a docstring into sections 4 | (any block separated by two newlines), then calls individual 5 | CYK parsers for each subsection. The subparsers are called 6 | in order of specificity. 7 | 8 | Although weighting an overall CYK parser could handle an 9 | entire docstring, this should be much more efficient. A CYK 10 | parser is O(n^3). By reducing the docstring into k separate 11 | sections, we can reduce this to 12 | 13 | O(n + k(n / k)^3) 14 | = O(n + k (n^3 / k^3)) 15 | = O(n + (n^3 / k^2)) 16 | 17 | Which is still a member of O(n^3), since k is constant. However, 18 | since our k is around 4 members, this constant divisor will 19 | actually impart a significant improvement, even though it doesn't 20 | change the order. 21 | 22 | Upon inspection, threading did not significantly improve performance. 23 | The probem, it seems, is that some of the sections take much 24 | longer than others, and so threading does nothing to improve 25 | speed. (It actually made it worse.) 26 | 27 | """ 28 | 29 | 30 | def parser_combinator(top, lookup, combinator, tokens): 31 | """Parse the given tokens, combining in the given fashion. 32 | 33 | Args: 34 | top: The top-level parser. Separates the tokens into 35 | sections which can be consumed by the parsers in the 36 | lookup function. 37 | lookup: For a given section from the top-level parser, 38 | returns a list of possible parsers. 
39 | combinator: Combines the resultant nodes from parsing 40 | each section from the top-level parser. 41 | tokens: The tokens to be parsed. 42 | 43 | Returns: 44 | The top-level node from the combinator. 45 | 46 | """ 47 | sections = top(tokens) 48 | parsed_sections = list() 49 | for i, section in enumerate(sections): 50 | parsed = None 51 | for parse in lookup(section, i): 52 | parsed = parse(section) 53 | if parsed: 54 | break 55 | if not parsed: 56 | return None 57 | parsed_sections.append(parsed) 58 | return combinator(*parsed_sections) 59 | -------------------------------------------------------------------------------- /darglint2/parse/cyk.py: -------------------------------------------------------------------------------- 1 | """An implementation of the CYK algorithm. 2 | 3 | The CYK algorithm was chosen because the Google 4 | docstring format allows for ambiguous representations, 5 | which CYK can handle without devolving into a terrible 6 | complexity. (It has a worst case of O(n^3). 7 | 8 | There are faster, on average, algorithms, which might 9 | be better suited to the average task of Darglint2. 10 | However, CYK is relatively simple, and is well documented. 11 | (Others, like chart parsing, are much more difficult 12 | to find examples of.) 13 | 14 | This representation was based directly on the wikipedia 15 | article, https://en.wikipedia.org/wiki/CYK_algorithm. 16 | 17 | """ 18 | 19 | from typing import List, Optional 20 | 21 | from ..node import CykNode 22 | from ..token import Token 23 | from .grammar import BaseGrammar 24 | 25 | 26 | def parse(grammar: BaseGrammar, tokens: List[Token]) -> Optional[CykNode]: 27 | if not tokens: 28 | return None 29 | n = len(tokens) 30 | r = len(grammar.productions) 31 | P: List[List[List[Optional[CykNode]]]] = [ 32 | [[None for _ in range(r)] for _ in range(n)] for _ in range(n) 33 | ] 34 | lookup = grammar.get_symbol_lookup() 35 | for s, token in enumerate(tokens): 36 | for v, production in enumerate(grammar.productions): 37 | for rhs in production.rhs: 38 | if len(rhs) > 2: 39 | continue 40 | 41 | # TODO: Cast to a TerminalDerivation? 42 | token_type, weight = rhs # type: ignore 43 | if token.token_type == token_type: 44 | P[0][s][v] = CykNode( 45 | production.lhs, 46 | value=token, 47 | weight=weight, 48 | ) 49 | for l in range(2, n + 1): # noqa: E741 50 | for s in range(n - l + 2): 51 | for p in range(l): 52 | for a, production in enumerate(grammar.productions): 53 | for derivation in production.rhs: 54 | is_terminal_derivation = len(derivation) <= 2 55 | if is_terminal_derivation: 56 | continue 57 | 58 | # TODO: Cast the derivation to a NonTerminalDerivation? 
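# A non-terminal derivation is a 4-tuple: the annotations attached to
# the production (error and identifier classes from the grammar), the
# two right-hand-side symbols B and C, and a weight; higher-weighted
# derivations win when the same table cell is filled more than once
# (an existing entry is only kept if its weight is strictly higher).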
59 | annotations, B, C, weight = derivation # type: ignore 60 | b = lookup[B] 61 | c = lookup[C] 62 | lchild = P[p - 1][s - 1][b] 63 | rchild = P[l - p - 1][s + p - 1][c] 64 | if lchild and rchild: 65 | old = P[l - 1][s - 1][a] 66 | if old and old.weight > weight: 67 | continue 68 | P[l - 1][s - 1][a] = CykNode( 69 | production.lhs, 70 | lchild, 71 | rchild, 72 | annotations=annotations, 73 | weight=weight, 74 | ) 75 | return P[n - 1][0][lookup[grammar.start]] 76 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/Makefile: -------------------------------------------------------------------------------- 1 | 2 | sections = google_arguments_section.py \ 3 | google_long_description.py \ 4 | google_raises_section.py \ 5 | google_returns_section.py \ 6 | google_returns_section_without_type.py \ 7 | google_short_description.py \ 8 | google_yields_section.py \ 9 | google_yields_section_without_type.py \ 10 | sphinx_argument_type_section.py \ 11 | sphinx_arguments_section.py \ 12 | sphinx_base.py \ 13 | sphinx_long_description.py \ 14 | sphinx_raises_section.py \ 15 | sphinx_returns_section.py \ 16 | sphinx_return_type_section.py \ 17 | sphinx_short_description.py \ 18 | sphinx_variable_type_section.py \ 19 | sphinx_variables_section.py \ 20 | sphinx_yields_section.py \ 21 | sphinx_yield_type_section.py \ 22 | numpy_arguments_section.py \ 23 | numpy_long_description.py \ 24 | numpy_other_arguments_section.py \ 25 | numpy_raises_section.py \ 26 | numpy_receives_section.py \ 27 | numpy_returns_section.py \ 28 | numpy_short_description.py \ 29 | numpy_warns_section.py \ 30 | numpy_yields_section.py 31 | 32 | all: $(sections) 33 | 34 | %.py: %.bnf 35 | bnf_to_cnf -o $@ $< 36 | 37 | clean: 38 | rm $(sections) 39 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akaihola/darglint2/72c51e90c187610c52fd5076b4c28e1202ece84f/darglint2/parse/grammars/__init__.py -------------------------------------------------------------------------------- /darglint2/parse/grammars/google_arguments_section.bnf: -------------------------------------------------------------------------------- 1 | import google_base.bnf 2 | import google_types.bnf 3 | import noqa_statement.bnf 4 | 5 | from darglint2.errors import ( 6 | EmptyDescriptionError, 7 | EmptyTypeError, 8 | IndentError, 9 | ) 10 | from darglint2.parse.identifiers import ( 11 | ArgumentIdentifier, 12 | ArgumentItemIdentifier, 13 | ArgumentTypeIdentifier, 14 | ) 15 | 16 | Grammar: ArgumentsGrammar 17 | 18 | start: 19 | 20 | 21 | ::= 22 | 23 | 24 | ::= 25 | | 26 | 27 | 28 | ::= @ArgumentItemIdentifier 29 | 30 | | @EmptyDescriptionError 31 | 32 | 33 | ::= @ArgumentIdentifier 34 | 35 | | 2 @ArgumentIdentifier 36 | @EmptyTypeError 37 | 38 | | @ArgumentIdentifier 39 | @ArgumentTypeIdentifier 40 | 41 | | @ArgumentIdentifier 42 | @ArgumentTypeIdentifier 43 | 44 | | @ArgumentIdentifier 45 | 46 | 47 | # The normal can be indented any number of times, 48 | # but has at least two indents. By 49 | # weighing it more in the grammar, we capture under-indented 50 | # lines. 51 | # 52 | # We also need to have a version with a newline and one without 53 | # a newline, so that we can handle the last line in the arguments 54 | # section as well as all other lines. 
55 | 56 | ::= 2 57 | | 2 58 | | 2 59 | | 2 60 | | @IndentError 61 | 62 | | @IndentError 63 | 64 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/google_base.bnf: -------------------------------------------------------------------------------- 1 | import terminals.bnf 2 | import noqa_statement.bnf 3 | 4 | # The base grammar could be different for different docstrings. 5 | # So we import it here. 6 | from darglint2.parse.grammar import ( 7 | BaseGrammar, 8 | P, 9 | ) 10 | 11 | 12 | ::= 13 | | 14 | 15 | 16 | 17 | ::= 18 | | 19 | 20 | 21 | ::= 22 | | 23 | 24 | 25 | ::= 26 | | 27 | 28 | 29 | ::= 30 | | 31 | 32 | # A paragraph allows for arbitrary indentation. No indentation is 33 | # required, however. 34 | 35 | ::= 36 | | 37 | | 38 | | 39 | 40 | 41 | ::= 42 | # Noqa statements, if in a line, should be at the end. 43 | | 44 | # The noqa statement could be on its own. 45 | | 46 | | 47 | 48 | 49 | ::= 50 | | 51 | | 52 | | 53 | 54 | 55 | ::= 56 | 57 | 58 | ::= 59 | 60 | 61 | ::= 62 | | ε 63 | 64 | 65 | ::= 66 | 67 | 68 | ::= 69 | | ε 70 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/google_long_description.bnf: -------------------------------------------------------------------------------- 1 | import google_base.bnf 2 | 3 | Grammar: LongDescriptionGrammar 4 | 5 | start: 6 | 7 | 8 | ::= 9 | | 10 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/google_raises_section.bnf: -------------------------------------------------------------------------------- 1 | import google_base.bnf 2 | import google_types.bnf 3 | 4 | from darglint2.errors import ( 5 | IndentError, 6 | EmptyDescriptionError, 7 | ) 8 | from darglint2.parse.identifiers import ( 9 | ExceptionIdentifier, 10 | ExceptionItemIdentifier, 11 | ) 12 | 13 | Grammar: RaisesGrammar 14 | 15 | start: 16 | 17 | 18 | ::= 19 | 20 | 21 | ::= 22 | | 23 | 24 | 25 | ::= 2 @ExceptionItemIdentifier 26 | 27 | | 2 @ExceptionItemIdentifier 28 | 29 | | @IndentError 30 | @ExceptionItemIdentifier 31 | 32 | | @IndentError 33 | @ExceptionItemIdentifier 34 | 35 | | @IndentError 36 | @ExceptionItemIdentifier 37 | 38 | | @EmptyDescriptionError 39 | 40 | 41 | 42 | ::= @ExceptionIdentifier 43 | 44 | 45 | ::= "TokenType\.WORD" 46 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/google_returns_section.bnf: -------------------------------------------------------------------------------- 1 | import google_base.bnf 2 | import google_types.bnf 3 | 4 | Grammar: ReturnsGrammar 5 | 6 | start: 7 | 8 | 9 | ::= 10 | 11 | 12 | ::= 13 | 14 | 15 | ::= 16 | | 17 | 18 | 19 | ::= 20 | 21 | 22 | ::= 23 | | 24 | 25 | 26 | ::= 27 | | 28 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/google_returns_section_without_type.bnf: -------------------------------------------------------------------------------- 1 | import google_base.bnf 2 | import google_types.bnf 3 | 4 | Grammar: ReturnsWithoutTypeGrammar 5 | 6 | start: 7 | 8 | 9 | ::= 10 | 11 | 12 | ::= 13 | 14 | 15 | ::= 16 | 17 | 18 | ::= 19 | 20 | 21 | ::= 22 | | 23 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/google_short_description.bnf: -------------------------------------------------------------------------------- 1 | import google_base.bnf 2 | 3 | Grammar: ShortDescriptionGrammar 
4 | 5 | start: 6 | 7 | 8 | ::= 9 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/google_types.bnf: -------------------------------------------------------------------------------- 1 | import google_base.bnf 2 | import terminals.bnf 3 | 4 | from darglint2.errors import ( 5 | ParameterMalformedError, 6 | ) 7 | 8 | # Since we're not capturing commas, this assumes that a comma is 9 | # present. We'll probably want to add commas to the parsed symbols. 10 | Grammar: TypesGrammar 11 | 12 | start: 13 | 14 | 15 | ::= 2 16 | | 1 17 | 18 | 19 | ::= 20 | 21 | 22 | ::= 23 | 24 | # The type words, but allowing colons. 25 | 26 | ::= 27 | | @ParameterMalformedError 28 | | 29 | | ε 30 | 31 | 32 | ::= 33 | | 34 | | 35 | 36 | 37 | ::= 38 | | 39 | 40 | 41 | ::= 42 | | 43 | | 44 | 45 | 46 | ::= 47 | | 48 | | 49 | 50 | 51 | ::= 52 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/google_yields_section.bnf: -------------------------------------------------------------------------------- 1 | import google_base.bnf 2 | import google_types.bnf 3 | 4 | Grammar: YieldsGrammar 5 | 6 | start: 7 | 8 | 9 | ::= 10 | 11 | 12 | ::= 13 | 14 | 15 | ::= 16 | | 17 | 18 | 19 | ::= 20 | 21 | 22 | ::= 23 | | 24 | 25 | 26 | ::= 27 | | 28 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/google_yields_section_without_type.bnf: -------------------------------------------------------------------------------- 1 | import google_base.bnf 2 | import google_types.bnf 3 | 4 | Grammar: YieldsWithoutTypeGrammar 5 | 6 | start: 7 | 8 | 9 | ::= 10 | 11 | 12 | ::= 13 | 14 | 15 | ::= 16 | 17 | 18 | ::= 19 | 20 | 21 | ::= 22 | | 23 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/noqa_statement.bnf: -------------------------------------------------------------------------------- 1 | import terminals.bnf 2 | 3 | from darglint2.parse.identifiers import ( 4 | NoqaIdentifier, 5 | ) 6 | 7 | 8 | ::= 9 | | ε 10 | 11 | 12 | ::= @NoqaIdentifier 13 | | @NoqaIdentifier 14 | 15 | 16 | ::= 17 | 18 | 19 | ::= 20 | | 21 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/numpy.bnf: -------------------------------------------------------------------------------- 1 | import terminals.bnf 2 | import noqa_statement.bnf 3 | 4 | 5 | 6 | ::= 7 | | 8 | 9 | 10 | ::= 11 | | 12 | | 13 | 14 | 15 | ::= 16 | 17 | 18 | ::= 19 | | 20 | 21 | 22 | ::= 23 | | 24 | 25 | 26 | ::= 27 | | 28 | | 29 | | 30 | 31 | 32 | ::= 33 | 34 | 35 | ::= 36 | | ε 37 | 38 | 39 | ::= 40 | 41 | 42 | ::= 43 | | 44 | | ε 45 | 46 | 47 | ::= 48 | | 49 | | 50 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/numpy_arguments_section.bnf: -------------------------------------------------------------------------------- 1 | import terminals.bnf 2 | import noqa_statement.bnf 3 | import numpy.bnf 4 | 5 | from darglint2.errors import ( 6 | EmptyDescriptionError, 7 | EmptyTypeError, 8 | ) 9 | from darglint2.parse.grammar import ( 10 | BaseGrammar, 11 | P, 12 | ) 13 | from darglint2.parse.identifiers import ( 14 | ArgumentItemIdentifier, 15 | ArgumentTypeIdentifier, 16 | ) 17 | 18 | 19 | Grammar: ArgumentsGrammar 20 | 21 | start: 22 | 23 | 24 | 25 | ::= 26 | | 27 | 28 | 29 | ::=
30 | 31 | 32 | ::= 33 | | 34 | | 35 | | 36 | 37 | 38 | ::= @ArgumentItemIdentifier 39 | 40 | | @ArgumentItemIdentifier 41 | @EmptyTypeError 42 | 43 | | @ArgumentItemIdentifier 44 | @ArgumentTypeIdentifier 45 | 46 | 47 | 48 | ::= @ArgumentItemIdentifier 49 | @EmptyDescriptionError 50 | 51 | | @ArgumentItemIdentifier 52 | @EmptyDescriptionError 53 | @EmptyTypeError 54 | 55 | | @ArgumentItemIdentifier 56 | @ArgumentTypeIdentifier 57 | @EmptyDescriptionError 58 | 59 | 60 | 61 | ::= 62 | | 63 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/numpy_long_description.bnf: -------------------------------------------------------------------------------- 1 | import numpy.bnf 2 | 3 | from darglint2.parse.grammar import ( 4 | BaseGrammar, 5 | P, 6 | ) 7 | 8 | 9 | Grammar: LongDescriptionGrammar 10 | 11 | start: 12 | 13 | 14 | 15 | ::= 16 | | 17 | | 18 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/numpy_other_arguments_section.bnf: -------------------------------------------------------------------------------- 1 | import terminals.bnf 2 | import noqa_statement.bnf 3 | import numpy.bnf 4 | 5 | from darglint2.parse.grammar import ( 6 | BaseGrammar, 7 | P, 8 | ) 9 | from darglint2.parse.identifiers import ( 10 | ArgumentItemIdentifier, 11 | ArgumentTypeIdentifier, 12 | ) 13 | 14 | 15 | Grammar: OtherArgumentsGrammar 16 | 17 | start: 18 | 19 | 20 | ::= 21 | | 22 | 23 | 24 | ::=
25 | 26 | 27 | ::= 28 | | 29 | 30 | 31 | ::= @ArgumentItemIdentifier 32 | 33 | 34 | | @ArgumentItemIdentifier 35 | 36 | 37 | | @ArgumentItemIdentifier 38 | @ArgumentTypeIdentifier 39 | 40 | 41 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/numpy_raises_section.bnf: -------------------------------------------------------------------------------- 1 | import numpy.bnf 2 | import terminals.bnf 3 | import noqa_statement.bnf 4 | 5 | from darglint2.parse.grammar import ( 6 | BaseGrammar, 7 | P, 8 | ) 9 | from darglint2.parse.identifiers import ( 10 | ExceptionItemIdentifier, 11 | ) 12 | from darglint2.errors import ( 13 | EmptyDescriptionError, 14 | EmptyTypeError, 15 | ) 16 | 17 | 18 | Grammar: RaisesGrammar 19 | 20 | start: 21 | 22 | 23 | 24 | ::= 25 | | 26 | 27 | 28 | ::=
29 | 30 | 31 | ::= 32 | | 33 | 34 | 35 | ::= @ExceptionItemIdentifier 36 | 37 | 38 | # FIXME: The symbol raises-item gets elided during translation, 39 | # so the annotations are lost. 40 | | @ExceptionItemIdentifier 41 | @EmptyDescriptionError 42 | 43 | # Although it's not a part of the numpy standard, we allow a 44 | # type for errors. This allows us to capture excess colons 45 | # that could be left by users. In any case, we can raise 46 | # just about anything, so even though the function can't 47 | # annotate the exception type, the docstring could. 48 | | @ExceptionItemIdentifier 49 | @EmptyTypeError 50 | 51 | 52 | | @ExceptionItemIdentifier 53 | @EmptyTypeError 54 | 55 | 56 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/numpy_receives_section.bnf: -------------------------------------------------------------------------------- 1 | import numpy.bnf 2 | import terminals.bnf 3 | 4 | from darglint2.parse.grammar import ( 5 | BaseGrammar, 6 | P, 7 | ) 8 | 9 | 10 | Grammar: ReceivesGrammar 11 | 12 | start: 13 | 14 | 15 | 16 | ::= 17 | | 18 | 19 | 20 | ::=
21 | 22 | 23 | ::= 24 | | 25 | 26 | 27 | ::= 28 | 29 | | 30 | 31 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/numpy_returns_section.bnf: -------------------------------------------------------------------------------- 1 | import numpy.bnf 2 | import terminals.bnf 3 | 4 | from darglint2.parse.grammar import ( 5 | BaseGrammar, 6 | P, 7 | ) 8 | from darglint2.parse.identifiers import ( 9 | ReturnTypeIdentifier, 10 | ) 11 | 12 | 13 | Grammar: ReturnsGrammar 14 | 15 | start: 16 | 17 | 18 | 19 | ::= 20 | | 21 | 22 | 23 | ::=
24 | 25 | # TODO: Change the yields body to match this. 26 | 27 | ::= 28 | | 29 | 30 | 31 | ::= 32 | | 33 | 34 | 35 | ::= @ReturnTypeIdentifier 36 | | @ReturnTypeIdentifier 37 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/numpy_short_description.bnf: -------------------------------------------------------------------------------- 1 | import numpy.bnf 2 | 3 | from darglint2.parse.grammar import ( 4 | BaseGrammar, 5 | P, 6 | ) 7 | 8 | 9 | Grammar: ShortDescriptionGrammar 10 | 11 | start: 12 | 13 | 14 | 15 | ::= 16 | | 17 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/numpy_warns_section.bnf: -------------------------------------------------------------------------------- 1 | import numpy.bnf 2 | import terminals.bnf 3 | 4 | from darglint2.parse.grammar import ( 5 | BaseGrammar, 6 | P, 7 | ) 8 | from darglint2.parse.identifiers import ( 9 | ExceptionItemIdentifier, 10 | ) 11 | 12 | 13 | Grammar: WarnsGrammar 14 | 15 | start: 16 | 17 | 18 | 19 | ::= 20 | | 21 | 22 | 23 | ::=
24 | 25 | 26 | ::= 27 | | 28 | 29 | 30 | ::= @ExceptionItemIdentifier 31 | 32 | 33 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/numpy_yields_section.bnf: -------------------------------------------------------------------------------- 1 | import numpy.bnf 2 | import terminals.bnf 3 | 4 | from darglint2.parse.grammar import ( 5 | BaseGrammar, 6 | P, 7 | ) 8 | from darglint2.parse.identifiers import ( 9 | YieldTypeIdentifier, 10 | ) 11 | 12 | 13 | Grammar: YieldsGrammar 14 | 15 | start: 16 | 17 | 18 | 19 | ::= 20 | | 21 | 22 | 23 | ::=
24 | 25 | 26 | ::= 27 | | 28 | | 29 | 30 | 31 | ::= 32 | | 33 | 34 | 35 | ::= @YieldTypeIdentifier 36 | | @YieldTypeIdentifier 37 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/sphinx_argument_type_section.bnf: -------------------------------------------------------------------------------- 1 | import sphinx_base.bnf 2 | 3 | from darglint2.errors import ( 4 | EmptyDescriptionError, 5 | ) 6 | 7 | Grammar: ArgumentTypeGrammar 8 | 9 | start: 10 | 11 | 12 | ::= 13 | | 14 | 15 | 16 | ::= 17 | 18 | 19 | ::= @EmptyDescriptionError 20 | 21 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/sphinx_arguments_section.bnf: -------------------------------------------------------------------------------- 1 | import sphinx_base.bnf 2 | 3 | from darglint2.errors import ( 4 | EmptyDescriptionError, 5 | ) 6 | 7 | Grammar: ArgumentsGrammar 8 | 9 | start: 10 | 11 | 12 | ::= 13 | # This token gets elided up to the grammar, 14 | # So the children have to "know" than nothing comes after them. 15 | | 16 | 17 | 18 | ::= 19 | | 20 | | 21 | | 22 | 23 | 24 | ::= @EmptyDescriptionError 25 | | @EmptyDescriptionError 26 | | @EmptyDescriptionError 27 | | @EmptyDescriptionError 28 | 29 | 30 | ::= 31 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/sphinx_base.bnf: -------------------------------------------------------------------------------- 1 | import terminals.bnf 2 | import noqa_statement.bnf 3 | 4 | from darglint2.parse.grammar import ( 5 | BaseGrammar, 6 | P, 7 | ) 8 | from darglint2.errors import ( 9 | IndentError, 10 | ) 11 | 12 | 13 | 14 | ::= 2 15 | | 2 16 | | 2 17 | | 2 18 | | 1 @IndentError 19 | 20 | 21 | ::= 22 | | 23 | 24 | 25 | ::= 26 | | 27 | | 28 | 29 | 30 | ::= 31 | 32 | 33 | ::= 34 | | 35 | 36 | 37 | ::= 38 | 39 | 40 | ::= 41 | | 42 | 43 | 44 | ::= 45 | | 46 | | 47 | | 48 | 49 | 50 | ::= 51 | # Noqa statements, if in a line, should be at the end. 52 | | 53 | # The noqa statement could be on its own. 
54 | | 55 | | 56 | 57 | 58 | 59 | ::= 60 | | ε 61 | 62 | 63 | ::= 64 | 65 | 66 | ::= 67 | | ε 68 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/sphinx_long_description.bnf: -------------------------------------------------------------------------------- 1 | import sphinx_base.bnf 2 | 3 | Grammar: LongDescriptionGrammar 4 | 5 | start: 6 | 7 | 8 | ::= 9 | | 10 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/sphinx_raises_section.bnf: -------------------------------------------------------------------------------- 1 | import sphinx_base.bnf 2 | 3 | from darglint2.errors import ( 4 | EmptyDescriptionError, 5 | ) 6 | 7 | 8 | Grammar: RaisesGrammar 9 | 10 | start: 11 | 12 | 13 | ::= 14 | | 15 | | 16 | 17 | 18 | ::= 19 | 20 | 21 | ::= @EmptyDescriptionError 22 | 23 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/sphinx_return_type_section.bnf: -------------------------------------------------------------------------------- 1 | import sphinx_base.bnf 2 | 3 | from darglint2.errors import ( 4 | EmptyDescriptionError, 5 | ) 6 | 7 | Grammar: ReturnTypeGrammar 8 | 9 | start: 10 | 11 | 12 | ::= 13 | | 14 | 15 | 16 | ::= 17 | 18 | 19 | ::= @EmptyDescriptionError 20 | 21 | 22 | 23 | ::= 24 | | 25 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/sphinx_returns_section.bnf: -------------------------------------------------------------------------------- 1 | import sphinx_base.bnf 2 | 3 | from darglint2.errors import ( 4 | EmptyDescriptionError, 5 | ) 6 | 7 | 8 | Grammar: ReturnsGrammar 9 | 10 | start: 11 | 12 | 13 | ::= 14 | | 15 | | 16 | 17 | 18 | ::= 19 | 20 | 21 | ::= @EmptyDescriptionError 22 | 23 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/sphinx_short_description.bnf: -------------------------------------------------------------------------------- 1 | import sphinx_base.bnf 2 | 3 | Grammar: ShortDescriptionGrammar 4 | 5 | start: 6 | 7 | 8 | ::= 9 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/sphinx_variable_type_section.bnf: -------------------------------------------------------------------------------- 1 | import sphinx_base.bnf 2 | 3 | from darglint2.errors import ( 4 | EmptyDescriptionError, 5 | ) 6 | 7 | Grammar: VariableTypeGrammar 8 | 9 | start: 10 | 11 | 12 | ::= 13 | | 14 | 15 | 16 | ::= 17 | 18 | 19 | ::= @EmptyDescriptionError 20 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/sphinx_variables_section.bnf: -------------------------------------------------------------------------------- 1 | import sphinx_base.bnf 2 | 3 | from darglint2.errors import ( 4 | EmptyDescriptionError, 5 | ) 6 | 7 | 8 | Grammar: VariablesSectionGrammar 9 | 10 | start: 11 | 12 | 13 | ::= 14 | | 15 | 16 | 17 | ::= 18 | | 19 | 20 | 21 | ::= @EmptyDescriptionError 22 | | @EmptyDescriptionError 23 | 24 | 25 | ::= 26 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/sphinx_yield_type_section.bnf: -------------------------------------------------------------------------------- 1 | import sphinx_base.bnf 2 | 3 | from darglint2.errors import ( 4 | EmptyDescriptionError, 5 | ) 6 | 7 | Grammar: YieldTypeGrammar 8 | 9 | start: 10 | 11 | 12 | ::= 
13 | | 14 | 15 | 16 | ::= 17 | 18 | 19 | ::= @EmptyDescriptionError 20 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/sphinx_yields_section.bnf: -------------------------------------------------------------------------------- 1 | import sphinx_base.bnf 2 | 3 | from darglint2.errors import ( 4 | EmptyDescriptionError, 5 | ) 6 | 7 | Grammar: YieldsGrammar 8 | 9 | start: 10 | 11 | 12 | ::= 13 | | 14 | 15 | 16 | ::= 17 | 18 | 19 | ::= 20 | | 21 | 22 | 23 | ::= @EmptyDescriptionError 24 | 25 | | @EmptyDescriptionError 26 | 27 | -------------------------------------------------------------------------------- /darglint2/parse/grammars/terminals.bnf: -------------------------------------------------------------------------------- 1 | from darglint2.token import ( 2 | TokenType, 3 | ) 4 | 5 | 6 | ::= 7 | | 8 | | 9 | | 10 | | 11 | | 12 | |
13 | 14 | 15 | ::= 16 | | 17 | | 18 | | 19 | | 20 | |
21 | 22 | 23 | ::= "TokenType\.WORD" 24 | | 25 | | 26 | | 27 | | 28 | | 29 | | 30 | | 31 | | 32 | | 33 | | 34 | | 35 | | 36 | | 37 | | 38 | | 39 | | 40 | | 41 | | 42 | | 43 | 44 | 45 |
46 | ::= "TokenType\.HEADER" 47 | 48 | 49 | ::= "TokenType\.ARGUMENTS" 50 | 51 | 52 | ::= "TokenType\.ARGUMENT_TYPE" 53 | 54 | 55 | ::= "TokenType\.COLON" 56 | 57 | 58 | ::= "TokenType\.HASH" 59 | 60 | 61 | ::= "TokenType\.INDENT" 62 | 63 | 64 | ::= "TokenType\.LPAREN" 65 | 66 | 67 | ::= "TokenType\.NEWLINE" 68 | 69 | 70 | ::= "TokenType\.RAISES" 71 | 72 | 73 | ::= "TokenType\.RETURNS" 74 | 75 | 76 | ::= "TokenType\.RETURN_TYPE" 77 | 78 | 79 | ::= "TokenType\.RPAREN" 80 | 81 | 82 | ::= "TokenType\.YIELDS" 83 | 84 | 85 | ::= "TokenType\.YIELD_TYPE" 86 | 87 | 88 | ::= "TokenType\.VARIABLES" 89 | 90 | 91 | ::= "TokenType\.VARIABLE_TYPE" 92 | 93 | 94 | ::= "TokenType\.NOQA" 95 | 96 | 97 | ::= "TokenType\.OTHER" 98 | 99 | 100 | ::= "TokenType\.RECEIVES" 101 | 102 | 103 | ::= "TokenType\.WARNS" 104 | 105 | 106 | ::= "TokenType\.SEE" 107 | 108 | 109 | ::= "TokenType\.ALSO" 110 | 111 | 112 | ::= "TokenType\.NOTES" 113 | 114 | 115 | ::= "TokenType\.EXAMPLES" 116 | 117 | 118 | ::= "TokenType\.REFERENCES" 119 | -------------------------------------------------------------------------------- /darglint2/parse/long_description.py: -------------------------------------------------------------------------------- 1 | from functools import reduce 2 | from typing import List, Optional 3 | 4 | from ..node import CykNode 5 | from ..peaker import Peaker 6 | from ..token import Token, TokenType 7 | from .identifiers import NoqaIdentifier 8 | 9 | 10 | def _is(peaker: Peaker[Token], token_type: Optional[TokenType], index: int = 1) -> bool: 11 | try: 12 | token = peaker.peak(lookahead=index) 13 | except IndexError: 14 | token = None 15 | if not token_type and not token: 16 | return True 17 | return bool(token and token.token_type == token_type) 18 | 19 | 20 | def _are(peaker: Peaker[Token], *token_types: Optional[TokenType]) -> bool: 21 | return all( 22 | [_is(peaker, token_type, i + 1) for i, token_type in enumerate(token_types)] 23 | ) 24 | 25 | 26 | def _parse_noqa_head(peaker: Peaker[Token]) -> Optional[CykNode]: 27 | if not ( 28 | _are(peaker, TokenType.HASH, TokenType.NOQA, TokenType.NEWLINE) 29 | or _are(peaker, TokenType.HASH, TokenType.NOQA, None) 30 | ): 31 | return None 32 | noqa_hash = CykNode("hash", value=peaker.next()) 33 | noqa = CykNode("noqa", value=peaker.next()) 34 | if _is(peaker, TokenType.NEWLINE): 35 | peaker.next() 36 | return CykNode( 37 | "noqa", 38 | lchild=noqa_hash, 39 | rchild=noqa, 40 | annotations=[ 41 | NoqaIdentifier, 42 | ], 43 | ) 44 | 45 | 46 | def _last_node(node: CykNode) -> CykNode: 47 | curr = node 48 | rchild = curr.rchild 49 | while rchild: 50 | curr = rchild 51 | rchild = curr.rchild 52 | return curr 53 | 54 | 55 | def foldr(fun, xs, acc): 56 | return reduce(lambda x, y: fun(y, x), xs[::-1], acc) 57 | 58 | 59 | def _parse_words_until_newline_or_end(peaker): 60 | if not peaker.has_next() or _is(peaker, TokenType.NEWLINE): 61 | return None 62 | words = [CykNode("word", value=peaker.next())] 63 | while peaker.has_next() and not _is(peaker, TokenType.NEWLINE): 64 | words.append(CykNode("word", value=peaker.next())) 65 | 66 | if len(words) == 1: 67 | head = words[0] 68 | head.symbol = "words" 69 | return head 70 | 71 | def join(x, y): 72 | return CykNode( 73 | "words", 74 | lchild=x, 75 | rchild=y, 76 | ) 77 | 78 | acc = words.pop() 79 | acc.symbol = "words" 80 | 81 | return foldr(join, words, acc) 82 | 83 | 84 | def _parse_noqa(peaker: Peaker[Token]) -> Optional[CykNode]: 85 | if not ( 86 | _are(peaker, TokenType.HASH, TokenType.NOQA, TokenType.COLON, TokenType.WORD) 87 | ): 
88 | return None 89 | noqa_hash = CykNode("hash", value=peaker.next()) 90 | noqa = CykNode("noqa", value=peaker.next()) 91 | colon = CykNode("colon", value=peaker.next()) 92 | targets = _parse_words_until_newline_or_end(peaker) 93 | head = CykNode( 94 | "noqa", 95 | lchild=CykNode( 96 | "noqa-head", 97 | lchild=noqa_hash, 98 | rchild=noqa, 99 | ), 100 | rchild=CykNode( 101 | "noqa-statement1", 102 | lchild=colon, 103 | rchild=targets, 104 | ), 105 | annotations=[ 106 | NoqaIdentifier, 107 | ], 108 | ) 109 | return head 110 | 111 | 112 | def _parse_long_description(peaker: Peaker[Token]) -> Optional[CykNode]: 113 | if not peaker.has_next(): 114 | return None 115 | head = _parse_noqa(peaker) or _parse_noqa_head(peaker) 116 | if head: 117 | new_head = CykNode( 118 | "long-description", 119 | lchild=head, 120 | ) 121 | head = new_head 122 | else: 123 | head = CykNode( 124 | symbol="long-description", 125 | lchild=CykNode("long-description1", value=peaker.next()), 126 | ) 127 | curr = _last_node(head) 128 | while peaker.has_next(): 129 | noqa = _parse_noqa(peaker) or _parse_noqa_head(peaker) 130 | if not noqa: # curr.rchild: 131 | curr.rchild = CykNode( 132 | symbol="long-description1", 133 | lchild=CykNode("long-description1", value=peaker.next()), 134 | ) 135 | else: 136 | old_left = curr.lchild 137 | curr.lchild = CykNode( 138 | symbol="long-description1", 139 | lchild=old_left, 140 | rchild=noqa, 141 | ) 142 | curr = _last_node(curr) 143 | return head 144 | 145 | 146 | def parse(tokens: List[Token]) -> Optional[CykNode]: 147 | peaker = Peaker((x for x in tokens), lookahead=5) 148 | if not peaker.has_next(): 149 | return None 150 | 151 | return _parse_long_description(peaker) 152 | -------------------------------------------------------------------------------- /darglint2/peaker.py: -------------------------------------------------------------------------------- 1 | """Describes Peaker, a stream transformer for peaking ahead.""" 2 | from collections import deque 3 | from typing import ( # noqa: F401 4 | Callable, 5 | Generic, 6 | Iterator, 7 | List, 8 | Optional, 9 | TypeVar, 10 | Union, 11 | ) 12 | 13 | T = TypeVar("T") 14 | 15 | 16 | class Peaker(Generic[T]): 17 | """A stream transformer allowing us to peak ahead.""" 18 | 19 | # The previous token which was gotten. 20 | prev: T = None 21 | 22 | class _Empty(object): 23 | value = None 24 | 25 | def __init__(self, stream: Iterator[T], lookahead: int = 1) -> None: 26 | """Create a new peaker. 27 | 28 | Args: 29 | stream: An iterator of T objects, which may be empty. 30 | lookahead: The amount of lookahead this should allow 31 | in the stream. 32 | 33 | """ 34 | self.stream = stream 35 | self.buffer: deque = deque() 36 | self.lookahead = lookahead 37 | self._buffer_to(lookahead) 38 | 39 | def _buffer_to(self, amount): 40 | """Extend the internal buffer to the given amount. 41 | 42 | Only adds items up to that amount, and while there 43 | are items to add. 44 | 45 | Args: 46 | amount: The length to make the buffer. 47 | 48 | Raises: 49 | Exception: If we are not able to buffer to the 50 | given amount. 51 | 52 | """ 53 | if amount > self.lookahead: 54 | raise Exception( 55 | "Cannot extend buffer to {}: " 56 | "beyond buffer lookahead {}".format(amount, self.lookahead) 57 | ) 58 | while len(self.buffer) < amount: 59 | try: 60 | self.buffer.appendleft(next(self.stream)) 61 | except StopIteration: 62 | break 63 | 64 | def next(self) -> T: 65 | """Get the next item in the stream, moving it forward. 
66 | 67 | Side effects: 68 | Moves the stream forward. 69 | 70 | Raises: 71 | StopIteration: If there are no more items in the stream. 72 | 73 | Returns: 74 | The next item of type T in the stream. 75 | 76 | """ 77 | if len(self.buffer) == 0: 78 | raise StopIteration 79 | self.prev = self.buffer.pop() 80 | self._buffer_to(self.lookahead) 81 | return self.prev 82 | 83 | def peak(self, lookahead: int = 1) -> Optional[T]: 84 | """Get the next letter in the stream, without moving it forward. 85 | 86 | Args: 87 | lookahead: The amount of tokens to look ahead in 88 | the buffer. 89 | 90 | Raises: 91 | Exception: If we are not able to buffer to the given 92 | lookahead. 93 | 94 | Returns: 95 | The next item of type T in the stream. 96 | 97 | """ 98 | if lookahead > self.lookahead: 99 | raise Exception( 100 | "Cannot peak to {}: beyond buffer lookahead {}".format( 101 | lookahead, self.lookahead 102 | ) 103 | ) 104 | if lookahead > len(self.buffer): 105 | return None 106 | index = len(self.buffer) - lookahead 107 | return self.buffer[index] 108 | 109 | def rpeak(self, lookahead: int = 1) -> T: 110 | """Peak at the item lookahead ahead, raising an exception if empty. 111 | 112 | Args: 113 | lookahead: The amount of tokens to look ahead in 114 | the buffer. 115 | 116 | Raises: 117 | Exception: If we are not able to buffer to the given 118 | lookahead. 119 | IndexError: If there are no items at the given index ahead. 120 | 121 | Returns: 122 | The next item of type T in the stream. 123 | 124 | """ 125 | if lookahead > self.lookahead: 126 | raise Exception( 127 | "Cannot peak to {}: beyond buffer lookahead {}".format( 128 | lookahead, self.lookahead 129 | ) 130 | ) 131 | if lookahead > len(self.buffer): 132 | raise IndexError 133 | index = len(self.buffer) - lookahead 134 | return self.buffer[index] 135 | 136 | def has_next(self) -> bool: 137 | """Tell whether there are more tokens in the stream. 138 | 139 | Returns: 140 | True if there are more tokens, false otherwise. 141 | 142 | """ 143 | return len(self.buffer) > 0 144 | 145 | def take_while(self, test: Callable) -> List[T]: 146 | """Return elements from the stream while they pass the test. 147 | 148 | Args: 149 | test: A function which returns true if we would like to collect 150 | the token, or false if we would like to stop. 151 | 152 | Returns: 153 | A list of items (of type T), which pass the given test function. 154 | 155 | """ 156 | passing_elements: List[T] = [] 157 | while self.has_next() and test(self.peak()): 158 | passing_elements.append(self.next()) 159 | return passing_elements 160 | -------------------------------------------------------------------------------- /darglint2/strictness.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class Strictness(Enum): 5 | """The minimum strictness with which to apply checks. 6 | 7 | Strictness does not describe whether or not a check 8 | should be applied. Rather, if a check is done, strictness 9 | describes how intense/strict/deep the check should be. 10 | 11 | Each level here describes what is required of the 12 | docstring at the given level of strictness. For example, 13 | SHORT_DESCRIPTION describes the situation where one-liners are 14 | allowed, and sections are not required. 15 | 16 | If the docstring being checked contains more than the 17 | allowed amount below, then it is assumed that everything 18 | must be checked. 19 | 20 | """ 21 | 22 | # Allow a single-line description. 
23 | SHORT_DESCRIPTION = 1 24 | 25 | # Allow a single-line description followed by a long 26 | # description, but no sections. 27 | LONG_DESCRIPTION = 2 28 | 29 | # Require everything. 30 | FULL_DESCRIPTION = 3 31 | 32 | @classmethod 33 | def from_string(cls, strictness): 34 | strictness = strictness.lower().strip() 35 | if strictness in {"short_description", "short"}: 36 | return cls.SHORT_DESCRIPTION 37 | if strictness in {"long_description", "long"}: 38 | return cls.LONG_DESCRIPTION 39 | if strictness in {"full_description", "full"}: 40 | return cls.FULL_DESCRIPTION 41 | 42 | raise Exception( 43 | 'Unrecognized strictness amount "{}". '.format(strictness) 44 | + 'Should be one of {"short", "long", "full"}' 45 | ) 46 | -------------------------------------------------------------------------------- /darglint2/token.py: -------------------------------------------------------------------------------- 1 | """Defines the tokens that result from lexing, as well as their types.""" 2 | 3 | from enum import Enum 4 | 5 | 6 | class BaseTokenType(Enum): 7 | pass 8 | 9 | 10 | class TokenType(BaseTokenType): 11 | """The possible types of tokens.""" 12 | 13 | WORD = 1 14 | COLON = 2 15 | DOCTERM = 3 # """ 16 | NEWLINE = 4 17 | INDENT = 5 # Assumed to always be 4 spaces 18 | HASH = 6 # A hash mark (for comments/noqa). 19 | LPAREN = 7 # ( 20 | RPAREN = 8 # ) 21 | 22 | RETURNS = 9 23 | YIELDS = 10 24 | RAISES = 11 25 | ARGUMENTS = 12 26 | NOQA = 13 27 | RETURN_TYPE = 14 28 | YIELD_TYPE = 15 29 | VARIABLES = 16 30 | VARIABLE_TYPE = 17 31 | ARGUMENT_TYPE = 18 32 | HEADER = 19 33 | OTHER = 20 34 | RECEIVES = 21 35 | WARNS = 22 36 | SEE = 23 37 | ALSO = 24 38 | NOTES = 25 39 | EXAMPLES = 26 40 | REFERENCES = 27 41 | 42 | # next: 28 43 | 44 | 45 | KEYWORDS = { 46 | TokenType.RETURNS, 47 | TokenType.YIELDS, 48 | TokenType.RAISES, 49 | TokenType.ARGUMENTS, 50 | TokenType.NOQA, 51 | TokenType.RETURN_TYPE, 52 | TokenType.YIELD_TYPE, 53 | TokenType.VARIABLES, 54 | TokenType.VARIABLE_TYPE, 55 | TokenType.ARGUMENT_TYPE, 56 | TokenType.RECEIVES, 57 | TokenType.WARNS, 58 | TokenType.NOTES, 59 | TokenType.EXAMPLES, 60 | TokenType.REFERENCES, 61 | } 62 | 63 | 64 | class Token(object): 65 | """A token representing anything which can appear in a docstring.""" 66 | 67 | def __init__(self, value: str, token_type: TokenType, line_number: int) -> None: 68 | """Create a new Token. 69 | 70 | Args: 71 | value: The value of the token. (The actual string.) 72 | token_type: The type of token this represents. 73 | line_number: The line number where this token resides. 74 | Used when reporting errors. 75 | 76 | """ 77 | self.value = value 78 | self.token_type = token_type 79 | self.line_number = line_number 80 | 81 | def __str__(self): 82 | """Return readable representation for debugging. 83 | 84 | Returns: 85 | A readable representation for debugging. 86 | 87 | """ 88 | return "".format(repr(self.value), self.token_type) 89 | 90 | def __repr__(self): 91 | """Return readable representation for debugging. 92 | 93 | Returns: 94 | A readable representation for debugging. 
95 | 96 | """ 97 | return str(self) 98 | -------------------------------------------------------------------------------- /docker-build/Dockerfile.test36: -------------------------------------------------------------------------------- 1 | FROM python:3.6-alpine 2 | 3 | 4 | RUN apk update && \ 5 | apk add gcc \ 6 | musl-dev \ 7 | python3-dev 8 | 9 | 10 | RUN python -m pip install -U pip && \ 11 | python -m pip install pytest \ 12 | mypy \ 13 | flake8 \ 14 | typing 15 | 16 | 17 | WORKDIR /code/ 18 | -------------------------------------------------------------------------------- /docker-build/Dockerfile.test37: -------------------------------------------------------------------------------- 1 | FROM python:3.7-alpine 2 | 3 | 4 | RUN apk update && \ 5 | apk add gcc \ 6 | musl-dev \ 7 | python3-dev 8 | 9 | 10 | RUN python -m pip install -U pip && \ 11 | python -m pip install pytest \ 12 | mypy \ 13 | flake8 \ 14 | typing 15 | 16 | 17 | WORKDIR /code/ 18 | -------------------------------------------------------------------------------- /docker-build/Dockerfile.test38: -------------------------------------------------------------------------------- 1 | FROM python:3.8-alpine 2 | 3 | 4 | RUN apk update && \ 5 | apk add gcc \ 6 | musl-dev \ 7 | python3-dev 8 | 9 | 10 | RUN python -m pip install -U pip && \ 11 | python -m pip install pytest \ 12 | mypy \ 13 | flake8 \ 14 | typing 15 | 16 | 17 | WORKDIR /code/ 18 | -------------------------------------------------------------------------------- /docker-build/Dockerfile.test39: -------------------------------------------------------------------------------- 1 | FROM python:3.9-alpine 2 | 3 | 4 | RUN apk update && \ 5 | apk add gcc \ 6 | musl-dev \ 7 | python3-dev 8 | 9 | 10 | RUN python -m pip install -U pip && \ 11 | python -m pip install pytest \ 12 | mypy \ 13 | flake8 \ 14 | typing 15 | 16 | 17 | WORKDIR /code/ 18 | -------------------------------------------------------------------------------- /docker-build/Makefile: -------------------------------------------------------------------------------- 1 | .phony: all 2 | all: 3 | docker build -t darglint2-36 -f Dockerfile.test36 . && \ 4 | docker build -t darglint2-37 -f Dockerfile.test37 . && \ 5 | docker build -t darglint2-38 -f Dockerfile.test38 . && \ 6 | docker build -t darglint2-39 -f Dockerfile.test39 . 7 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | poster.pdf: poster.tex 2 | pdflatex --shell-escape poster.tex 3 | 4 | # Minimal makefile for Sphinx documentation 5 | # 6 | 7 | # You can set these variables from the command line, and also 8 | # from the environment for the first two. 9 | SPHINXOPTS ?= 10 | SPHINXBUILD ?= sphinx-build 11 | SOURCEDIR = source 12 | BUILDDIR = build 13 | 14 | # Put it first so that "make" without argument is like "make help". 15 | help: 16 | @poetry run $(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 17 | 18 | .PHONY: help Makefile 19 | 20 | # Catch-all target: route all unknown targets to Sphinx using the new 21 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
22 | %: Makefile 23 | @poetry run $(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 24 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/poster.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akaihola/darglint2/72c51e90c187610c52fd5076b4c28e1202ece84f/docs/poster.pdf -------------------------------------------------------------------------------- /docs/source/.gitignore: -------------------------------------------------------------------------------- 1 | # document trees generated by sphinx 2 | doctrees 3 | -------------------------------------------------------------------------------- /docs/source/_polyversion/static/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akaihola/darglint2/72c51e90c187610c52fd5076b4c28e1202ece84f/docs/source/_polyversion/static/.nojekyll -------------------------------------------------------------------------------- /docs/source/_polyversion/templates/index.html: -------------------------------------------------------------------------------- 1 | 2 | {# Determine latest tag / latest branch if no tags are available #} 3 | {% if tags %} {% set latest = tags|max(attribute='date') %} {% else %} {% set 4 | latest = branches|max(attribute='date') %} {% endif %} 5 | 6 | {# Redirect to the subdirectory of the latest version #} 7 | 8 | 9 | Redirecting to master branch 10 | 11 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /docs/source/_static/css/ext-links.css: -------------------------------------------------------------------------------- 1 | /* Add small icon appended to external links (not in use currently)*/ 2 | 3 | .reference.external:after { 4 | color: var(--color-sidebar-link-text); 5 | content: url("data:image/svg+xml;charset=utf-8,%3Csvg width='12' height='12' xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' stroke-width='1.5' stroke='%23607D8B' fill='none' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpath d='M0 0h24v24H0z' stroke='none'/%3E%3Cpath d='M11 7H6a2 2 0 0 0-2 2v9a2 2 0 0 0 2 2h9a2 2 0 0 0 2-2v-5M10 14 20 4M15 4h5v5'/%3E%3C/svg%3E"); 6 | margin: 0 0.25rem; 7 | vertical-align: middle; 8 | } 9 | -------------------------------------------------------------------------------- 
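The stylesheets under docs/source/_static/css only take effect if the Sphinx configuration registers them. A minimal sketch of the relevant docs/source/conf.py lines, assuming the standard html_static_path/html_css_files options (the actual contents of conf.py are not reproduced in this part of the listing, so the exact wiring here is an assumption):

    # Sketch of docs/source/conf.py (assumed, not copied from the repository):
    # register the _static directory and the two custom stylesheets listed here.
    html_static_path = ["_static"]
    html_css_files = [
        "css/ext-links.css",         # external-link icon (currently unused per its comment)
        "css/version-selector.css",  # styles for the version picker template
    ]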
/docs/source/_static/css/version-selector.css: -------------------------------------------------------------------------------- 1 | /* Style our version picker 2 | 3 | The version picker is defined in `_templates/versioning.html` and uses the same classes 4 | and ids as the one provided by the theme for use with readthedocs.io 5 | This allows us to load the styles by readthedocs as a basis 6 | and adjust them to our likings. 7 | */ 8 | 9 | /* Import RTD styles */ 10 | @import url("https://assets.readthedocs.org/static/css/readthedocs-doc-embed.css"); 11 | @import url("https://assets.readthedocs.org/static/css/badge_only.css"); 12 | 13 | /* remove border around version picker */ 14 | #furo-readthedocs-versions:focus-within, 15 | #furo-readthedocs-versions:hover { 16 | box-shadow: none; 17 | } 18 | 19 | /* adjust the element showing the selected version */ 20 | .rst-versions .rst-current-version { 21 | padding: var(--sidebar-item-spacing-vertical) 22 | var(--sidebar-item-spacing-horizontal); 23 | border-top: 1px solid var(--color-sidebar-search-border); 24 | color: var(--color-foreground-primary); 25 | } 26 | 27 | /* .rst-versions .rst-current-version.rst-out-of-date { 28 | color: #c64334; 29 | } 30 | 31 | .rst-versions .rst-current-version.rst-active-old-version { 32 | color: #634f00; 33 | } */ 34 | 35 | /* adjust the element listing all available versions */ 36 | #furo-readthedocs-versions > .rst-other-versions { 37 | padding: var(--sidebar-item-spacing-vertical) 38 | var(--sidebar-item-spacing-horizontal); 39 | border-style: none; 40 | border-top: 1px solid var(--color-sidebar-search-border); 41 | } 42 | 43 | /* adjust list headings */ 44 | .rst-versions .rst-other-versions dt { 45 | color: var(--color-foreground-secondary); 46 | } 47 | 48 | /* adjust selectable version items */ 49 | .rst-versions .rst-other-versions dd a { 50 | color: var(--color-sidebar-link-text--top-level); 51 | padding-left: 0px; 52 | padding-right: 12px; 53 | } 54 | 55 | /* adjust icons for the list headings */ 56 | .bi .version-header { 57 | margin-left: 0.25rem; 58 | } 59 | 60 | /* adjust icon for the version picker */ 61 | .bi-git { 62 | float: left; 63 | color: var(--color-foreground-primary); 64 | left: var(--sidebar-item-spacing-horizontal); 65 | } 66 | -------------------------------------------------------------------------------- /docs/source/_templates/components/edit-this-page.html: -------------------------------------------------------------------------------- 1 | {# Adjust link of `edit source` button The furo theme adds an `edit source` 2 | button to the top of the page that opens the page viewed on github in edit mode. 3 | However we prefer opening the file in the standards view mode. The furo theme is 4 | based on the `basic-ng` theme which defines a view-this-page button. We reuse 5 | its code to determine the page link but extend it to use the meta field 6 | `edit_path` that can be set in every .rst file to change the path the edit 7 | button links to. 
See 8 | https://www.sphinx-doc.org/en/master/usage/restructuredtext/field-lists.html#file-wide-metadata 9 | #} 10 | {% extends "furo/components/edit-this-page.html" %} 11 | {% from "furo/components/edit-this-page.html" import furo_edit_button with context %} 12 | {% from "basic-ng/components/edit-this-page.html" import sanitise_trailing_slash with context %} 13 | 14 | {#- Modified from 15 | https://github.com/pradyunsg/sphinx-basic-ng/blob/main/src/sphinx_basic_ng/theme/basic-ng/components/view-this-page.html#L5 16 | #} 17 | {%- macro determine_page_view_link() -%} 18 | {%- if theme_source_view_link -%} 19 | {{ theme_source_view_link.format(filename=pagename+page_source_suffix) }} 20 | {%- elif theme_source_repository -%} 21 | {#- First, sanitise the trailing slashes. -#} 22 | {%- set repo = sanitise_trailing_slash(theme_source_repository) -%} 23 | {%- set branch = theme_source_branch -%} 24 | {%- set subdirectory = sanitise_trailing_slash(theme_source_directory) -%} 25 | {#- Figure out the document's source file path. -#} 26 | {% if meta.edit_path %} 27 | {# Modify path based on the meta field `edit_path` #} 28 | {% if meta.edit_path.startswith("/") %} 29 | {%- set relative_path = meta.edit_path[1:] -%} 30 | {%- set subdirectory = "" -%} 31 | {%- else -%} 32 | {%- set relative_path = meta.edit_path -%} 33 | {%- endif -%} 34 | {%- else -%} 35 | {%- set relative_path = pagename + page_source_suffix -%} 36 | {%- endif -%} 37 | {%- if not subdirectory -%} 38 | {%- set document_path = relative_path -%} 39 | {%- else -%} 40 | {%- set document_path = subdirectory + "/" + relative_path -%} 41 | {%- endif -%} 42 | 43 | {#- Don't allow http:// URLs -#} 44 | {%- if repo.startswith( ( "http://github.com/", "http://gitlab.com/", "http://bitbucket.org/", ) ) -%} 45 | {{ warning("Could not use `source_repository` provided. Please use https:// links in your `conf.py` file's `html_theme_options`.") }} 46 | {#- Handle the relevant cases -#} 47 | {%- elif repo.startswith("https://github.com/") -%} 48 | {{ repo }}/blob/{{ branch }}/{{document_path }} 49 | {%- elif repo.startswith("https://gitlab.com/") -%} 50 | {{ repo}}/blob/{{ branch }}/{{ document_path }} 51 | {%- elif repo.startswith("https://bitbucket.org/") -%} 52 | {{ repo }}/src/{{ branch }}/{{document_path }} 53 | {#- Fail with a warning -#} 54 | {%- else -%} 55 | {{ warning( "Could not understand `source_repository` provided: " 56 | + repo + "\n" + "You should set `source_view_link`, so that the view link is presented." ) }} 57 | {%- endif -%} 58 | 59 | {%- elif show_source and has_source -%} 60 | {{ pathto('_sources/' + sourcename, true) }} 61 | {%- endif -%} 62 | {%- endmacro -%} 63 | 64 | {# use the edit button code by furo but use above macro to determine URL #} 65 | {% block link_available -%} 66 | {{ furo_edit_button(determine_page_view_link()) }} 67 | {%- endblock %} 68 | -------------------------------------------------------------------------------- /docs/source/_templates/sidebar/brand.html: -------------------------------------------------------------------------------- 1 | {# Style darglint logo in the sidebar This adds the version number as a superscript. 
#} 2 | {% extends "!sidebar/brand.html" %} 3 | 4 | {% block brand_content %} 5 | {{ project }}{{ release }} 8 | {% endblock brand_content %} 9 | -------------------------------------------------------------------------------- /docs/source/_templates/versioning.html: -------------------------------------------------------------------------------- 1 | {# Add version selector. This generates a version selector similar to the rtd 2 | version selector using the data exposed by `sphinx-multiversion` through 3 | current, latest_version and versions. It uses the same classes and ids as the 4 | version picker provided by the theme for use with readthedocs.io. The css styling 5 | can be found in `css/version-selector.css`. The template doesn't fail when the 6 | needed data isn't provided but the result doesn't work as expected. #} 7 | 
16 | {# this element shows the current version and is visible by default. It hides 17 | on hover while the element below appears in its place. #} 18 | 19 | {# git icon indicating the version selector #} 20 | 21 | {# show current version; prepend `v` in case of branches #} {% if not 22 | current or not current.name.startswith("v") %} v: {% endif %} {{ 23 | current.name if current else "undefined" }} 24 | 25 | {% if tags or branches %} {# This item lists the available versions grouped 26 | into branches and tags. The item is hidden by default but appears when the 27 | user hovers over the version selector. #} 28 | 
29 | {% if tags %} {# List of tags #} 30 |
31 |
32 | {{ _('Tags') }} 33 |
34 | {% for item in tags %} 35 |
{{ item.name }}
36 | {% endfor %} 37 |
38 | {% endif %} {% if branches %} {# List of branches #} 39 |
40 |
{{ _('Branches') }}
41 | {% for item in branches %} 42 |
{{ item.name }}
43 | {% endfor %} 44 |
45 | {% endif %} 46 |
47 | {% endif %} 48 |
49 | -------------------------------------------------------------------------------- /docs/source/changelog.rst: -------------------------------------------------------------------------------- 1 | :edit_path: /CHANGELOG.md 2 | 3 | :hide-toc: 4 | 5 | .. include:: ../../CHANGELOG.md 6 | :parser: myst_parser.sphinx_ 7 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | :hide-toc: 2 | 3 | ========= 4 | Darglint2 5 | ========= 6 | 7 | .. include:: ../../README.md 8 | :parser: myst_parser.sphinx_ 9 | :start-after: 10 | :end-before: 11 | 12 | .. toctree:: 13 | :maxdepth: 2 14 | 15 | readme 16 | 17 | .. toctree:: 18 | :maxdepth: 1 19 | :caption: Development 20 | 21 | changelog 22 | license 23 | 24 | .. toctree:: 25 | :caption: Links 26 | 27 | Github 28 | PyPI 29 | -------------------------------------------------------------------------------- /docs/source/license.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | License 3 | ======= 4 | 5 | **MIT License** 6 | 7 | .. include:: ../../LICENSE.txt 8 | -------------------------------------------------------------------------------- /docs/source/readme.rst: -------------------------------------------------------------------------------- 1 | :edit_path: /README.md 2 | 3 | .. 4 | the raw html directive adds a heading to the document that is excluded 5 | from the doctree 6 | 7 | .. raw:: html 8 | 9 |

Darglint2

10 | 11 | 12 | .. include:: ../../README.md 13 | :parser: myst_parser.sphinx_ 14 | :start-after: 15 | -------------------------------------------------------------------------------- /integration_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akaihola/darglint2/72c51e90c187610c52fd5076b4c28e1202ece84f/integration_tests/__init__.py -------------------------------------------------------------------------------- /integration_tests/analysis_tests.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import os 3 | from typing import Iterable 4 | from unittest import TestCase 5 | from unittest.mock import Mock, patch 6 | 7 | from darglint2.analysis.raise_visitor import RaiseVisitor 8 | from darglint2.config import AssertStyle 9 | from darglint2.function_description import get_function_descriptions, read_program 10 | from darglint2.utils import ConfigurationContext 11 | 12 | 13 | def yield_modules() -> Iterable[str]: 14 | for path, folders, filenames in os.walk("integration_tests/repos"): 15 | for filename in filenames: 16 | if not filename.endswith(".py"): 17 | continue 18 | yield os.path.join(path, filename) 19 | 20 | 21 | class RaiseAnalysisTest(TestCase): 22 | @patch("darglint2.analysis.raise_visitor.logger") 23 | def test_no_errors_logged_ever(self, mock_logger): 24 | """Make sure that no functions kill the analysis. 25 | 26 | We patch the logger to make sure no error-level message 27 | is recorded. An error-level message indicates that there 28 | is an unexpected branch we did not cover. 29 | 30 | """ 31 | mock_logger.error = Mock() 32 | visitor = RaiseVisitor() 33 | 34 | # Force assert to raise an error -- this will help to 35 | # distinguish it from the logger errors. 36 | with ConfigurationContext(assert_style=AssertStyle.RAISE): 37 | for module in yield_modules(): 38 | program = read_program(module) 39 | try: 40 | tree = ast.parse(program) 41 | except: # noqa: E722 42 | # If it doesn't parse, then it's probably Python2, 43 | # or something is invalid and we don't care. 44 | # We only want to check files which are valid 45 | # Python. 
46 | continue 47 | functions = get_function_descriptions(tree) 48 | for function in functions: 49 | try: 50 | visitor.visit(function.function) 51 | except: # noqa: E722 52 | print("Visitor error raised during {}".format(module)) 53 | raise 54 | self.assertFalse( 55 | mock_logger.error.called, 56 | "Unexpected error log at {}".format( 57 | module, 58 | ), 59 | ) 60 | -------------------------------------------------------------------------------- /integration_tests/end_to_end.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | from unittest import TestCase 3 | 4 | 5 | class EndToEndTest(TestCase): 6 | def get_errors(self, filename, *args): 7 | invocation = ["darglint2", *args] 8 | invocation.append(filename) 9 | proc = subprocess.run( 10 | invocation, 11 | stdout=subprocess.PIPE, 12 | stderr=subprocess.PIPE, 13 | ) 14 | result = proc.stdout.decode("utf8") 15 | return result 16 | 17 | def test_enable_disabled_by_default(self): 18 | errors = self.get_errors( 19 | "integration_tests/files/missing_arg_type.py", 20 | "--enable", 21 | "DAR104", 22 | ) 23 | self.assertTrue("DAR104" in errors, errors) 24 | 25 | def test_two_space_indent(self): 26 | errors = self.get_errors( 27 | "integration_tests/files/two_spaces.py", 28 | "--indentation", 29 | "2", 30 | ) 31 | self.assertEqual(errors.count("DAR101"), 1) 32 | 33 | def test_docstring_style_selection(self): 34 | for style in ["google", "sphinx", "numpy"]: 35 | filename = "integration_tests/files/{}_example.py".format(style) 36 | errors = self.get_errors( 37 | filename, 38 | "--docstring-style", 39 | style, 40 | ) 41 | self.assertEqual( 42 | errors.count("DAR101"), 43 | 1, 44 | "Expected {} to have one missing parameter.".format( 45 | filename, 46 | ), 47 | ) 48 | self.assertEqual( 49 | errors.count("DAR102"), 50 | 1, 51 | "Expected {} to have one extra parameter.".format( 52 | filename, 53 | ), 54 | ) 55 | -------------------------------------------------------------------------------- /integration_tests/files/example-ascii.py: -------------------------------------------------------------------------------- 1 | # -*- coding: ascii -*- 2 | 3 | 4 | def main(): 5 | """Runs the main program. 6 | 7 | Returns: 8 | The return code for the program. 9 | 10 | """ 11 | print("Hello, world!") 12 | 13 | 14 | if __name__ == "__main__": 15 | main() 16 | -------------------------------------------------------------------------------- /integration_tests/files/example-latin1.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akaihola/darglint2/72c51e90c187610c52fd5076b4c28e1202ece84f/integration_tests/files/example-latin1.py -------------------------------------------------------------------------------- /integration_tests/files/example-utf8.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | def main(): 5 | """Runs the main program. 6 | 7 | Returns: 8 | The return code for the program. 9 | 10 | """ 11 | print("Hello, world!") 12 | 13 | 14 | if __name__ == "__main__": 15 | main() 16 | -------------------------------------------------------------------------------- /integration_tests/files/google_example.py: -------------------------------------------------------------------------------- 1 | def frobscottle(x): 2 | """Frobscottlize. 3 | 4 | Args: 5 | whizzpopper: Whiz and Pop. 
6 | 7 | """ 8 | x.fly_and_fart() 9 | -------------------------------------------------------------------------------- /integration_tests/files/missing_arg_type.py: -------------------------------------------------------------------------------- 1 | def double(x): 2 | """Double the argument. 3 | 4 | Args: 5 | x: The argument to double. 6 | 7 | Returns: 8 | Twice the given amount. 9 | 10 | """ 11 | return 2 * x 12 | -------------------------------------------------------------------------------- /integration_tests/files/numpy_example.py: -------------------------------------------------------------------------------- 1 | def frobscottle(x): 2 | """Frobscottlize. 3 | 4 | Parameters 5 | ---------- 6 | whizzpopper 7 | Whiz and Pop. 8 | 9 | """ 10 | x.fly_and_fart() 11 | -------------------------------------------------------------------------------- /integration_tests/files/problematic.py: -------------------------------------------------------------------------------- 1 | def problematic_function(arg1): 2 | """Returns double the value of the first argument. 3 | 4 | Args: 5 | a: The first argument. 6 | 7 | Returns: 8 | Two times the value of the first argument. 9 | 10 | """ 11 | return arg1 * 2 12 | 13 | 14 | def problematic_rst(): 15 | """""" 16 | -------------------------------------------------------------------------------- /integration_tests/files/sphinx_example.py: -------------------------------------------------------------------------------- 1 | def frobscottle(x): 2 | """Frobscottlize. 3 | 4 | :param whizzpopper: Whiz and Pop. 5 | 6 | """ 7 | x.fly_and_fart() 8 | -------------------------------------------------------------------------------- /integration_tests/files/strictness_example.py: -------------------------------------------------------------------------------- 1 | def f(x): 2 | """Missing x, but satisfies short.""" 3 | return x * 2 4 | -------------------------------------------------------------------------------- /integration_tests/files/two_spaces.py: -------------------------------------------------------------------------------- 1 | class Eschatology(object): 2 | def destruction(self, items, saved): 3 | """Generally, everything gets destroyed. 4 | 5 | Args: 6 | items: The whole world. 
7 | 8 | """ 9 | del items 10 | -------------------------------------------------------------------------------- /integration_tests/grammar_size.py: -------------------------------------------------------------------------------- 1 | """A script to check and report the size of the grammars.""" 2 | 3 | import importlib 4 | import inspect 5 | import os 6 | 7 | from darglint2.parse.grammar import BaseGrammar 8 | 9 | 10 | def convert_filename_to_module(filename): 11 | return filename[:-3].replace("/", ".") 12 | 13 | 14 | def get_python_modules_in_grammars(): 15 | basepath = os.path.join(os.getcwd(), "darglint2/parse/grammars") 16 | return [ 17 | (x, convert_filename_to_module(os.path.join("darglint2/parse/grammars", x))) 18 | for x in os.listdir(basepath) 19 | if x.endswith(".py") 20 | ] 21 | 22 | 23 | def get_grammars(module): 24 | return [ 25 | cls 26 | for (name, cls) in inspect.getmembers(module, inspect.isclass) 27 | if issubclass(cls, BaseGrammar) and cls is not BaseGrammar 28 | ] 29 | 30 | 31 | def get_productions_in_grammar(grammar): 32 | return len(grammar.productions) 33 | 34 | 35 | if __name__ == "__main__": 36 | modules = get_python_modules_in_grammars() 37 | count = { 38 | "google": 0, 39 | "sphinx": 0, 40 | "numpy": 0, 41 | } 42 | print("BY FILENAME") 43 | for grammar_type in count: 44 | for filename, filepath in filter( 45 | lambda x: x[0].startswith(grammar_type), modules 46 | ): 47 | mod = importlib.import_module(filepath) 48 | grammars = get_grammars(mod) 49 | amount = 0 50 | for grammar in grammars: 51 | amount += get_productions_in_grammar(grammar) 52 | count[grammar_type] += amount 53 | print("{} {}".format(filename.ljust(50), amount)) 54 | print("\nTOTALS") 55 | for grammar in count: 56 | print("{}:\t{}".format(grammar, count[grammar])) 57 | -------------------------------------------------------------------------------- /integration_tests/max_golden_profile.py: -------------------------------------------------------------------------------- 1 | import cProfile 2 | import json 3 | 4 | if __name__ == "__main__": 5 | with open("integration_tests/max_golden.json", "r") as fin: 6 | data = json.load(fin) 7 | assert len(data) == 1 8 | golden = data[0] 9 | print(golden["docstring"]) 10 | print() 11 | assert isinstance(golden["docstring"], str) 12 | cProfile.run('Docstring.from_google(golden["docstring"])') 13 | -------------------------------------------------------------------------------- /integration_tests/sources.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | from unittest import TestCase 3 | 4 | 5 | class SourceFileTestCase(TestCase): 6 | def assertWorks(self, filename): 7 | proc = subprocess.run(["darglint2", filename]) 8 | self.assertTrue( 9 | proc.returncode in {0, 1}, 10 | "Expected error code 0 or 1, but got {} for {}".format( 11 | proc.returncode, 12 | filename, 13 | ), 14 | ) 15 | 16 | def test_encoding_works(self): 17 | for encoding in [ 18 | "ascii", 19 | "utf8", 20 | "latin1", 21 | ]: 22 | self.assertWorks("integration_tests/files/example-{}.py".format(encoding)) 23 | -------------------------------------------------------------------------------- /integration_tests/test_flake8.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from flake8.options.manager import OptionManager 4 | 5 | from darglint2.config import get_config 6 | from darglint2.docstring.style import DocstringStyle 7 | from darglint2.flake8_entry import 
DarglintChecker 8 | from darglint2.strictness import Strictness 9 | 10 | 11 | class Flake8TestCase(TestCase): 12 | """Tests that flake8 config is parsed correctly.""" 13 | 14 | def test_config_parsed(self): 15 | default_config = get_config().get_default_instance() 16 | parser = OptionManager("", "") 17 | DarglintChecker.add_options(parser) 18 | 19 | options, args = parser.parse_args([]) 20 | DarglintChecker.parse_options(options) 21 | self.assertEqual(default_config.style, DarglintChecker.config.style) 22 | 23 | argv = ["--docstring-style=numpy", "--strictness=short"] 24 | options, args = parser.parse_args(argv) 25 | 26 | DarglintChecker.config = default_config 27 | DarglintChecker.parse_options(options) 28 | self.assertEqual(DarglintChecker.config.style, DocstringStyle.NUMPY) 29 | self.assertEqual( 30 | DarglintChecker.config.strictness, Strictness.SHORT_DESCRIPTION 31 | ) 32 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["poetry-core>=1.0.0"] 3 | build-backend = "poetry.core.masonry.api" 4 | 5 | [tool.poetry] 6 | name = "darglint2" 7 | version = "1.8.2" 8 | description = "A utility for ensuring docstrings stay up to date with the source code." 9 | authors = ["terrencepreilly"] 10 | maintainers = [ 11 | "Antti Kaihola <13725+akaihola@users.noreply.github.com>", 12 | "yfprojects ", 13 | ] 14 | license = "MIT" 15 | repository = "http://github.com/akaihola/darglint2" 16 | documentation = "https://akaihola.github.io/darglint2" 17 | keywords = ["documentation", "linter", "development"] 18 | classifiers = [ 19 | 'Intended Audience :: Developers', 20 | 'Topic :: Software Development :: Documentation', 21 | 'Topic :: Software Development :: Quality Assurance', 22 | 'License :: OSI Approved :: MIT License', 23 | 'Programming Language :: Python :: 3', 24 | 'Programming Language :: Python :: 3 :: Only', 25 | 'Programming Language :: Python :: 3.7', 26 | 'Programming Language :: Python :: 3.8', 27 | 'Programming Language :: Python :: 3.9', 28 | 'Programming Language :: Python :: 3.10', 29 | 'Programming Language :: Python :: 3.11', 30 | ] 31 | readme = "README.md" 32 | 33 | [tool.poetry.dependencies] 34 | python = "^3.7" 35 | 36 | [tool.poetry.group.dev.dependencies] 37 | bnf-to-cnf = { path = "bin/bnf_to_cnf", develop = true } 38 | doc_extract = { path = "bin/doc_extract", develop = true } 39 | 40 | [tool.poetry.group.lint] 41 | optional = true 42 | 43 | [tool.poetry.group.lint.dependencies] 44 | flake8 = "^3.7" 45 | mypy = "^0.812" 46 | pydocstyle = "^4.0" 47 | 48 | [tool.poetry.group.test] 49 | optional = true 50 | 51 | [tool.poetry.group.test.dependencies] 52 | pytest = "^7.2.2" 53 | tox = "^4.4.6" 54 | 55 | [tool.poetry.group.polyversion] 56 | optional = true 57 | 58 | [tool.poetry.group.polyversion.dependencies] 59 | sphinx_polyversion = { path = "bin/sphinx_polyversion", develop = true } 60 | 61 | [tool.poetry.group.docs] 62 | optional = true 63 | 64 | [tool.poetry.group.docs.dependencies] 65 | sphinx = "^5.3.0" 66 | sphinx-argparse = "^0.3.2" 67 | sphinx-rtd-theme-github-versions = "^1.1" 68 | sphinx-rtd-theme = "^1.2.0" 69 | myst-parser = "^1.0.0" 70 | sphinx-notfound-page = "^0.8.3" 71 | sphinx-copybutton = "^0.5.2" 72 | furo = "^2023.3.27" 73 | sphinxext-opengraph = "^0.8.2" 74 | sphinx-autobuild = "^2021.3.14" 75 | sphinx-design = "^0.4.1" 76 | 77 | [tool.poetry.scripts] 78 | darglint2 = "darglint2.driver:main" 79 | 80 | 
[tool.poetry.plugins."flake8.extension"] 81 | "DAR" = "darglint2.flake8_entry:DarglintChecker" 82 | 83 | [tool.black] 84 | target-version = ['py37'] 85 | extend-exclude = ['^integration_tests/files'] 86 | 87 | [tool.isort] 88 | profile = "black" 89 | skip_gitignore = true 90 | 91 | # tool.flake8 -> tox.ini 92 | 93 | [tool.mypy] 94 | packages = "darglint2" 95 | -------------------------------------------------------------------------------- /tests/.pydocstyle.ini: -------------------------------------------------------------------------------- 1 | [pydocstyle] 2 | ignore=D100,D101,D102 3 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akaihola/darglint2/72c51e90c187610c52fd5076b4c28e1202ece84f/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | """Common pytest fixtures for all test modules""" 2 | 3 | import pytest 4 | 5 | import darglint2.config 6 | 7 | 8 | @pytest.fixture(autouse=True) 9 | def default_config(): 10 | # darglint2 automatically reads configuration from config files in the dev 11 | # environment. We must override config so that we maintain a controlled 12 | # test environment. 13 | darglint2.config._config = darglint2.config.Configuration.get_default_instance() 14 | -------------------------------------------------------------------------------- /tests/test_abstract_callable_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from unittest import TestCase 3 | 4 | from darglint2.analysis.abstract_callable_visitor import AbstractCallableVisitor 5 | 6 | from .utils import reindent 7 | 8 | # from darglint2.analysis.analysis_visitor import AnalysisVisitor 9 | 10 | 11 | class PureAbstractVisitorTests(TestCase): 12 | def analyzeAbstract(self, program): 13 | function = ast.parse(reindent(program)).body[0] 14 | # visitor = AnalysisVisitor() 15 | visitor = AbstractCallableVisitor() 16 | visitor.visit(function) 17 | return visitor 18 | 19 | def check_abstract_decoration(self, program, result=True): 20 | visitor = self.analyzeAbstract(program) 21 | self.assertFalse(visitor.is_abstract) 22 | visitor = self.analyzeAbstract("@abstractmethod\n" + reindent(program)) 23 | self.assertEqual(visitor.is_abstract, result) 24 | 25 | def check_abstract_toggle_doc(self, program, result=True, doc="None"): 26 | self.check_abstract_decoration(program.format(docstring=""), result) 27 | self.check_abstract_decoration( 28 | program.format(docstring='"""{}"""'.format(doc)), result 29 | ) 30 | 31 | def test_pass(self): 32 | program = r""" 33 | def f(): 34 | {docstring} 35 | pass 36 | """ 37 | self.check_abstract_toggle_doc(program) 38 | 39 | def test_return(self): 40 | program = r""" 41 | def f(): 42 | {docstring} 43 | return 2 44 | """ 45 | self.check_abstract_toggle_doc(program, False) 46 | 47 | def test_ellipsis(self): 48 | program = r""" 49 | def f(): 50 | {docstring} 51 | ... 
52 | """ 53 | self.check_abstract_toggle_doc(program) 54 | 55 | def test_constant(self): 56 | program = r""" 57 | def f(): 58 | {docstring} 59 | 42 60 | """ 61 | self.check_abstract_toggle_doc(program, False) 62 | 63 | def test_not_implemented_exception(self): 64 | program = r""" 65 | def f(): 66 | {docstring} 67 | raise NotImplementedError 68 | """ 69 | self.check_abstract_toggle_doc(program) 70 | 71 | def test_not_implemented_exception_reason(self): 72 | program = r""" 73 | def f(): 74 | {docstring} 75 | raise NotImplementedError("Malte did not want to.") 76 | """ 77 | self.check_abstract_toggle_doc(program) 78 | 79 | def test_not_implemented(self): 80 | program = r""" 81 | def f(): 82 | {docstring} 83 | return NotImplemented 84 | """ 85 | self.check_abstract_toggle_doc(program) 86 | 87 | def test_only_docstring(self): 88 | program = r''' 89 | def f(): 90 | """Documented empty body.""" 91 | ''' 92 | self.check_abstract_decoration(program) 93 | -------------------------------------------------------------------------------- /tests/test_analysis_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from unittest import TestCase 3 | 4 | from darglint2.analysis.analysis_visitor import AnalysisVisitor 5 | 6 | from .utils import reindent 7 | 8 | 9 | class AnalysisVisitorTests(TestCase): 10 | def assertFound(self, program, attribute, args, transform=None): 11 | """Assert that the given attribute values were found. 12 | 13 | Args: 14 | program: The program to run the analysis on. 15 | attribute: The attribute which should be checked. 16 | args: The value(s) which should exist in the attribute. 17 | transform: If supplied, a function which transforms 18 | the attribute values prior to comparison. 19 | 20 | Returns: 21 | The visitor, in case you want to do more analysis. 22 | 23 | """ 24 | function = ast.parse(reindent(program)) 25 | visitor = AnalysisVisitor() 26 | visitor.visit(function) 27 | actual = getattr(visitor, attribute) 28 | if transform: 29 | if isinstance(actual, list): 30 | actual = list(map(transform, actual)) 31 | elif isinstance(actual, set): 32 | actual = set(map(transform, actual)) 33 | else: 34 | actual = transform(actual) 35 | self.assertEqual( 36 | actual, 37 | args, 38 | ) 39 | return visitor 40 | 41 | def test_analyze_single_function_with_everything(self): 42 | program = r''' 43 | def f(x: int) -> int: 44 | """Halves the argument.""" 45 | assert x > 0 46 | ret = x / 2 47 | if ret < 0: 48 | raise Exception('It\'s less than 0!') 49 | yield None 50 | return ret 51 | ''' 52 | self.assertFound(program, "arguments", ["x"]) 53 | self.assertFound(program, "types", ["int"]) 54 | self.assertFound(program, "exceptions", {"Exception"}) 55 | 56 | # Just check that an assert is present by registering a number. 
57 | self.assertFound(program, "asserts", [1], lambda x: 1) 58 | self.assertFound(program, "returns", [1], lambda x: 1) 59 | 60 | self.assertFound(program, "variables", ["ret"], lambda x: x.id) 61 | self.assertFound(program, "yields", [1], lambda x: 1) 62 | 63 | def test_only_current_function_checked(self): 64 | program = r''' 65 | def f(x): 66 | """Halves the argument.""" 67 | def _inner(): 68 | r = x / 2 69 | if r < 0: 70 | raise Exception('It\'s less than 0!') 71 | return r 72 | return _inner() 73 | ''' 74 | self.assertFound(program, "returns", [1], lambda x: 1) 75 | 76 | def test_finds_abstract(self): 77 | program = r''' 78 | @abstractmethod 79 | def f(x): 80 | """Halves the argument.""" 81 | pass 82 | ''' 83 | function = ast.parse(reindent(program)) 84 | visitor = AnalysisVisitor() 85 | visitor.visit(function) 86 | self.assertTrue(visitor.is_abstract, "Should have been marked abstract.") 87 | 88 | def test_finds_not_abstract(self): 89 | program = r''' 90 | def f(x): 91 | """Halves the argument.""" 92 | return x / 2 93 | ''' 94 | function = ast.parse(reindent(program)) 95 | visitor = AnalysisVisitor() 96 | visitor.visit(function) 97 | self.assertFalse(visitor.is_abstract, "Should have been marked abstract.") 98 | -------------------------------------------------------------------------------- /tests/test_argument_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from unittest import TestCase 3 | 4 | from darglint2.analysis.argument_visitor import ArgumentVisitor 5 | 6 | from .utils import reindent, require_python 7 | 8 | 9 | class ArgumentVisitorTests(TestCase): 10 | def assertFound(self, program, *args): 11 | """Assert that the given arguments were found. 12 | 13 | Args: 14 | program: The program to run the analysis on. 15 | args: The arguments we expect to find. If none, 16 | then this is an empty list. 17 | 18 | Returns: 19 | The visitor, in case you want to do more analysis. 
20 | 21 | """ 22 | function = ast.parse(reindent(program)).body[0] 23 | visitor = ArgumentVisitor() 24 | visitor.visit(function) 25 | self.assertEqual(sorted(visitor.arguments), sorted(args)) 26 | return visitor 27 | 28 | def assertTypesFound(self, program, *types): 29 | function = ast.parse(reindent(program)).body[0] 30 | visitor = ArgumentVisitor() 31 | visitor.visit(function) 32 | self.assertEqual(sorted(visitor.types), sorted(types)) 33 | return visitor 34 | 35 | def test_no_arguments(self): 36 | program = """ 37 | def f(): 38 | return 3 39 | """ 40 | self.assertFound(program) 41 | 42 | def test_one_argument(self): 43 | program = """ 44 | def f(x): 45 | return x * 2 46 | """ 47 | self.assertFound(program, "x") 48 | 49 | def test_many_arguments(self): 50 | program = """ 51 | def f(a, b, c, d, e, f): 52 | return a + b + c + d + e + f 53 | """ 54 | self.assertFound( 55 | program, 56 | "a", 57 | "b", 58 | "c", 59 | "d", 60 | "e", 61 | "f", 62 | ) 63 | 64 | def test_keyword_arguments(self): 65 | program = """ 66 | def f(x = 3, y = "hello"): 67 | return y * x 68 | """ 69 | self.assertFound( 70 | program, 71 | "x", 72 | "y", 73 | ) 74 | 75 | def test_keyword_only_arguments(self): 76 | program = """ 77 | def f(x, y, *, z): 78 | return "{}: {}".format(x * y, z) 79 | """ 80 | self.assertFound(program, "x", "y", "z") 81 | 82 | @require_python(3, 8) 83 | def test_order_only_arguments(self): 84 | program = """ 85 | def f(x, y, /, z): 86 | return f'{x * y}: {z}' 87 | """ 88 | self.assertFound(program, "x", "y", "z") 89 | 90 | @require_python(3, 8) 91 | def test_order_and_keyword_arguments(self): 92 | program = """ 93 | def f(x, y, /, z, *, q): 94 | return x + y + z + q 95 | """ 96 | self.assertFound(program, "x", "y", "z", "q") 97 | 98 | def test_method(self): 99 | program = """ 100 | class A(object): 101 | def f(self): 102 | return "hello" 103 | """ 104 | self.assertFound(program, "self") 105 | 106 | def test_argument_type_inline(self): 107 | program = """ 108 | def f(x: int) -> float: 109 | return x * 0.5 110 | """ 111 | self.assertTypesFound(program, "int") 112 | 113 | def test_no_argument_type(self): 114 | program = """ 115 | def f(x) -> str: 116 | return "{}'s".format(x) 117 | """ 118 | self.assertTypesFound(program, None) 119 | 120 | def test_multiple_types_ordered(self): 121 | program = """ 122 | def f(x: int, y: str) -> str: 123 | return y * x 124 | """ 125 | self.assertTypesFound(program, "int", "str") 126 | -------------------------------------------------------------------------------- /tests/test_assert_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from unittest import TestCase 3 | 4 | from darglint2.analysis.assert_visitor import AssertVisitor 5 | 6 | from .utils import reindent 7 | 8 | 9 | class AssertVisitorTests(TestCase): 10 | def assertFound(self, program, n=0): 11 | tree = ast.parse(reindent(program)) 12 | visitor = AssertVisitor() 13 | visitor.visit(tree) 14 | self.assertEqual( 15 | len(visitor.asserts), 16 | n, 17 | "Expected to encounter {} asserts, but encountered {}".format( 18 | n, 19 | len(visitor.asserts), 20 | ), 21 | ) 22 | 23 | def test_no_asserts(self): 24 | program = r""" 25 | def f(x): 26 | return x * 2 27 | """ 28 | self.assertFound(program) 29 | 30 | def test_one_assertion(self): 31 | program = r""" 32 | def f(x): 33 | assert isinstance(x, int), "Expected an integer." 
34 | return x * 2 35 | """ 36 | self.assertFound(program, 1) 37 | 38 | def test_two_assertions(self): 39 | program = r""" 40 | def f(x): 41 | assert isinstance(x, int), "Expected an integer." 42 | assert x > 0, "Expected a positive, non-zero integer." 43 | return 1 / x 44 | """ 45 | self.assertFound(program, 2) 46 | 47 | def test_assertion_in_async_func(self): 48 | program = r""" 49 | async def guaranteed(x): 50 | assert x 51 | return x 52 | """ 53 | self.assertFound(program, 1) 54 | -------------------------------------------------------------------------------- /tests/test_config.py: -------------------------------------------------------------------------------- 1 | """Tests configuration scripts.""" 2 | 3 | from random import choice, randint 4 | from string import ascii_letters 5 | from unittest import TestCase, mock 6 | 7 | from darglint2.config import ( 8 | POSSIBLE_CONFIG_FILENAMES, 9 | LogLevel, 10 | find_config_file_in_path, 11 | get_logger, 12 | walk_path, 13 | ) 14 | from darglint2.utils import ConfigurationContext 15 | 16 | 17 | class WalkPathTestCase(TestCase): 18 | """Tests the walk_path function.""" 19 | 20 | @mock.patch("darglint2.config.os.getcwd") 21 | def test_at_root_yields_only_once(self, mock_getcwd): 22 | """We should only get root once. # noqa""" 23 | mock_getcwd.return_value = "/" 24 | path_walker = walk_path() 25 | self.assertEqual(next(path_walker), "/") 26 | with self.assertRaises(StopIteration): 27 | next(path_walker) 28 | 29 | @mock.patch("darglint2.config.os.getcwd") 30 | def test_really_long_path(self, mock_getcwd): 31 | directories = [ 32 | "".join([choice(ascii_letters + "_-") for _ in range(randint(1, 10))]) 33 | for __ in range(randint(10, 30)) 34 | ] 35 | cwd = "/" + "/".join(directories) 36 | mock_getcwd.return_value = cwd 37 | path_walker = walk_path() 38 | paths_walked = [x for x in path_walker] 39 | self.assertEqual( 40 | len(paths_walked), 41 | len(directories) + 1, 42 | "Should have had {} but had {} paths.".format( 43 | len(directories), 44 | len(paths_walked) + 1, 45 | ), 46 | ) 47 | 48 | 49 | class FindConfigFileInPathTestCase(TestCase): 50 | """Test that the config file is being found.""" 51 | 52 | @mock.patch("darglint2.config.configparser.ConfigParser") 53 | @mock.patch("darglint2.config.os.listdir") 54 | def test_filename_checked(self, mock_listdir, mock_ConfigParser): 55 | """Check that only the necessary filenames are identified. 
# noqa""" 56 | fake_files = [ 57 | "".join([choice(ascii_letters + "_-") for _ in range(randint(5, 10))]) 58 | for _ in range(10) 59 | ] 60 | mock_listdir.return_value = fake_files + list(POSSIBLE_CONFIG_FILENAMES) 61 | 62 | config_parser = mock.MagicMock() 63 | mock_ConfigParser.return_value = config_parser 64 | 65 | contents_checked = list() 66 | 67 | def read_file(filename): 68 | contents_checked.append(filename) 69 | return mock.MagicMock() 70 | 71 | config_parser.read = read_file 72 | 73 | find_config_file_in_path("./") 74 | 75 | self.assertEqual( 76 | set(contents_checked), {"./" + x for x in POSSIBLE_CONFIG_FILENAMES} 77 | ) 78 | 79 | 80 | class LoggingTestCase(TestCase): 81 | def test_log_level_set_by_config(self): 82 | with ConfigurationContext(): 83 | logger = get_logger() 84 | self.assertEqual(logger.level, LogLevel.CRITICAL.value) 85 | with ConfigurationContext(log_level=LogLevel.ERROR): 86 | logger = get_logger() 87 | self.assertEqual(logger.level, LogLevel.ERROR.value) 88 | -------------------------------------------------------------------------------- /tests/test_custom_assert.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from unittest.mock import Mock, patch 3 | 4 | from darglint2.config import AssertStyle 5 | from darglint2.custom_assert import Assert 6 | from darglint2.utils import ConfigurationContext 7 | 8 | 9 | class AssertTestCase(TestCase): 10 | def test_can_configure_to_raise(self): 11 | with ConfigurationContext( 12 | assert_style=AssertStyle.RAISE, 13 | ): 14 | message = "My Message" 15 | with self.assertRaises(AssertionError) as exc: 16 | Assert(False, message) 17 | self.assertTrue(message in str(exc.exception)) 18 | 19 | @patch("darglint2.custom_assert.get_logger") 20 | def test_logs_by_default(self, mock_get_logger): 21 | mock_logger = Mock() 22 | mock_logger.error = Mock() 23 | mock_get_logger.return_value = mock_logger 24 | message = "My Message" 25 | with ConfigurationContext(): 26 | Assert(False, message) 27 | self.assertTrue(mock_logger.error.called) 28 | self.assertEqual( 29 | mock_logger.error.call_args[0][0], 30 | message, 31 | ) 32 | -------------------------------------------------------------------------------- /tests/test_error_report.py: -------------------------------------------------------------------------------- 1 | """Tests for the error reporting class.""" 2 | 3 | import ast 4 | from unittest import TestCase 5 | 6 | from darglint2.config import Configuration 7 | from darglint2.error_report import ErrorReport 8 | from darglint2.errors import EmptyDescriptionError 9 | from darglint2.function_description import get_function_descriptions 10 | 11 | 12 | def _get_function_description(program): 13 | tree = ast.parse(program) 14 | return get_function_descriptions(tree)[0] 15 | 16 | 17 | class ErrorReportMessageTemplateTest(TestCase): 18 | """Test the ErrorReport templating. 19 | 20 | The message formatting syntax and variable names is 21 | taken primarily from pylint. That will allow any users 22 | familiar with pylint to use this utility with some ease. 23 | Format strings are also very intuitive for Pythonistas. 
24 | 25 | """ 26 | 27 | def setUp(self): 28 | """Make a generic function to raise errors about.""" 29 | self.function_description = _get_function_description( 30 | "\n".join( 31 | [ 32 | "def top_level_function(arg):", 33 | ' """My docstring.', 34 | "", 35 | " Args:", 36 | " arg:", 37 | "", 38 | ' """', 39 | " return 1", 40 | ] 41 | ) 42 | ) 43 | 44 | # Give a line number so that it can be sorted. 45 | self.function_description.lineno = 0 46 | 47 | def test_message_template_can_be_empty(self): 48 | """Ensure the error report representation can be empty.""" 49 | message = "Discard me." 50 | error = EmptyDescriptionError( 51 | self.function_description, 52 | message, 53 | ) 54 | filename = "/Some/File/Discarded.py" 55 | with Configuration(message_template="").context(): 56 | error_report = ErrorReport(errors=[error], filename=filename) 57 | self.assertEqual(str(error_report), "") 58 | 59 | def test_format_string_only_msg_id(self): 60 | """Ensure that message template can have one item.""" 61 | message = "This message is missing a description." 62 | error = EmptyDescriptionError( 63 | self.function_description, 64 | message, 65 | ) 66 | filename = "/Users/ronald_vermillion/great_donuts.ijs" 67 | with Configuration(message_template="{msg_id}").context(): 68 | error_report = ErrorReport(errors=[error], filename=filename) 69 | self.assertEqual(str(error_report), EmptyDescriptionError.error_code) 70 | 71 | def test_format_can_include_string_constants(self): 72 | """Ensure that string constants can be in message.""" 73 | message = "The Message!" 74 | error = EmptyDescriptionError( 75 | self.function_description, 76 | message, 77 | ) 78 | filename = "./some_filename.py" 79 | with Configuration(message_template="({msg}) :)").context(): 80 | error_report = ErrorReport(errors=[error], filename=filename) 81 | self.assertEqual( 82 | str(error_report), "(Empty description: e The Message!) :)" 83 | ) 84 | 85 | def test_all_attributes(self): 86 | """Test against all possible attributes.""" 87 | message = "The Message!" 88 | error = EmptyDescriptionError( 89 | self.function_description, 90 | message, 91 | ) 92 | filename = "./some_filename.py" 93 | message_template = " ".join( 94 | [ 95 | "{msg}", 96 | "{msg_id}", 97 | "{line}", 98 | "{path}", 99 | "{obj}", 100 | ] 101 | ) 102 | with Configuration(message_template=message_template).context(): 103 | error_report = ErrorReport(errors=[error], filename=filename) 104 | 105 | # This will raise an error if the template 106 | # parameters are incorrect. 107 | str(error_report) 108 | -------------------------------------------------------------------------------- /tests/test_function_scoped_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from unittest import TestCase 3 | 4 | from darglint2.analysis.argument_visitor import ArgumentVisitor 5 | from darglint2.analysis.function_scoped_visitor import FunctionScopedVisitorMixin 6 | from darglint2.analysis.return_visitor import ReturnVisitor 7 | 8 | from .utils import reindent 9 | 10 | 11 | class ScopedReturnAndArgumentVisitor( 12 | FunctionScopedVisitorMixin, ArgumentVisitor, ReturnVisitor 13 | ): 14 | pass 15 | 16 | 17 | class FunctionScopedVisitorMixinTests(TestCase): 18 | def assertArgsFound(self, program, *args): 19 | """Assert that the given arguments were present. 20 | 21 | Args: 22 | program: The program to analyze. 23 | args: The arguments which should be present (or empty if none.) 
24 | 25 | Returns: 26 | The visitor, in case further analysis is required. 27 | 28 | """ 29 | function = ast.parse(reindent(program)).body[0] 30 | visitor = ScopedReturnAndArgumentVisitor() 31 | visitor.visit(function) 32 | self.assertEqual( 33 | sorted(visitor.arguments), 34 | sorted(args), 35 | ) 36 | 37 | def assertReturnFound(self, program): 38 | """Assert that the return was found. 39 | 40 | Args: 41 | program: The program to run the analysis on. 42 | 43 | Returns: 44 | The visitor, in case you want to do more analysis. 45 | 46 | """ 47 | function = ast.parse(reindent(program)).body[0] 48 | visitor = ScopedReturnAndArgumentVisitor() 49 | visitor.visit(function) 50 | self.assertTrue(visitor.returns) 51 | return visitor 52 | 53 | def assertNoReturnFound(self, program): 54 | """Assert that no return was found. 55 | 56 | Args: 57 | program: The program to run the analysis on. 58 | 59 | Returns: 60 | The visitor, in case you want to do more analysis. 61 | 62 | """ 63 | function = ast.parse(reindent(program)).body[0] 64 | visitor = ScopedReturnAndArgumentVisitor() 65 | visitor.visit(function) 66 | self.assertEqual(visitor.returns, []) 67 | return visitor 68 | 69 | def test_nested_return(self): 70 | program = r""" 71 | def f(): 72 | def g(): 73 | return 'Hello nesting!' 74 | print(g()) 75 | """ 76 | self.assertNoReturnFound(program) 77 | 78 | def test_deeply_nested_return(self): 79 | program = r""" 80 | def f(): 81 | def g(): 82 | def h(): 83 | def i(): 84 | return 'Hello deeply nesting!' 85 | print(i()) 86 | h() 87 | g() 88 | """ 89 | self.assertNoReturnFound(program) 90 | 91 | def test_only_outermost_captured(self): 92 | """Test that only the outermost function is analyzed. 93 | 94 | Rather than capturing all nested ones here, we'll extract the 95 | functions separately, and run analysis on them independently of 96 | one another. 97 | 98 | That should simplify the whole process by making it a bit more 99 | recursive. Also, it will allow us to ignore nesting if we want. 100 | 101 | """ 102 | program = r""" 103 | def f(): 104 | def g(): 105 | return 3 106 | yield g() 107 | """ 108 | self.assertNoReturnFound(program) 109 | 110 | def test_outer_async_function_captured(self): 111 | program = r""" 112 | async def f(): 113 | return 3 114 | """ 115 | self.assertReturnFound(program) 116 | 117 | def test_inner_async_skipped(self): 118 | program = r""" 119 | async def f(): 120 | async def g(): 121 | return 3 122 | yield await g() 123 | """ 124 | self.assertNoReturnFound(program) 125 | 126 | def test_lambda_forms_scope(self): 127 | """A lambda must form its own scope, to prevent leaking into parent.""" 128 | program = r""" 129 | def f(xs): 130 | ys = copy(xs) 131 | ys.sort(key=lambda x: x[0]) 132 | return ys 133 | """ 134 | self.assertReturnFound(program) 135 | self.assertArgsFound(program, "xs") 136 | -------------------------------------------------------------------------------- /tests/test_generated_grammar.py: -------------------------------------------------------------------------------- 1 | r"""Tests for a grammar generated by bnf_to_cnf, using cyk. 2 | 3 | Original grammar: 4 | 5 | start: 6 | 7 | ::= 8 | 9 | | 10 | | 11 | 12 | ::= 13 | 14 | | 15 | | @OutOfOrder 16 | 17 | ::= 18 | 19 | | 20 | 21 | ::= 22 | 23 | | 24 | | 25 | | 26 | | 27 | 28 | ::= "GTT\.intransitive_verb" 29 | ::= "GTT\.transitive_verb" 30 | ::= "GTT\.noun" 31 | ::= "GTT\.adjective" 32 | 33 | 34 | # Where the following tokens are lexed. 
35 | 36 | ::= 37 | "Hegh" 38 | | "quS" 39 | 40 | ::= 41 | "HoH" 42 | | "qIp" 43 | 44 | ::= 45 | "Duj" 46 | | "loD" 47 | | "puq" 48 | | "bIQ" 49 | 50 | ::= 51 | "val" 52 | | "QIp" 53 | 54 | """ 55 | 56 | from unittest import TestCase 57 | 58 | from darglint2.parse.cyk import parse 59 | from darglint2.parse.grammar import BaseGrammar 60 | from darglint2.parse.grammar import Production as P 61 | from darglint2.token import BaseTokenType, Token 62 | 63 | 64 | class OutOfOrder(BaseException): 65 | pass 66 | 67 | 68 | class GTT(BaseTokenType): 69 | intransitive_verb = 0 70 | transitive_verb = 1 71 | noun = 2 72 | adjective = 3 73 | 74 | unknown = 4 75 | 76 | 77 | # Generated on 2019-07-06 17:38:20.963686 78 | 79 | 80 | class Grammar(BaseGrammar): 81 | productions = [ 82 | P( 83 | "sentence", 84 | ([], "noun_phrase", "trans_verb_phrase", 0), 85 | ([], "transitive_verb", "noun_phrase", 0), 86 | (GTT.transitive_verb, 0), 87 | ([], "intransitive_verb", "noun_phrase", 0), 88 | (GTT.intransitive_verb, 0), 89 | ([OutOfOrder], "noun_phrase", "intransitive_verb", 0), 90 | ), 91 | P( 92 | "trans_verb_phrase", 93 | ([], "transitive_verb", "noun_phrase", 0), 94 | (GTT.transitive_verb, 0), 95 | ), 96 | P( 97 | "noun_phrase", 98 | ([], "noun", "adjective", 0), 99 | ([], "noun", "noun", 0), 100 | ([], "noun", "noun_phrase0", 0), 101 | ([], "noun", "noun_phrase1", 0), 102 | (GTT.noun, 0), 103 | ), 104 | P("intransitive_verb", (GTT.intransitive_verb, 0)), 105 | P("transitive_verb", (GTT.transitive_verb, 0)), 106 | P("noun", (GTT.noun, 0)), 107 | P("adjective", (GTT.adjective, 0)), 108 | P("noun_phrase0", ([], "adjective", "noun", 0)), 109 | P("noun_phrase1", ([], "noun", "adjective", 0)), 110 | ] 111 | start = "sentence" 112 | 113 | 114 | def _lex(sentence): 115 | lookup = { 116 | "Hegh": GTT.intransitive_verb, 117 | "quS": GTT.intransitive_verb, 118 | "HoH": GTT.transitive_verb, 119 | "qIp": GTT.transitive_verb, 120 | "Duj": GTT.noun, 121 | "loD": GTT.noun, 122 | "puq": GTT.noun, 123 | "bIQ": GTT.noun, 124 | "val": GTT.adjective, 125 | "QIp": GTT.adjective, 126 | } 127 | for word in sentence.split(): 128 | yield Token( 129 | value=word, 130 | token_type=lookup.get(word, GTT.unknown), 131 | line_number=0, 132 | ) 133 | 134 | 135 | def lex(sentence): 136 | return list(_lex(sentence)) 137 | 138 | 139 | class GeneratedGrammarTest(TestCase): 140 | def test_valid_sentences(self): 141 | sentences = [ 142 | "Hegh puq", 143 | "loD HoH puq Duj", 144 | "qIp bIQ QIp", 145 | "puq val qIp loD", 146 | ] 147 | for sentence in sentences: 148 | self.assertTrue( 149 | parse(Grammar, lex(sentence)), 150 | 'Expected to parse "{}", but failed.'.format(sentence), 151 | ) 152 | 153 | def test_invalid_sentences(self): 154 | bad_sentences = [ 155 | # Unrecognized symbol 156 | "unrecognized puq" 157 | # Incorrect structure 158 | "qIp qIp" 159 | ] 160 | for sentence in bad_sentences: 161 | self.assertFalse( 162 | parse(Grammar, lex(sentence)), 163 | 'Unexpectedly parsed "{}"'.format( 164 | sentence, 165 | ), 166 | ) 167 | -------------------------------------------------------------------------------- /tests/test_grammar.py: -------------------------------------------------------------------------------- 1 | """Tests for the Grammar class.""" 2 | 3 | from unittest import TestCase 4 | 5 | from darglint2.parse.grammar import BaseGrammar 6 | from darglint2.parse.grammar import Production as P 7 | 8 | 9 | class GrammarTest(TestCase): 10 | """Tests for the grammar class. 
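A usage sketch for the generated grammar in tests/test_generated_grammar.py, using only names defined in that module (Grammar, lex, and darglint2.parse.cyk.parse). Note that the bad_sentences list in test_invalid_sentences is missing a comma between its two entries, so implicit string concatenation collapses them into one string; the sketch below checks the two intended cases separately:

# Sketch: exercising the generated CNF grammar directly.
assert parse(Grammar, lex("loD HoH puq Duj")), "a grammatical sentence should parse"
assert not parse(Grammar, lex("unrecognized puq")), "unknown tokens should not parse"
assert not parse(Grammar, lex("qIp qIp")), "a malformed sentence should not parse"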
11 | 12 | Since most of the structure/logic of the docstring is going 13 | to be moved to data, it makes sense to represent this with 14 | a flexible data structure. 15 | 16 | """ 17 | 18 | def test_grammar_must_have_productions_and_start(self): 19 | class BadGrammar(BaseGrammar): 20 | pass 21 | 22 | with self.assertRaises(Exception): 23 | BadGrammar() 24 | 25 | def test_only_productions_and_start_necessary(self): 26 | class GoodGrammar(BaseGrammar): 27 | productions = [] 28 | start = "" 29 | 30 | GoodGrammar() 31 | 32 | 33 | class ProductionTest(TestCase): 34 | def test_can_create_production(self): 35 | P("sentence", ("verb", "noun")) 36 | 37 | def test_can_create_production_with_annotations(self): 38 | class OutOfOrder(BaseException): 39 | pass 40 | 41 | P.with_annotations("sentence", [OutOfOrder], ("noun", "verb")) 42 | -------------------------------------------------------------------------------- /tests/test_node.py: -------------------------------------------------------------------------------- 1 | """Tests for the parser Node class.""" 2 | 3 | from random import randint 4 | from unittest import TestCase 5 | 6 | from darglint2.node import CykNode 7 | 8 | 9 | class CykNodeTest(TestCase): 10 | def build_binary_search_tree(self, root, value): 11 | if value < root.value: 12 | if not root.lchild: 13 | root.lchild = CykNode(symbol="", value=value) 14 | else: 15 | self.build_binary_search_tree(root.lchild, value) 16 | elif value > root.value: 17 | if not root.rchild: 18 | root.rchild = CykNode(symbol="", value=value) 19 | else: 20 | self.build_binary_search_tree(root.rchild, value) 21 | else: 22 | pass 23 | 24 | def assertIsSorted(self, arr, reverse=False): 25 | for i in range(len(arr) - 1): 26 | if reverse: 27 | self.assertTrue( 28 | arr[i] >= arr[i + 1], 29 | "{} is not sorted.".format(arr), 30 | ) 31 | else: 32 | self.assertTrue( 33 | arr[i] <= arr[i + 1], 34 | "{} is not sorted.".format(arr), 35 | ) 36 | 37 | def test_in_order_traversal(self): 38 | node = CykNode(symbol="", value=randint(-100, 100)) 39 | for i in range(20): 40 | self.build_binary_search_tree(node, randint(-100, 100)) 41 | values = [x.value for x in node.in_order_traverse()] 42 | self.assertIsSorted(values) 43 | -------------------------------------------------------------------------------- /tests/test_parser_combinator.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from darglint2.node import CykNode 4 | from darglint2.parse.combinator import parser_combinator 5 | from darglint2.parse.cyk import parse 6 | from darglint2.parse.grammar import BaseGrammar 7 | from darglint2.parse.grammar import Production as P 8 | from darglint2.token import BaseTokenType, Token 9 | 10 | 11 | class PoetryTokenType(BaseTokenType): 12 | WORD = 1 13 | NEWLINE = 2 14 | 15 | 16 | # This grammar requires that a poem end in a newline. 17 | class TotalPoetryGrammar(BaseGrammar): 18 | productions = [ 19 | P("word", (PoetryTokenType.WORD, 0)), 20 | P("newline", (PoetryTokenType.NEWLINE, 0)), 21 | P("line", ([], "word", "newline", 0), ([], "word", "line", 0)), 22 | P("stanza", ([], "line", "newline", 0), ([], "line", "stanza", 0)), 23 | P( 24 | "poem", 25 | # A single-stanza poem. 26 | ([], "line", "stanza", 0), 27 | # A multi-stanza poem. 
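A minimal sketch of the CykNode API that tests/test_node.py relies on: nodes expose symbol, value, lchild and rchild, and in_order_traverse() yields nodes left-to-right, so a binary search tree comes back in sorted order. Names are taken from the test; the tiny tree is hand-built for illustration.

# Sketch: a tiny hand-built binary search tree of CykNodes.
root = CykNode(symbol="", value=2)
root.lchild = CykNode(symbol="", value=1)
root.rchild = CykNode(symbol="", value=3)
values = [node.value for node in root.in_order_traverse()]
assert values == [1, 2, 3]  # in-order traversal of a BST yields sorted values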
28 | ([], "stanza", "stanza", 0), 29 | ), 30 | ] 31 | 32 | start = "poem" 33 | 34 | 35 | class StanzaGrammar(BaseGrammar): 36 | productions = [ 37 | P("word", (PoetryTokenType.WORD, 0)), 38 | P("newline", (PoetryTokenType.NEWLINE, 0)), 39 | P("line", ([], "word", "newline", 0), ([], "word", "line", 0)), 40 | P("stanza", ([], "line", "newline", 0), ([], "line", "stanza", 0)), 41 | ] 42 | 43 | start = "stanza" 44 | 45 | 46 | def combine(*nodes, encountered=False): 47 | if len(nodes) == 1: 48 | if not encountered: 49 | n = nodes[0] 50 | n.symbol = "poem" 51 | return n 52 | else: 53 | return nodes[0] 54 | else: 55 | return CykNode( 56 | symbol="poem", lchild=nodes[0], rchild=combine(*nodes[1:], encountered=True) 57 | ) 58 | 59 | 60 | def lex(poem): 61 | tokens = list() 62 | word = "" 63 | i = 0 64 | for letter in poem: 65 | if letter == "\n": 66 | if word: 67 | tokens.append( 68 | Token( 69 | value=word, 70 | token_type=PoetryTokenType.WORD, 71 | line_number=i, 72 | ) 73 | ) 74 | word = "" 75 | tokens.append( 76 | Token( 77 | value="\n", 78 | token_type=PoetryTokenType.NEWLINE, 79 | line_number=i, 80 | ) 81 | ) 82 | i += 1 83 | elif letter.isspace(): 84 | if word: 85 | tokens.append( 86 | Token( 87 | value=word, 88 | token_type=PoetryTokenType.WORD, 89 | line_number=i, 90 | ) 91 | ) 92 | word = "" 93 | else: 94 | word += letter 95 | if word: 96 | tokens.append( 97 | Token( 98 | value=word, 99 | token_type=PoetryTokenType.WORD, 100 | line_number=i, 101 | ) 102 | ) 103 | return tokens 104 | 105 | 106 | def lookup(*args): 107 | return [lambda x: parse(StanzaGrammar, x)] 108 | 109 | 110 | def top_parse(tokens): 111 | ret = list() 112 | curr = list() 113 | for token in tokens: 114 | curr.append(token) 115 | if len(curr) > 1: 116 | if ( 117 | curr[-1].token_type == PoetryTokenType.NEWLINE 118 | and curr[-2].token_type == PoetryTokenType.NEWLINE 119 | ): 120 | ret.append(curr) 121 | curr = list() 122 | if curr: 123 | ret.append(curr) 124 | return ret 125 | 126 | 127 | poems = [ 128 | "Roly poly\nSomething holey\n\nIn and out\nAnd round about.\n\n", 129 | "In the braken brambles\nHides a scruffy vagrant\n" "Wearied out from shambles\n\n", 130 | "A\n\nC\n\n", 131 | ] 132 | 133 | 134 | class ParserCombinatorTests(TestCase): 135 | def test_total_grammar(self): 136 | for poem in poems: 137 | tokens = lex(poem) 138 | parsed = parse(TotalPoetryGrammar, tokens) 139 | self.assertTrue(parsed is not None) 140 | 141 | def test_equivalent_to_combined(self): 142 | for poem in poems: 143 | tokens = lex(poem) 144 | total = parse(TotalPoetryGrammar, tokens) 145 | combined = parser_combinator( 146 | top_parse, 147 | lookup, 148 | combine, 149 | tokens, 150 | ) 151 | self.assertTrue( 152 | total.equals(combined), 153 | ) 154 | -------------------------------------------------------------------------------- /tests/test_peaker.py: -------------------------------------------------------------------------------- 1 | from itertools import cycle 2 | from unittest import TestCase 3 | 4 | from darglint2.peaker import Peaker 5 | 6 | 7 | class PeakerTestCase(TestCase): 8 | def test_peak_doesnt_move_stream_forward(self): 9 | generator = cycle("abc") 10 | peaker = Peaker(stream=generator) 11 | self.assertEqual(peaker.peak(), "a") 12 | self.assertEqual(peaker.peak(), "a") 13 | 14 | def test_next_does_move_stream_forward(self): 15 | generator = cycle("abc") 16 | peaker = Peaker(stream=generator) 17 | self.assertEqual(peaker.next(), "a") 18 | self.assertEqual(peaker.next(), "b") 19 | 20 | def test_stop_iteration_raised(self): 21 | 
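The equivalence test in tests/test_parser_combinator.py hinges on three pieces: top_parse splits the token stream into chunks at consecutive newlines, lookup supplies a sub-parser (the StanzaGrammar) for each chunk, and combine stitches the resulting CykNodes under a single "poem" node. A condensed sketch of that pipeline, using only names defined in that file:

# Sketch: the same pipeline the test exercises, on the shortest sample poem.
tokens = lex("A\n\nC\n\n")                  # two one-word stanzas
chunks = top_parse(tokens)                  # split at blank lines
assert len(chunks) == 2
node = parser_combinator(top_parse, lookup, combine, tokens)
assert node is not None
assert node.symbol == "poem"                # combine relabels the root node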
peaker = Peaker((x for x in "ab")) 22 | self.assertEqual(peaker.next(), "a") 23 | self.assertEqual(peaker.next(), "b") 24 | with self.assertRaises(StopIteration): 25 | peaker.next() 26 | 27 | def test_has_next_returns_false_at_end_of_iteration(self): 28 | peaker = Peaker((x for x in "ab")) 29 | self.assertTrue(peaker.has_next()) 30 | peaker.next() 31 | self.assertTrue(peaker.has_next()) 32 | peaker.next() 33 | self.assertFalse(peaker.has_next()) 34 | 35 | def test_empty_stream_says_it_has_none(self): 36 | peaker = Peaker((x for x in "")) 37 | self.assertFalse(peaker.has_next()) 38 | 39 | def test_take_while(self): 40 | peaker = Peaker((x for x in "name 1234")) 41 | name = "".join(peaker.take_while(str.isalpha)) 42 | self.assertEqual(name, "name") 43 | self.assertTrue(peaker.has_next()) 44 | spaces = "".join(peaker.take_while(str.isspace)) 45 | self.assertEqual(spaces, " ") 46 | self.assertTrue(peaker.has_next()) 47 | numbers = "".join(peaker.take_while(str.isdecimal)) 48 | self.assertEqual(numbers, "1234") 49 | self.assertFalse(peaker.has_next()) 50 | 51 | def test_passing_none_to_peaker_marks_empty(self): 52 | peaker = Peaker((x for x in [])) 53 | self.assertFalse(peaker.has_next()) 54 | 55 | def test_peaker_can_have_n_lookahead(self): 56 | peaker = Peaker((x for x in "abcd"), lookahead=3) 57 | self.assertEqual(peaker.peak(lookahead=1), "a") 58 | self.assertEqual(peaker.peak(lookahead=2), "b") 59 | self.assertEqual(peaker.peak(lookahead=3), "c") 60 | 61 | def test_cannot_peak_beyond_specified_lookahead(self): 62 | peaker = Peaker((x for x in "abcd"), lookahead=1) 63 | with self.assertRaises(Exception): 64 | peaker.peak(lookahead=3) 65 | -------------------------------------------------------------------------------- /tests/test_returns_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from unittest import TestCase 3 | 4 | from darglint2.analysis.return_visitor import ReturnVisitor 5 | 6 | from .utils import reindent 7 | 8 | 9 | class ReturnsVisitorTests(TestCase): 10 | def assertFound(self, program): 11 | """Assert that the return was found. 12 | 13 | Args: 14 | program: The program to run the analysis on. 15 | 16 | Returns: 17 | The visitor, in case you want to do more analysis. 18 | 19 | """ 20 | function = ast.parse(reindent(program)).body[0] 21 | visitor = ReturnVisitor() 22 | visitor.visit(function) 23 | self.assertTrue(visitor.returns) 24 | return visitor 25 | 26 | def assertNoneFound(self, program): 27 | """Assert that no return was found. 28 | 29 | Args: 30 | program: The program to run the analysis on. 31 | 32 | Returns: 33 | The visitor, in case you want to do more analysis. 
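The Peaker in tests/test_peaker.py is a thin lookahead wrapper over any iterator: peak() inspects without consuming, next() consumes, has_next() reports exhaustion, and take_while(predicate) consumes a run of matching items. A short scanning sketch in the same style as test_take_while (the input string is made up for illustration):

# Sketch: scanning a word and a number out of a character stream.
peaker = Peaker(stream=(c for c in "count 42"))
assert peaker.peak() == "c"                        # peak() does not advance the stream
name = "".join(peaker.take_while(str.isalpha))     # consumes "count"
"".join(peaker.take_while(str.isspace))            # skip the single space
number = "".join(peaker.take_while(str.isdecimal))
assert (name, number) == ("count", "42")
assert not peaker.has_next()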
34 | 35 | """ 36 | function = ast.parse(reindent(program)).body[0] 37 | visitor = ReturnVisitor() 38 | visitor.visit(function) 39 | self.assertEqual(visitor.returns, []) 40 | return visitor 41 | 42 | def test_no_return(self): 43 | program = r""" 44 | def f(): 45 | pass 46 | """ 47 | self.assertNoneFound(program) 48 | 49 | def test_nested_no_return(self): 50 | program = r""" 51 | def f(): 52 | def g(): 53 | pass 54 | """ 55 | self.assertNoneFound(program) 56 | 57 | def test_simplest_function(self): 58 | program = r""" 59 | def f(): 60 | return 3 61 | """ 62 | self.assertFound(program) 63 | 64 | def test_early_return(self): 65 | program = r""" 66 | def f(x): 67 | if x < 0: 68 | return -1 69 | for i in range(x): 70 | if complex_condition(x, i): 71 | return i 72 | """ 73 | self.assertFound(program) 74 | 75 | def test_conditional_return(self): 76 | program = r""" 77 | def f(): 78 | if MY_GLOBAL: 79 | return 1 80 | else: 81 | return 2 82 | """ 83 | self.assertFound(program) 84 | 85 | def test_return_in_context(self): 86 | program = r""" 87 | def f(): 88 | with open('/tmp/input', 'r') as fin: 89 | return fin.readlines() 90 | """ 91 | self.assertFound(program) 92 | 93 | def test_returns_none(self): 94 | program = r""" 95 | def f(): 96 | return 97 | """ 98 | visitor = self.assertFound(program) 99 | self.assertEqual( 100 | visitor.returns[0].value, 101 | None, 102 | ) 103 | 104 | def test_returns_non_none(self): 105 | program = r""" 106 | def f(): 107 | return 3 108 | """ 109 | visitor = self.assertFound(program) 110 | self.assertTrue( 111 | isinstance(visitor.returns[0].value, ast.AST), 112 | ) 113 | -------------------------------------------------------------------------------- /tests/test_variable_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from unittest import TestCase 3 | 4 | from darglint2.analysis.variable_visitor import VariableVisitor 5 | 6 | from .utils import reindent 7 | 8 | 9 | class VariableVisitorTests(TestCase): 10 | def assertFound(self, program, *variables): 11 | """Assert that the return was found. 12 | 13 | Args: 14 | program: The program to run the analysis on. 15 | variables: The variables which we expect to have found 16 | (or empty, if we expect none.) 17 | 18 | Returns: 19 | The visitor, in case you want to do more analysis. 
20 | 21 | """ 22 | function = ast.parse(reindent(program)).body[0] 23 | visitor = VariableVisitor() 24 | visitor.visit(function) 25 | self.assertEqual(sorted({x.id for x in visitor.variables}), sorted(variables)) 26 | return visitor 27 | 28 | def test_no_variables(self): 29 | program = """ 30 | def f(x): 31 | return x * 2 32 | """ 33 | self.assertFound(program) 34 | 35 | def test_one_variables(self): 36 | program = """ 37 | def f(x): 38 | y = x * 2 39 | return y 40 | """ 41 | self.assertFound(program, "y") 42 | 43 | def test_many_variables(self): 44 | program = """ 45 | def f(x): 46 | y = 2 * x 47 | pi = 3.1415 48 | something = 'cat' 49 | return something * int(y * pi) 50 | """ 51 | self.assertFound(program, "y", "pi", "something") 52 | 53 | def test_no_variables_in_method(self): 54 | program = """ 55 | class X: 56 | def f(self, x): 57 | self.x = x * 2 58 | return self.x 59 | """ 60 | self.assertFound(program) 61 | 62 | def test_one_variable_in_method(self): 63 | program = """ 64 | class X: 65 | def f(self, x): 66 | y = x * 2 67 | self.x = y 68 | return y 69 | """ 70 | self.assertFound(program, "y") 71 | 72 | def test_many_variables_in_method(self): 73 | program = """ 74 | class X: 75 | def f(self, x): 76 | y = 2 * x 77 | pi = 3.1415 78 | something = 'cat' 79 | self.msg = something * int(y * pi) 80 | return self.msg 81 | """ 82 | self.assertFound(program, "y", "pi", "something") 83 | -------------------------------------------------------------------------------- /tests/test_yield_visitor.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from unittest import TestCase 3 | 4 | from darglint2.analysis.yield_visitor import YieldVisitor 5 | 6 | from .utils import reindent 7 | 8 | 9 | class YieldsVisitorTests(TestCase): 10 | def assertFound(self, program): 11 | """Assert that the yield was found. 12 | 13 | Args: 14 | program: The program to run the analysis on. 15 | 16 | Yields: 17 | The visitor, in case you want to do more analysis. 18 | 19 | """ 20 | function = ast.parse(reindent(program)).body[0] 21 | visitor = YieldVisitor() 22 | visitor.visit(function) 23 | self.assertTrue(visitor.yields) 24 | return visitor 25 | 26 | def assertNoneFound(self, program): 27 | """Assert that no yield was found. 28 | 29 | Args: 30 | program: The program to run the analysis on. 31 | 32 | Yields: 33 | The visitor, in case you want to do more analysis. 
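Each analysis visitor follows the same pattern these test modules use: parse the source, take the FunctionDef node from body[0], call visit(), then read the collected attributes. A self-contained sketch combining ReturnVisitor and VariableVisitor (import paths as in these test files; textwrap.dedent stands in for the tests' reindent helper, and the scale function is made up for the example):

import ast
from textwrap import dedent

from darglint2.analysis.return_visitor import ReturnVisitor
from darglint2.analysis.variable_visitor import VariableVisitor

source = dedent("""\
    def scale(x):
        factor = 2
        return x * factor
""")
function = ast.parse(source).body[0]

return_visitor = ReturnVisitor()
return_visitor.visit(function)
variable_visitor = VariableVisitor()
variable_visitor.visit(function)

assert return_visitor.returns  # the return statement was collected
assert {name.id for name in variable_visitor.variables} == {"factor"}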
34 | 35 | """ 36 | function = ast.parse(reindent(program)).body[0] 37 | visitor = YieldVisitor() 38 | visitor.visit(function) 39 | self.assertEqual(visitor.yields, []) 40 | return visitor 41 | 42 | def test_no_yield(self): 43 | program = r""" 44 | def f(): 45 | pass 46 | """ 47 | self.assertNoneFound(program) 48 | 49 | def test_nested_no_yield(self): 50 | program = r""" 51 | def f(): 52 | def g(): 53 | pass 54 | """ 55 | self.assertNoneFound(program) 56 | 57 | def test_simplest_function(self): 58 | program = r""" 59 | def f(): 60 | yield 3 61 | """ 62 | self.assertFound(program) 63 | 64 | def test_early_yield(self): 65 | program = r""" 66 | def f(x): 67 | if x < 0: 68 | yield -1 69 | for i in range(x): 70 | if complex_condition(x, i): 71 | yield i 72 | """ 73 | self.assertFound(program) 74 | 75 | def test_conditional_yield(self): 76 | program = r""" 77 | def f(): 78 | if MY_GLOBAL: 79 | yield 1 80 | else: 81 | yield 2 82 | """ 83 | self.assertFound(program) 84 | 85 | def test_yield_in_context(self): 86 | program = r""" 87 | def f(): 88 | with open('/tmp/input', 'r') as fin: 89 | yield fin.readlines() 90 | """ 91 | self.assertFound(program) 92 | 93 | def test_yields_none(self): 94 | program = r""" 95 | def f(): 96 | yield 97 | """ 98 | visitor = self.assertFound(program) 99 | self.assertEqual( 100 | visitor.yields[0].value, 101 | None, 102 | ) 103 | 104 | def test_yields_non_none(self): 105 | program = r""" 106 | def f(): 107 | yield 3 108 | """ 109 | visitor = self.assertFound(program) 110 | self.assertTrue( 111 | isinstance(visitor.yields[0].value, ast.AST), 112 | ) 113 | 114 | def test_yield_from(self): 115 | program = r""" 116 | def f(): 117 | yield from (x for x in range(10)) 118 | """ 119 | self.assertFound(program) 120 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py37,py38,py39,py310,py311 3 | [pytest] 4 | addopts = --ignore bin --ignore integration_tests/repos --ignore _data/ 5 | 6 | [flake8] 7 | max-line-length = 88 8 | extend-ignore = E203,E501 9 | 10 | [darglint2] 11 | strictness = short 12 | message_template = {path}:{line}:{obj}: {msg_id} {msg} 13 | 14 | [testenv] 15 | deps=pytest 16 | commands= 17 | pytest --ignore=integration_tests/ --ignore=_data/ 18 | 19 | # Use docker environments to test all python versions. 20 | # (This is just slightly more convenient for running tests 21 | # without having to install a bunch of python version ahead 22 | # of time.) I'll remove python versions as support for them 23 | # is dropped. 24 | [testenv:docker] 25 | deps = 26 | pytest 27 | allowlist_externals = 28 | docker 29 | commands = 30 | docker run -it -v {toxinidir}:/code/ darglint2-36 pytest 31 | docker run -it -v {toxinidir}:/code/ darglint2-37 pytest 32 | docker run -it -v {toxinidir}:/code/ darglint2-38 pytest 33 | docker run -it -v {toxinidir}:/code/ darglint2-39 pytest 34 | 35 | 36 | # Pre-commit tests. 37 | # 38 | # To run, 39 | # 40 | # tox -e pre-commit 41 | # 42 | # These tests expect certain test fixtures to be present. They're not 43 | # included here because they could substantially increase the size of 44 | # the repository, and I don't want to worry about licenses, etc. 45 | # 46 | # The following is a list of files and where they should be sourced: 47 | # 48 | # - _integration_tests/goldens.json_: A set of goldens which should be 49 | # generated by hand, or using the utility in bin/doc_extract. 
Alternatively, 50 | # they could be generated from darglint2 and touched up by hand to be 51 | # made more correct. 52 | # - _integration_tests/max_golden.json_: Should contain a very large golden, 53 | # crafted to be difficult to parse. 54 | # - _.performance_testrun_: Automatically generated, contains the history of 55 | # the performance test runs. Also used as a cache of previous runs 56 | # to prevent pre-commit tests from taking too long. 57 | # - _.performance_module_testrun_: Automatically generated, contains the history 58 | # of the module performance test runs. (Which are run against the repositories 59 | # described below.) 60 | # - _.performance_history_: Automatically generated, records performance history 61 | # in an easy-to-view fashion. 62 | # - _integration_tests/repos/_: Cloned whole repos to test against. 63 | # Ideally, these will be quite large, so it doesn't make sense to include them 64 | # in the repository. 65 | # 66 | # TODO: Add flake8 output. 67 | [testenv:pre-commit] 68 | deps= 69 | pytest 70 | flake8 71 | flake8-docstrings 72 | flake8-rst-docstrings 73 | commands = 74 | # E2E tests 75 | pytest integration_tests/end_to_end.py 76 | 77 | # Analysis tests 78 | pytest integration_tests/analysis_tests.py 79 | 80 | # Test against how we expect docstrings to be parsed. 81 | pytest integration_tests/goldens.py 82 | 83 | # Test the performance to make sure we don't introduce 84 | # a severe regression. 85 | pytest integration_tests/performance.py 86 | 87 | # Test different source file encodings. 88 | pytest integration_tests/sources.py 89 | 90 | # Display the size of the grammar. 91 | python integration_tests/grammar_size.py 92 | 93 | # Display the performance statistics. 94 | python integration_tests/performance.py 95 | 96 | # Make sure darglint2 stays compatible with other common plugins. 97 | pytest integration_tests/compatibility.py 98 | 99 | # Test that the flake8 plugin gets config from flake8. 100 | pytest integration_tests/test_flake8.py 101 | --------------------------------------------------------------------------------