├── example_dags └── .gitkeep ├── tests ├── operators │ └── fake.py └── test_rules.py ├── assets └── images │ └── usage.png ├── .vscode ├── settings.json └── launch.json ├── .pre-commit-hooks.yaml ├── airflint ├── __init__.py ├── __main__.py ├── actions │ └── new_statements.py └── rules │ ├── use_function_level_imports.py │ └── use_jinja_variable_get.py ├── .github └── workflows │ ├── codeql-analysis.yml │ ├── publish.yml │ └── test.yml ├── pyproject.toml ├── .pre-commit-config.yaml ├── .gitignore ├── README.md ├── LICENSE └── poetry.lock /example_dags/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/operators/fake.py: -------------------------------------------------------------------------------- 1 | class FakeOperator: 2 | template_fields = ("foo", "bar") 3 | -------------------------------------------------------------------------------- /assets/images/usage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/feluelle/airflint/HEAD/assets/images/usage.png -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.testing.pytestArgs": [ 3 | "tests" 4 | ], 5 | "python.testing.unittestEnabled": false, 6 | "python.testing.pytestEnabled": true 7 | } 8 | -------------------------------------------------------------------------------- /.pre-commit-hooks.yaml: -------------------------------------------------------------------------------- 1 | - id: airflint 2 | name: Airflint 3 | description: A linter for your Airflow DAGs to ensure Best Practices are being used. 
4 | entry: airflint 5 | language: python 6 | types: [python] 7 | -------------------------------------------------------------------------------- /airflint/__init__.py: -------------------------------------------------------------------------------- 1 | """Top-level package for airflint.""" 2 | 3 | from importlib.metadata import version 4 | from os import getcwd 5 | from os.path import dirname, join, realpath 6 | 7 | __app_name__ = "airflint" 8 | __version__ = version(__name__) 9 | __location__ = realpath(join(getcwd(), dirname(__file__))) 10 | -------------------------------------------------------------------------------- /airflint/__main__.py: -------------------------------------------------------------------------------- 1 | from refactor import run 2 | 3 | from airflint.rules.use_function_level_imports import UseFunctionLevelImports 4 | from airflint.rules.use_jinja_variable_get import UseJinjaVariableGet 5 | 6 | 7 | def main(): 8 | run( 9 | rules=[ 10 | UseFunctionLevelImports, 11 | UseJinjaVariableGet, 12 | ], 13 | ) 14 | 15 | 16 | if __name__ == "__main__": 17 | main() 18 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "airflint", 9 | "type": "python", 10 | "request": "launch", 11 | "module": "airflint", 12 | "justMyCode": true, 13 | "args": ["example_dags/"] 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | name: codeql 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths-ignore: 8 | - '**.md' 9 | pull_request: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | schedule: 15 | - cron: '32 10 * * 4' 16 | 17 | jobs: 18 | analyze: 19 | runs-on: ubuntu-latest 20 | permissions: 21 | actions: read 22 | contents: read 23 | security-events: write 24 | steps: 25 | - name: Checkout repository 26 | uses: actions/checkout@v2 27 | - name: Setup python 28 | id: setup-python 29 | uses: actions/setup-python@v2 30 | with: 31 | python-version: 3.9 32 | - name: Initialize CodeQL 33 | uses: github/codeql-action/init@v1 34 | with: 35 | languages: python 36 | - name: Autobuild 37 | uses: github/codeql-action/autobuild@v1 38 | - name: Perform CodeQL Analysis 39 | uses: github/codeql-action/analyze@v1 40 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "airflint" 3 | version = "0.3.2-alpha" 4 | description = "Enforce Best Practices for all your Airflow DAGs. 
⭐" 5 | authors = ["Felix Uellendall "] 6 | license = "Apache-2.0" 7 | keywords = ["airflow", "best-practices", "dag"] 8 | readme = "README.md" 9 | 10 | [tool.poetry.scripts] 11 | airflint = "airflint.__main__:main" 12 | 13 | [tool.poetry.dependencies] 14 | python = "^3.9" 15 | refactor = "^0.5.0" 16 | 17 | [tool.poetry.dev-dependencies] 18 | pytest = "^7.1.1" 19 | pytest-cov = "^3.0.0" 20 | 21 | [build-system] 22 | requires = ["poetry-core>=1.0.0"] 23 | build-backend = "poetry.core.masonry.api" 24 | 25 | [tool.coverage.run] 26 | omit = ["airflint/__main__.py"] 27 | 28 | [tool.ruff] 29 | target-version = "py39" 30 | 31 | [tool.ruff.lint] 32 | select = ["E4", "E7", "E9", "F", "B", "S", "UP", "PGH", "D", "I", "A"] 33 | ignore = ["D100", "D102"] 34 | 35 | [tool.ruff.per-file-ignores] 36 | "tests/*" = ["S101", "D"] 37 | "airflint/rules/*.py" = ["S101"] 38 | "airflint/__main__.py" = ["D"] 39 | 40 | [tool.ruff.pydocstyle] 41 | convention = "pep257" 42 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | ci: 2 | skip: 3 | - pytest 4 | 5 | default_language_version: 6 | python: python3.9 7 | repos: 8 | - repo: meta 9 | hooks: 10 | - id: check-hooks-apply 11 | - repo: https://github.com/pre-commit/pre-commit-hooks 12 | rev: v5.0.0 13 | hooks: 14 | - id: check-ast 15 | - id: check-builtin-literals 16 | - id: check-docstring-first 17 | - id: check-toml 18 | - id: debug-statements 19 | - id: end-of-file-fixer 20 | - id: trailing-whitespace 21 | - repo: https://github.com/pre-commit/mirrors-mypy 22 | rev: v1.14.0 23 | hooks: 24 | - id: mypy 25 | additional_dependencies: 26 | - types-toml 27 | - repo: https://github.com/astral-sh/ruff-pre-commit 28 | rev: v0.8.4 29 | hooks: 30 | - id: ruff 31 | args: 32 | - --fix 33 | - --unsafe-fixes 34 | - id: ruff-format 35 | - repo: https://github.com/crate-ci/typos 36 | rev: v1.28.4 37 | hooks: 38 | - 
id: typos 39 | - repo: local 40 | hooks: 41 | - id: pytest 42 | name: Run pytest 43 | entry: poetry run pytest tests/ 44 | language: system 45 | pass_filenames: false 46 | always_run: true 47 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: publish 2 | 3 | on: 4 | release: 5 | types: 6 | - published 7 | 8 | jobs: 9 | publish: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Check out repository 13 | uses: actions/checkout@v2 14 | - name: Setup python 15 | id: setup-python 16 | uses: actions/setup-python@v2 17 | with: 18 | python-version: 3.9 19 | - name: Install Poetry 20 | uses: snok/install-poetry@v1 21 | with: 22 | virtualenvs-create: true 23 | virtualenvs-in-project: true 24 | installer-parallel: true 25 | - name: Load cached venv 26 | id: cached-poetry-dependencies 27 | uses: actions/cache@v2 28 | with: 29 | path: .venv 30 | key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} 31 | - name: Install dependencies 32 | if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' 33 | run: poetry install --no-interaction --no-root 34 | - name: Install library 35 | run: poetry install --no-interaction 36 | - name: Publish to PyPI 37 | env: 38 | PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} 39 | run: | 40 | poetry config pypi-token.pypi $PYPI_TOKEN 41 | poetry publish --build 42 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: test 2 | 3 | on: 4 | push: 5 | paths-ignore: 6 | - '**.md' 7 | 8 | jobs: 9 | test: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | python: ['3.9', '3.10', '3.11'] 15 | experimental: [false] 16 | include: 17 | - python: '3.12.0-alpha - 3.12.0' 18 | 
experimental: true 19 | continue-on-error: ${{ matrix.experimental }} 20 | steps: 21 | - name: Check out repository 22 | uses: actions/checkout@v2 23 | - name: Setup python 24 | id: setup-python 25 | uses: actions/setup-python@v2 26 | with: 27 | python-version: ${{ matrix.python }} 28 | - name: Install Poetry 29 | uses: snok/install-poetry@v1 30 | with: 31 | virtualenvs-create: true 32 | virtualenvs-in-project: true 33 | installer-parallel: true 34 | - name: Load cached venv 35 | id: cached-poetry-dependencies 36 | uses: actions/cache@v2 37 | with: 38 | path: .venv 39 | key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} 40 | - name: Install dependencies 41 | if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' 42 | run: poetry install --no-interaction --no-root 43 | - name: Install library 44 | run: poetry install --no-interaction 45 | - name: Run tests 46 | run: poetry run pytest --cov-report=xml --cov=airflint tests/ 47 | - name: Upload coverage to Codecov 48 | uses: codecov/codecov-action@v2 49 | with: 50 | verbose: true 51 | -------------------------------------------------------------------------------- /airflint/actions/new_statements.py: -------------------------------------------------------------------------------- 1 | # Credits go to GitHub user @isidentical who provided most of the solution. 2 | import ast 3 | from dataclasses import dataclass 4 | 5 | from refactor import Context, common 6 | from refactor.actions import LazyInsertAfter 7 | from refactor.ast import split_lines 8 | 9 | 10 | @dataclass 11 | class NewStatementsAction(LazyInsertAfter): 12 | """Add new statements after the node's line.""" 13 | 14 | statements: list[ast.stmt] 15 | 16 | def apply(self, context: Context, source: str) -> str: 17 | # We need a custom action that defines a custom apply() 18 | # method since this use case is not natively supported. 
19 | 20 | # What apply() does is very basic, it splits the whole 21 | # source into smaller pieces (lines) and inserts the 22 | # newly built source code into the appropriate place. 23 | lines = split_lines(source) 24 | 25 | # First we need to find where the initial anchor starts, 26 | # and we'll extract the indentation from there. So all 27 | # the new nodes will align with this indentation level. 28 | start_line = lines[self.node.lineno - 1] 29 | indentation, start_prefix = common.find_indent( 30 | start_line[: self.node.col_offset], 31 | ) 32 | 33 | # Then we'll build the actual source code from scratch. 34 | new_source = "" 35 | for statement in self.statements: 36 | new_source += context.unparse(statement) + "\n" 37 | 38 | # And split it into lines as well. 39 | replacement = split_lines(new_source) 40 | 41 | # For each line in this code, we'll apply *THE SAME* indentation level 42 | # as the anchor node. 43 | replacement.apply_indentation(indentation, start_prefix=start_prefix) 44 | 45 | # And insert these lines just on top of the actual anchor node. 46 | anchor = self.node.lineno - 1 47 | for line in reversed(replacement): 48 | lines.insert(anchor, line) 49 | 50 | # Finally we'll merge everything together! 51 | return lines.join() 52 | -------------------------------------------------------------------------------- /airflint/rules/use_function_level_imports.py: -------------------------------------------------------------------------------- 1 | # Credits go to GitHub user @isidentical who provided most of the solution. 
2 | import ast 3 | 4 | from refactor import Rule, context 5 | from refactor.actions import BaseAction 6 | from refactor.context import ScopeType 7 | 8 | from airflint.actions.new_statements import NewStatementsAction 9 | 10 | 11 | class UseFunctionLevelImports(Rule): 12 | """Replace top-level import by function-level import.""" 13 | 14 | context_providers = (context.Scope,) 15 | 16 | def match(self, node: ast.AST) -> BaseAction: 17 | # Instead of going import -> function, we are going 18 | # function -> import since this way it is easier to 19 | # do the refactorings one by one and finally remove 20 | # all unused imports at the end. 21 | assert isinstance(node, ast.FunctionDef) 22 | # Skip functions using dag decorator 23 | # as they are being called by the DAG Parser as well. 24 | assert not any( 25 | isinstance(identifier, ast.Name) 26 | and isinstance(identifier.ctx, ast.Load) 27 | and identifier.id == "dag" 28 | for expr in node.decorator_list 29 | for identifier in ast.walk(expr) 30 | ) 31 | 32 | inlined_imports = [] 33 | # We only walk through function body - not decorators 34 | # as they are not within the function scope. 35 | for stmt in node.body: 36 | for identifier in ast.walk(stmt): 37 | # Find all used identifiers inside this function. 38 | if not ( 39 | isinstance(identifier, ast.Name) 40 | and isinstance(identifier.ctx, ast.Load) 41 | ): 42 | continue 43 | 44 | # And try to resolve their scope. Each scope has its own parent 45 | # (unless it is a module), so we are simply going back until we 46 | # find the definition for the selected identifier. 47 | scope = self.context["scope"].resolve(identifier) 48 | while not (definitions := scope.definitions.get(identifier.id, [])): 49 | scope = scope.parent 50 | if scope is None: 51 | # There might be some magic, so let's not 52 | # forget the chance of running out of scopes. 
53 | break 54 | 55 | # If any of the definitions (there might be multiple of them) 56 | # we found matches an import, we'll filter them out. 57 | imports = [ 58 | definition 59 | for definition in definitions 60 | if isinstance(definition, (ast.Import, ast.ImportFrom)) 61 | ] 62 | # And we'll ensure this import is originating from the global scope. 63 | if imports and scope.scope_type is ScopeType.GLOBAL: 64 | inlined_imports.append(imports[0]) 65 | 66 | # We only want unique imports i.e. no duplicates 67 | unique_inlined_imports = list(dict.fromkeys(inlined_imports)) 68 | 69 | # We want this rule to only run if there is at least 1 inlined import. 70 | assert len(unique_inlined_imports) >= 1 71 | 72 | # We'll select the first statement, which will act like an anchor to us 73 | # when we are inserting. 74 | first_stmt = node.body[0] 75 | return NewStatementsAction(first_stmt, unique_inlined_imports) 76 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 105 | __pypackages__/ 106 | 107 | # Celery stuff 108 | celerybeat-schedule 109 | celerybeat.pid 110 | 111 | # SageMath parsed files 112 | *.sage.py 113 | 114 | # Environments 115 | .env 116 | .venv 117 | env/ 118 | venv/ 119 | ENV/ 120 | env.bak/ 121 | venv.bak/ 122 | 123 | # Spyder project settings 124 | .spyderproject 125 | .spyproject 126 | 127 | # Rope project settings 128 | .ropeproject 129 | 130 | # mkdocs documentation 131 | /site 132 | 133 | # mypy 134 | .mypy_cache/ 135 | .dmypy.json 136 | dmypy.json 137 | 138 | # Pyre type checker 139 | .pyre/ 140 | 141 | # pytype static type analyzer 142 | .pytype/ 143 | 144 | # Cython debug symbols 145 | cython_debug/ 146 | 147 | # PyCharm 148 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 149 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 150 | # and can be added to the global gitignore or merged into this file. For a more nuclear 151 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
152 | #.idea/ 153 | 154 | # Example dags for testing 155 | example_dags/* 156 | !.gitkeep 157 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # airflint 2 | 3 | [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/feluelle/airflint/main.svg)](https://results.pre-commit.ci/latest/github/feluelle/airflint/main) 4 | ![test workflow](https://github.com/feluelle/airflint/actions/workflows/test.yml/badge.svg) 5 | ![codeql-analysis workflow](https://github.com/feluelle/airflint/actions/workflows/codeql-analysis.yml/badge.svg) 6 | [![codecov](https://codecov.io/gh/feluelle/airflint/branch/main/graph/badge.svg?token=J8UEP8IVY4)](https://codecov.io/gh/feluelle/airflint) 7 | [![PyPI version](https://img.shields.io/pypi/v/airflint)](https://pypi.org/project/airflint/) 8 | [![License](https://img.shields.io/pypi/l/airflint)](https://github.com/feluelle/airflint/blob/main/LICENSE) 9 | [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/airflint)](https://pypi.org/project/airflint/) 10 | [![PyPI version](https://img.shields.io/pypi/dm/airflint)](https://pypi.org/project/airflint/) 11 | 12 | > Enforce Best Practices for all your Airflow DAGs. ⭐ 13 | 14 | ⚠️ **airflint is still in alpha stage and has not been tested with real world Airflow DAGs. Please report any issues you face via [GitHub Issues](https://github.com/feluelle/airflint/issues), thank you. 
🙏** 15 | 16 | ## 🧑‍🏫 Rules 17 | 18 | - [x] Use function-level imports instead of top-level imports[^1][^2] (see [Top level Python Code](https://airflow.apache.org/docs/apache-airflow/stable/best-practices.html#top-level-python-code)) 19 | - [x] Use jinja template syntax instead of `Variable.get` (see [Airflow Variables](https://airflow.apache.org/docs/apache-airflow/stable/best-practices.html#airflow-variables)) 20 | 21 | [^1]: There is a PEP for [Lazy Imports](https://peps.python.org/pep-0690/) targeted to arrive in Python 3.12 which would supersede this rule. 22 | 23 | [^2]: To remove top-level imports after running `UseFunctionLevelImports` rule, use a tool such as [autoflake](https://github.com/PyCQA/autoflake). 24 | 25 | _based on official [Best Practices](https://airflow.apache.org/docs/apache-airflow/stable/best-practices.html)_ 26 | 27 | ## Requirements 28 | 29 | airflint is tested with: 30 | 31 | | | Main version (dev) | Released version (0.3.2-alpha) | 32 | |----------------|----------------------------------------|--------------------------------| 33 | | Python | 3.9, 3.10, 3.11, 3.12.0-alpha - 3.12.0 | 3.9, 3.10 | 34 | | Apache Airflow | >= 2.0.0 | >= 2.0.0 | 35 | 36 | ## 🚀 Get started 37 | 38 | To install it from [PyPI](https://pypi.org/) run: 39 | 40 | ```console 41 | pip install airflint 42 | ``` 43 | 44 | > **_NOTE:_** It is recommended to install airflint into your existing airflow environment with all your providers included. This way `UseJinjaVariableGet` rule can detect all `template_fields` and airflint works as expected. 
45 | 46 | Then just call it like this: 47 | 48 | ![usage](assets/images/usage.png) 49 | 50 | ### pre-commit 51 | 52 | Alternatively you can add the following repo to your `pre-commit-config.yaml`: 53 | 54 | ```yaml 55 | - repo: https://github.com/feluelle/airflint 56 | rev: v0.3.2-alpha 57 | hooks: 58 | - id: airflint 59 | args: ["-a"] # Use -a to apply the suggestions 60 | additional_dependencies: # Add all package dependencies you have in your dags, preferable with version spec 61 | - apache-airflow 62 | - apache-airflow-providers-cncf-kubernetes 63 | ``` 64 | 65 | To complete the `UseFunctionlevelImports` rule, please add the `autoflake` hook after the `airflint` hook, as below: 66 | 67 | ```yaml 68 | - repo: https://github.com/pycqa/autoflake 69 | rev: v1.4 70 | hooks: 71 | - id: autoflake 72 | args: ["--remove-all-unused-imports", "--in-place"] 73 | ``` 74 | 75 | This will remove unused imports. 76 | 77 | ## ❤️ Contributing 78 | 79 | I am looking for contributors who are interested in.. 80 | 81 | - testing airflint with real world Airflow DAGs and reporting issues as soon as they face them 82 | - optimizing the ast traversing for existing rules 83 | - adding new rules based on best practices or bottlenecks you have experienced during Airflow DAGs authoring 84 | - documenting about what is being supported in particular by each rule 85 | - defining supported airflow versions i.e. some rules are bound to specific Airflow features and version 86 | 87 | For questions, please don't hesitate to open a GitHub issue. 
88 | -------------------------------------------------------------------------------- /airflint/rules/use_jinja_variable_get.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from importlib import import_module 3 | from typing import Any 4 | 5 | from refactor import Rule 6 | from refactor.actions import Replace 7 | from refactor.context import Ancestry, Scope 8 | 9 | 10 | class UseJinjaVariableGet(Rule): 11 | """Replace `Variable.get("foo")` Calls through the jinja equivalent `{{ var.value.foo }}` if the variable is listed in `template_fields`.""" 12 | 13 | context_providers = (Scope, Ancestry) 14 | 15 | def _get_operator_keywords(self, reference: ast.Call) -> list[ast.keyword]: 16 | parent = self.context["ancestry"].get_parent(reference) 17 | 18 | if isinstance(parent, ast.Assign): 19 | # Get all operator keywords referencing the variable, Variable.get call was assigned to. 20 | scope = self.context["scope"] 21 | operator_keywords = [ 22 | node 23 | for node in ast.walk(self.context.tree) 24 | if isinstance(node, ast.keyword) 25 | and isinstance(node.value, ast.Name) 26 | and any( 27 | node.value.id == target.id 28 | and scope.resolve(node.value).can_reach(scope.resolve(target)) 29 | for target in parent.targets 30 | if isinstance(target, ast.Name) 31 | ) 32 | ] 33 | if operator_keywords: 34 | return operator_keywords 35 | raise AssertionError("No operator keywords found. Skipping..") 36 | 37 | if isinstance(parent, ast.keyword): 38 | # Direct reference without variable assignment. 39 | return [parent] 40 | 41 | raise AssertionError(f"Unsupported parent type {type(parent)}. Skipping..") 42 | 43 | def _lookup_template_fields(self, keyword: ast.keyword) -> None: 44 | parent = self.context["ancestry"].get_parent(keyword) 45 | 46 | # Find the import node module matching the operator calls name. 
47 | assert isinstance(operator_call := parent, ast.Call) 48 | assert isinstance(operator_call.func, ast.Name) 49 | scope = self.context["scope"].resolve(operator_call.func) 50 | try: 51 | import_node = next( 52 | node 53 | for node in ast.walk(self.context.tree) 54 | if isinstance(node, ast.ImportFrom) 55 | and any(alias.name == operator_call.func.id for alias in node.names) 56 | and scope.can_reach(self.context["scope"].resolve(node)) 57 | ) 58 | except StopIteration: 59 | raise AssertionError( 60 | "Could not find import definition. Skipping.." 61 | ) from None 62 | assert (module_name := import_node.module) 63 | 64 | # Try to import the module into python. 65 | try: 66 | _module = import_module(module_name) 67 | except ImportError as e: 68 | raise AssertionError("Could not import module. Skipping..") from e 69 | assert (file_path := _module.__file__) 70 | 71 | # Parse the ast to check if the keyword is in template_fields. 72 | with open(file_path) as file: 73 | module = ast.parse(file.read()) 74 | assert any( 75 | ( 76 | # E.g. : Any = (..., ) 77 | isinstance(stmt, ast.AnnAssign) 78 | and isinstance(target := stmt.target, ast.Name) 79 | # E.g. 
= (..., ) 80 | or isinstance(stmt, ast.Assign) 81 | and isinstance(target := stmt.targets[0], ast.Name) # type: ignore[assignment] 82 | ) 83 | and target.id == "template_fields" 84 | and isinstance(stmt.value, ast.Tuple) 85 | and any( 86 | isinstance(elt, ast.Constant) and elt.value == keyword.arg 87 | for elt in stmt.value.elts 88 | ) 89 | for module_stmt in module.body 90 | if isinstance(module_stmt, ast.ClassDef) 91 | for stmt in module_stmt.body 92 | ) 93 | 94 | def _get_parameter( 95 | self, 96 | node: ast.Call, 97 | position: int, 98 | name: str, 99 | ) -> Any: 100 | if position < len(node.args) and isinstance( 101 | arg := node.args[position], 102 | ast.Constant, 103 | ): 104 | return arg.value 105 | return next( 106 | keyword.value.value 107 | for keyword in node.keywords 108 | if keyword.arg == name and isinstance(keyword.value, ast.Constant) 109 | ) 110 | 111 | def _construct_value(self, node: ast.Call) -> str: 112 | # Read key from Variable.get node. 113 | key = self._get_parameter(node, position=0, name="key") 114 | 115 | # Read optional deserialize_json from Variable.get node. 116 | try: 117 | deserialize_json = self._get_parameter( 118 | node, 119 | position=2, 120 | name="deserialize_json", 121 | ) 122 | var_type = "json" if deserialize_json else "value" 123 | except StopIteration: 124 | var_type = "value" 125 | 126 | # Read optional default_var from Variable.get node and construct the final value. 
127 | try: 128 | default_var = self._get_parameter(node, position=1, name="default_var") 129 | if isinstance(default_var, str): 130 | value = f"{{{{ var.{var_type}.get('{key}', '{default_var}') }}}}" 131 | else: 132 | value = f"{{{{ var.{var_type}.get('{key}', {default_var}) }}}}" 133 | except StopIteration: 134 | value = f"{{{{ var.{var_type}.{key} }}}}" 135 | 136 | return value 137 | 138 | def match(self, node): 139 | assert ( 140 | isinstance(node, ast.Call) 141 | and isinstance(node.func, ast.Attribute) 142 | and isinstance(node.func.value, ast.Name) 143 | and node.func.value.id == "Variable" 144 | and node.func.attr == "get" 145 | and isinstance(node.func.ctx, ast.Load) 146 | ) 147 | 148 | for operator_keyword in self._get_operator_keywords(reference=node): 149 | self._lookup_template_fields(keyword=operator_keyword) 150 | 151 | return Replace( 152 | node, 153 | target=ast.Constant(value=self._construct_value(node)), 154 | ) 155 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "atomicwrites" 3 | version = "1.4.0" 4 | description = "Atomic file writes." 
5 | category = "dev" 6 | optional = false 7 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 8 | 9 | [[package]] 10 | name = "attrs" 11 | version = "21.4.0" 12 | description = "Classes Without Boilerplate" 13 | category = "dev" 14 | optional = false 15 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 16 | 17 | [package.extras] 18 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] 19 | docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] 20 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] 21 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] 22 | 23 | [[package]] 24 | name = "colorama" 25 | version = "0.4.5" 26 | description = "Cross-platform colored terminal text." 
27 | category = "dev" 28 | optional = false 29 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 30 | 31 | [[package]] 32 | name = "coverage" 33 | version = "6.4.3" 34 | description = "Code coverage measurement for Python" 35 | category = "dev" 36 | optional = false 37 | python-versions = ">=3.7" 38 | 39 | [package.dependencies] 40 | tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} 41 | 42 | [package.extras] 43 | toml = ["tomli"] 44 | 45 | [[package]] 46 | name = "iniconfig" 47 | version = "1.1.1" 48 | description = "iniconfig: brain-dead simple config-ini parsing" 49 | category = "dev" 50 | optional = false 51 | python-versions = "*" 52 | 53 | [[package]] 54 | name = "packaging" 55 | version = "21.3" 56 | description = "Core utilities for Python packages" 57 | category = "dev" 58 | optional = false 59 | python-versions = ">=3.6" 60 | 61 | [package.dependencies] 62 | pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" 63 | 64 | [[package]] 65 | name = "pluggy" 66 | version = "1.0.0" 67 | description = "plugin and hook calling mechanisms for python" 68 | category = "dev" 69 | optional = false 70 | python-versions = ">=3.6" 71 | 72 | [package.extras] 73 | testing = ["pytest-benchmark", "pytest"] 74 | dev = ["tox", "pre-commit"] 75 | 76 | [[package]] 77 | name = "py" 78 | version = "1.11.0" 79 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 80 | category = "dev" 81 | optional = false 82 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 83 | 84 | [[package]] 85 | name = "pyparsing" 86 | version = "3.0.9" 87 | description = "pyparsing module - Classes and methods to define and execute parsing grammars" 88 | category = "dev" 89 | optional = false 90 | python-versions = ">=3.6.8" 91 | 92 | [package.extras] 93 | diagrams = ["railroad-diagrams", "jinja2"] 94 | 95 | [[package]] 96 | name = "pytest" 97 | version = "7.1.2" 98 | description = 
"pytest: simple powerful testing with Python" 99 | category = "dev" 100 | optional = false 101 | python-versions = ">=3.7" 102 | 103 | [package.dependencies] 104 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 105 | attrs = ">=19.2.0" 106 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 107 | iniconfig = "*" 108 | packaging = "*" 109 | pluggy = ">=0.12,<2.0" 110 | py = ">=1.8.2" 111 | tomli = ">=1.0.0" 112 | 113 | [package.extras] 114 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] 115 | 116 | [[package]] 117 | name = "pytest-cov" 118 | version = "3.0.0" 119 | description = "Pytest plugin for measuring coverage." 120 | category = "dev" 121 | optional = false 122 | python-versions = ">=3.6" 123 | 124 | [package.dependencies] 125 | coverage = {version = ">=5.2.1", extras = ["toml"]} 126 | pytest = ">=4.6" 127 | 128 | [package.extras] 129 | testing = ["virtualenv", "pytest-xdist", "six", "process-tests", "hunter", "fields"] 130 | 131 | [[package]] 132 | name = "refactor" 133 | version = "0.5.0" 134 | description = "AST-based fragmental source code refactoring toolkit" 135 | category = "main" 136 | optional = false 137 | python-versions = ">=3.9" 138 | 139 | [[package]] 140 | name = "tomli" 141 | version = "2.0.1" 142 | description = "A lil' TOML parser" 143 | category = "dev" 144 | optional = false 145 | python-versions = ">=3.7" 146 | 147 | [metadata] 148 | lock-version = "1.1" 149 | python-versions = "^3.9" 150 | content-hash = "ad564b56f2b1b54d3a443a75ce95b2cf955ff728ebd3e43ca5451f07f6c3c990" 151 | 152 | [metadata.files] 153 | atomicwrites = [ 154 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, 155 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, 156 | ] 157 | attrs = [ 158 | {file = 
"attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, 159 | {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, 160 | ] 161 | colorama = [ 162 | {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, 163 | {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, 164 | ] 165 | coverage = [ 166 | {file = "coverage-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f50d3a822947572496ea922ee7825becd8e3ae6fbd2400cd8236b7d64b17f285"}, 167 | {file = "coverage-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d5191d53afbe5b6059895fa7f58223d3751c42b8101fb3ce767e1a0b1a1d8f87"}, 168 | {file = "coverage-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04010af3c06ce2bfeb3b1e4e05d136f88d88c25f76cd4faff5d1fd84d11581ea"}, 169 | {file = "coverage-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6630d8d943644ea62132789940ca97d05fac83f73186eaf0930ffa715fbdab6b"}, 170 | {file = "coverage-6.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05de0762c1caed4a162b3e305f36cf20a548ff4da0be6766ad5c870704be3660"}, 171 | {file = "coverage-6.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e3a41aad5919613483aad9ebd53336905cab1bd6788afd3995c2a972d89d795"}, 172 | {file = "coverage-6.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a2738ba1ee544d6f294278cfb6de2dc1f9a737a780469b5366e662a218f806c3"}, 173 | {file = "coverage-6.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a0d2df4227f645a879010461df2cea6b7e3fb5a97d7eafa210f7fb60345af9e8"}, 174 | {file = "coverage-6.4.3-cp310-cp310-win32.whl", hash = 
"sha256:73a10939dc345460ca0655356a470dd3de9759919186a82383c87b6eb315faf2"}, 175 | {file = "coverage-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:53c8edd3b83a4ddba3d8c506f1359401e7770b30f2188f15c17a338adf5a14db"}, 176 | {file = "coverage-6.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1eda5cae434282712e40b42aaf590b773382afc3642786ac3ed39053973f61f"}, 177 | {file = "coverage-6.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59fc88bc13e30f25167e807b8cad3c41b7218ef4473a20c86fd98a7968733083"}, 178 | {file = "coverage-6.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75314b00825d70e1e34b07396e23f47ed1d4feedc0122748f9f6bd31a544840"}, 179 | {file = "coverage-6.4.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52f8b9fcf3c5e427d51bbab1fb92b575a9a9235d516f175b24712bcd4b5be917"}, 180 | {file = "coverage-6.4.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5a559aab40c716de80c7212295d0dc96bc1b6c719371c20dd18c5187c3155518"}, 181 | {file = "coverage-6.4.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:306788fd019bb90e9cbb83d3f3c6becad1c048dd432af24f8320cf38ac085684"}, 182 | {file = "coverage-6.4.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:920a734fe3d311ca01883b4a19aa386c97b82b69fbc023458899cff0a0d621b9"}, 183 | {file = "coverage-6.4.3-cp37-cp37m-win32.whl", hash = "sha256:ab9ef0187d6c62b09dec83a84a3b94f71f9690784c84fd762fb3cf2d2b44c914"}, 184 | {file = "coverage-6.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:39ebd8e120cb77a06ee3d5fc26f9732670d1c397d7cd3acf02f6f62693b89b80"}, 185 | {file = "coverage-6.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc698580216050b5f4a34d2cdd2838b429c53314f1c4835fab7338200a8396f2"}, 186 | {file = "coverage-6.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:877ee5478fd78e100362aed56db47ccc5f23f6e7bb035a8896855f4c3e49bc9b"}, 187 | {file = 
"coverage-6.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:555a498999c44f5287cc95500486cd0d4f021af9162982cbe504d4cb388f73b5"}, 188 | {file = "coverage-6.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eff095a5aac7011fdb51a2c82a8fae9ec5211577f4b764e1e59cfa27ceeb1b59"}, 189 | {file = "coverage-6.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5de1e9335e2569974e20df0ce31493d315a830d7987e71a24a2a335a8d8459d3"}, 190 | {file = "coverage-6.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7856ea39059d75f822ff0df3a51ea6d76307c897048bdec3aad1377e4e9dca20"}, 191 | {file = "coverage-6.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:411fdd9f4203afd93b056c0868c8f9e5e16813e765de962f27e4e5798356a052"}, 192 | {file = "coverage-6.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cdf7b83f04a313a21afb1f8730fe4dd09577fefc53bbdfececf78b2006f4268e"}, 193 | {file = "coverage-6.4.3-cp38-cp38-win32.whl", hash = "sha256:ab2b1a89d2bc7647622e9eaf06128a5b5451dccf7c242deaa31420b055716481"}, 194 | {file = "coverage-6.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:0e34247274bde982bbc613894d33f9e36358179db2ed231dd101c48dd298e7b0"}, 195 | {file = "coverage-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b104b6b1827d6a22483c469e3983a204bcf9c6bf7544bf90362c4654ebc2edf3"}, 196 | {file = "coverage-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:adf1a0d272633b21d645dd6e02e3293429c1141c7d65a58e4cbcd592d53b8e01"}, 197 | {file = "coverage-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff9832434a9193fbd716fbe05f9276484e18d26cc4cf850853594bb322807ac3"}, 198 | {file = "coverage-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:923f9084d7e1d31b5f74c92396b05b18921ed01ee5350402b561a79dce3ea48d"}, 199 | {file = 
"coverage-6.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d64304acf79766e650f7acb81d263a3ea6e2d0d04c5172b7189180ff2c023c"}, 200 | {file = "coverage-6.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fc294de50941d3da66a09dca06e206297709332050973eca17040278cb0918ff"}, 201 | {file = "coverage-6.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a42eaaae772f14a5194f181740a67bfd48e8806394b8c67aa4399e09d0d6b5db"}, 202 | {file = "coverage-6.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4822327b35cb032ff16af3bec27f73985448f08e874146b5b101e0e558b613dd"}, 203 | {file = "coverage-6.4.3-cp39-cp39-win32.whl", hash = "sha256:f217850ac0e046ede611312703423767ca032a7b952b5257efac963942c055de"}, 204 | {file = "coverage-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0a84376e4fd13cebce2c0ef8c2f037929c8307fb94af1e5dbe50272a1c651b5d"}, 205 | {file = "coverage-6.4.3-pp36.pp37.pp38-none-any.whl", hash = "sha256:068d6f2a893af838291b8809c876973d885543411ea460f3e6886ac0ee941732"}, 206 | {file = "coverage-6.4.3.tar.gz", hash = "sha256:ec2ae1f398e5aca655b7084392d23e80efb31f7a660d2eecf569fb9f79b3fb94"}, 207 | ] 208 | iniconfig = [ 209 | {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, 210 | {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, 211 | ] 212 | packaging = [ 213 | {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, 214 | {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, 215 | ] 216 | pluggy = [ 217 | {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, 218 | {file = "pluggy-1.0.0.tar.gz", hash = 
"sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, 219 | ] 220 | py = [ 221 | {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, 222 | {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, 223 | ] 224 | pyparsing = [ 225 | {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, 226 | {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, 227 | ] 228 | pytest = [ 229 | {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, 230 | {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, 231 | ] 232 | pytest-cov = [ 233 | {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, 234 | {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, 235 | ] 236 | refactor = [ 237 | {file = "refactor-0.5.0-py3-none-any.whl", hash = "sha256:3dd9882aa9e8a4eb982ba6d2d23f7551685bb9e451783394fba5566f3db8c37f"}, 238 | {file = "refactor-0.5.0.tar.gz", hash = "sha256:b82945d3a313cd4984a5bc2c16e0ccc1c585aeea38ab80890b56c4a6a3d98d33"}, 239 | ] 240 | tomli = [ 241 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 242 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 243 | ] 244 | -------------------------------------------------------------------------------- /tests/test_rules.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | 3 | import pytest 4 | from 
refactor import Session

from airflint.rules.use_function_level_imports import UseFunctionLevelImports
from airflint.rules.use_jinja_variable_get import UseJinjaVariableGet


# Each case feeds a DAG-file snippet through exactly one airflint rule and
# compares the refactored source against the expected snippet.  The id encodes
# "<Rule> | SUCCESS/SKIP | <scenario>": SUCCESS cases expect a rewrite, SKIP
# cases expect the source to pass through unchanged (expected == source).
# NOTE(review): fixture strings are byte-significant — both source and
# expected are dedented before comparison, so only relative indentation
# inside the triple-quoted snippets matters.
@pytest.mark.parametrize(
    "rule, source, expected",
    [
        # --- UseFunctionLevelImports: move top-level imports into the
        # --- function bodies that actually use them.
        pytest.param(
            UseFunctionLevelImports,
            """
            from functools import reduce
            from operator import add
            import dataclass

            def something():
                a = reduce(x, y)
                b = add(a, a)
                return b

            def other_thing():
                return dataclass(something(1, 2))
            """,
            """
            from functools import reduce
            from operator import add
            import dataclass

            def something():
                from functools import reduce
                from operator import add
                a = reduce(x, y)
                b = add(a, a)
                return b

            def other_thing():
                import dataclass
                return dataclass(something(1, 2))
            """,
            id="UseFunctionLevelImports | SUCCESS | general",
        ),
        pytest.param(
            UseFunctionLevelImports,
            """
            from airflow.decorators import dag, task
            from operator import add

            @dag()
            def my_dag():
                @task()
                def my_task():
                    add(1, 2)
            """,
            """
            from airflow.decorators import dag, task
            from operator import add

            @dag()
            def my_dag():
                @task()
                def my_task():
                    from operator import add
                    add(1, 2)
            """,
            id="UseFunctionLevelImports | SKIP | dag decorator",
        ),
        pytest.param(
            UseFunctionLevelImports,
            """
            from airflow.decorators import dag, task
            from operator import add

            @dag()
            def my_dag():
                @task()
                def my_task():
                    add(1, 2)
                    add(1, 2)
            """,
            """
            from airflow.decorators import dag, task
            from operator import add

            @dag()
            def my_dag():
                @task()
                def my_task():
                    from operator import add
                    add(1, 2)
                    add(1, 2)
            """,
            # import is inserted once even though add() is called twice
            id="UseFunctionLevelImports | SUCCESS | unique",
        ),
        pytest.param(
            UseFunctionLevelImports,
            """
            import random
            from airflow import DAG
            from airflow.decorators import task
            
            with DAG() as dag:
                @task.branch()
                def random_choice():
                    return random.choice(['task_1', 'task_2'])
            """,
            """
            import random
            from airflow import DAG
            from airflow.decorators import task
            
            with DAG() as dag:
                @task.branch()
                def random_choice():
                    import random
                    return random.choice(['task_1', 'task_2'])
            """,
            id="UseFunctionLevelImports | SKIP | function decorators",
        ),
        # --- UseJinjaVariableGet: replace Variable.get(...) passed to a
        # --- templated operator field with the equivalent Jinja expression.
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", foo=Variable.get("FOO"))
            """,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", foo='{{ var.value.FOO }}')
            """,
            id="UseJinjaVariableGet | SUCCESS | direct assignment with key",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator

            KubernetesPodOperator(task_id="fake", image=Variable.get("FOO"))
            """,
            """
            from airflow.models import Variable
            from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator

            KubernetesPodOperator(task_id="fake", image=Variable.get("FOO"))
            """,
            # the provider package is not installed in the test env, so the
            # rule cannot resolve template_fields and must leave the call alone
            id="UseJinjaVariableGet | SKIP | cannot import module",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable

            def foo():
                from operators.fake import FakeOperator

            FakeOperator(task_id="fake", foo=Variable.get("FOO"))
            """,
            """
            from airflow.models import Variable

            def foo():
                from operators.fake import FakeOperator

            FakeOperator(task_id="fake", foo=Variable.get("FOO"))
            """,
            # import lives in an inner scope the module-level call can't see
            id="UseJinjaVariableGet | SKIP | cannot reach import",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", fizz=Variable.get("FOO"))
            """,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", fizz=Variable.get("FOO"))
            """,
            # "fizz" is not in FakeOperator.template_fields ("foo", "bar")
            id="UseJinjaVariableGet | SKIP | keyword not in template_fields",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            var = Variable.get("FOO")

            FakeOperator(task_id="fake", foo=var)
            """,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            var = '{{ var.value.FOO }}'

            FakeOperator(task_id="fake", foo=var)
            """,
            id="UseJinjaVariableGet | SUCCESS | variable assignment",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            def foo():
                var = Variable.get("FOO")

            FakeOperator(task_id="fake", foo=var)
            """,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            def foo():
                var = Variable.get("FOO")

            FakeOperator(task_id="fake", foo=var)
            """,
            # assignment lives in an inner scope the call site can't reach
            id="UseJinjaVariableGet | SKIP | cannot reach variable",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            var = Variable.get("FOO")

            FakeOperator(task_id="fake", foo=var, bar=var)
            """,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            var = '{{ var.value.FOO }}'

            FakeOperator(task_id="fake", foo=var, bar=var)
            """,
            # both "foo" and "bar" are templated, so the assignment is rewritten
            id="UseJinjaVariableGet | SUCCESS | variable assignment with multiple keywords",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            var = Variable.get("FOO")

            FakeOperator(task_id="fake", foo=var, fizz=var)
            """,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            var = Variable.get("FOO")

            FakeOperator(task_id="fake", foo=var, fizz=var)
            """,
            # one non-templated usage ("fizz") blocks the shared assignment
            id="UseJinjaVariableGet | SKIP | variable assignment at least one keyword not in template_fields",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            var = Variable.get("FOO")

            FakeOperator(task_id="fake", foo=var)
            FakeOperator(task_id="fake2", fizz=var)
            """,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            var = Variable.get("FOO")

            FakeOperator(task_id="fake", foo=var)
            FakeOperator(task_id="fake2", fizz=var)
            """,
            id="UseJinjaVariableGet | SKIP | variable assignment for multiple calls where at least one keyword is not in template_fields",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            var = Variable.get("FOO")

            FakeOperator(task_id="fake", foo=var)
            FakeOperator(task_id="fake2", foo=var)
            """,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            var = '{{ var.value.FOO }}'

            FakeOperator(task_id="fake", foo=var)
            FakeOperator(task_id="fake2", foo=var)
            """,
            id="UseJinjaVariableGet | SUCCESS | variable assignment for multiple calls",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", fizz=str(Variable.get("FOO")))
            """,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", fizz=str(Variable.get("FOO")))
            """,
            # Variable.get wrapped in another call (str) is not rewritten
            id="UseJinjaVariableGet | SKIP | direct assignment with unimplemented parent type",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", foo=Variable.get("FOO", deserialize_json=True))
            """,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", foo='{{ var.json.FOO }}')
            """,
            # deserialize_json=True maps to the var.json Jinja namespace
            id="UseJinjaVariableGet | SUCCESS | direct assignment with key and deserialize_json keyword",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", foo=Variable.get("FOO", default_var="BAR"))
            """,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", foo="{{ var.value.get('FOO', 'BAR') }}")
            """,
            # default_var maps to the two-argument var.value.get(...) form
            id="UseJinjaVariableGet | SUCCESS | direct assignment with key and default_var keyword",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", foo=Variable.get("FOO", default_var=None))
            """,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", foo="{{ var.value.get('FOO', None) }}")
            """,
            id="UseJinjaVariableGet | SUCCESS | direct assignment with key and default_var=None keyword",
        ),
        pytest.param(
            UseJinjaVariableGet,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", foo=Variable.get("FOO", deserialize_json=True, default_var="BAR"))
            """,
            """
            from airflow.models import Variable
            from operators.fake import FakeOperator

            FakeOperator(task_id="fake", foo="{{ var.json.get('FOO', 'BAR') }}")
            """,
            id="UseJinjaVariableGet | SUCCESS | direct assignment with key, deserialize_json and default_var keywords",
        ),
    ],
)
def test_rules(rule, source, expected):
    """Apply a single airflint rule to *source* and compare with *expected*.

    Both snippets are dedented first so the fixtures above can be indented
    for readability; the refactor ``Session`` then runs the rule over the
    source text and must produce exactly the expected text.
    """
    source = textwrap.dedent(source)
    expected = textwrap.dedent(expected)

    session = Session(rules=[rule])
    assert session.run(source) == expected