├── tests ├── __init__.py ├── documentation │ ├── __init__.py │ ├── presentation │ │ ├── __init__.py │ │ └── test_formatters.py │ ├── conftest.py │ └── actions │ │ └── test_propagate.py ├── _fixtures │ └── dbt_sample_project │ │ ├── data │ │ └── .gitkeep │ │ ├── analysis │ │ └── .gitkeep │ │ ├── macros │ │ └── .gitkeep │ │ ├── snapshots │ │ └── .gitkeep │ │ ├── tests │ │ └── .gitkeep │ │ ├── models │ │ ├── mart_user.sql │ │ ├── stg_city.sql │ │ ├── stg_user.sql │ │ ├── mart_user_and_city.sql │ │ └── schema.yml │ │ ├── .gitignore │ │ ├── postgres_schema.sql │ │ ├── docker-compose.yaml │ │ ├── README.md │ │ ├── dbt_project.yml │ │ └── target │ │ └── catalog.json ├── dbt_cloud │ ├── conftest.py │ └── actions │ │ └── test_retrieve_most_recent_artifact.py └── README.md ├── src └── dbttoolkit │ ├── __init__.py │ ├── utils │ ├── __init__.py │ ├── logger.py │ └── io.py │ ├── dbt_cloud │ ├── __init__.py │ ├── models │ │ ├── __init__.py │ │ └── dbt_artifact.py │ ├── clients │ │ ├── __init__.py │ │ └── dbt_cloud_client.py │ ├── actions │ │ ├── __main__.py │ │ ├── _docs.py │ │ ├── __init__.py │ │ ├── retrieve_most_recent_artifact.py │ │ └── retrieve_artifacts_time_interval.py │ └── README.md │ ├── documentation │ ├── __init__.py │ ├── models │ │ ├── __init__.py │ │ └── column.py │ ├── presentation │ │ ├── __init__.py │ │ ├── formatters.py │ │ └── stats.py │ ├── actions │ │ ├── __main__.py │ │ ├── __init__.py │ │ └── propagate.py │ └── README.md │ └── cli.py ├── .mypy.ini ├── .flake8 ├── bin └── dbt-toolkit ├── docs └── propagation_simple.png ├── .coveragerc ├── Pipfile ├── .github └── workflows │ ├── publish-package.yaml │ ├── run-code-checks.yaml │ └── run-tests.yaml ├── pyproject.toml ├── .gitignore ├── Makefile ├── setup.py ├── README.md ├── LICENSE └── Pipfile.lock /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/dbttoolkit/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/dbttoolkit/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/documentation/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/dbttoolkit/dbt_cloud/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/dbttoolkit/dbt_cloud/models/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/dbttoolkit/documentation/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/dbttoolkit/dbt_cloud/clients/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/dbttoolkit/documentation/models/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/data/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/documentation/presentation/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | ignore_missing_imports = True -------------------------------------------------------------------------------- /src/dbttoolkit/documentation/presentation/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/analysis/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/macros/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/snapshots/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/tests/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | # The same as used by black 3 | max-line-length = 120 -------------------------------------------------------------------------------- /bin/dbt-toolkit: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from dbttoolkit import cli 4 | 5 | cli.main() 6 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/models/mart_user.sql: -------------------------------------------------------------------------------- 1 | select * 2 | from {{ ref('stg_user') }} 3 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/models/stg_city.sql: -------------------------------------------------------------------------------- 1 | select * 2 | from {{ source('raw', 'city') }} 3 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/models/stg_user.sql: -------------------------------------------------------------------------------- 1 | select * 2 | from {{ source('raw', 'user') }} 3 | -------------------------------------------------------------------------------- /docs/propagation_simple.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/voi-oss/dbt-toolkit/HEAD/docs/propagation_simple.png -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/.gitignore: 
-------------------------------------------------------------------------------- 1 | target/* 2 | !target/manifest_original.json 3 | !target/catalog.json 4 | 5 | dbt_modules/ 6 | logs/ 7 | -------------------------------------------------------------------------------- /src/dbttoolkit/dbt_cloud/actions/__main__.py: -------------------------------------------------------------------------------- 1 | from dbttoolkit.dbt_cloud.actions import dbt_cloud_typer_app 2 | 3 | if __name__ == "__main__": 4 | dbt_cloud_typer_app() 5 | -------------------------------------------------------------------------------- /src/dbttoolkit/documentation/actions/__main__.py: -------------------------------------------------------------------------------- 1 | from dbttoolkit.documentation.actions import documentation_typer_app 2 | 3 | if __name__ == "__main__": 4 | documentation_typer_app() 5 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = dbttoolkit 4 | 5 | [report] 6 | exclude_lines = 7 | if self.debug: 8 | pragma: no cover 9 | raise NotImplementedError 10 | if __name__ == .__main__.: 11 | ignore_errors = True 12 | omit = 13 | tests/* 14 | -------------------------------------------------------------------------------- /src/dbttoolkit/documentation/actions/__init__.py: -------------------------------------------------------------------------------- 1 | import typer 2 | 3 | from dbttoolkit.documentation.actions.propagate import typer_app as propagate_typer_app 4 | 5 | documentation_typer_app = typer.Typer() 6 | documentation_typer_app.registered_commands.append(*propagate_typer_app.registered_commands) 7 | -------------------------------------------------------------------------------- /tests/documentation/conftest.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | 5 | CURRENT_FOLDER = Path(__file__).resolve().parent 6 | 7 | 8 | @pytest.fixture(scope="session") 9 | def dbt_sample_project_path(): 10 | return (CURRENT_FOLDER / "../_fixtures/dbt_sample_project").resolve() 11 | -------------------------------------------------------------------------------- /src/dbttoolkit/dbt_cloud/models/dbt_artifact.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class DbtArtifact(str, Enum): 5 | """ 6 | The built-in dbt artifacts 7 | """ 8 | 9 | catalog = "catalog" 10 | manifest = "manifest" 11 | run_results = "run_results" 12 | sources = "sources" 13 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/models/mart_user_and_city.sql: -------------------------------------------------------------------------------- 1 | select 2 | _user.id as user_id, 3 | _user.name as name, -- ambiguous (city also has a name) on purpose 4 | city.id as city_id 5 | 6 | from {{ ref('stg_user') }} as _user 7 | 8 | left join {{ ref('stg_city' )}} as city 9 | on _user.city_id = city.id 10 | -------------------------------------------------------------------------------- /src/dbttoolkit/dbt_cloud/actions/_docs.py: -------------------------------------------------------------------------------- 1 | HELP = dict( 2 | output_folder="the folder where the retrieved artifact will be written", 3 | environment_id="dbt Cloud environment id", 4 | account_id="dbt Cloud 
account id", 5 | project_id="dbt Cloud project id", 6 | job_id="dbt Cloud job id", 7 | token="dbt Cloud API token", 8 | ) 9 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/postgres_schema.sql: -------------------------------------------------------------------------------- 1 | create schema raw; 2 | 3 | create table raw.city ( 4 | id varchar, 5 | name varchar, 6 | population integer 7 | ); 8 | 9 | create table raw.user ( 10 | id varchar, 11 | name varchar, 12 | height_cm integer, 13 | date_of_birth date, 14 | city_id varchar 15 | ); 16 | -------------------------------------------------------------------------------- /src/dbttoolkit/cli.py: -------------------------------------------------------------------------------- 1 | import typer 2 | 3 | from dbttoolkit.dbt_cloud.actions import dbt_cloud_typer_app 4 | from dbttoolkit.documentation.actions import documentation_typer_app 5 | 6 | app = typer.Typer() 7 | 8 | app.add_typer(documentation_typer_app, name="docs") 9 | app.add_typer(dbt_cloud_typer_app, name="dbt-cloud") 10 | 11 | 12 | def main(): 13 | app() 14 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | postgres: 4 | image: postgres:12 5 | restart: always 6 | environment: 7 | - POSTGRES_USER=postgres 8 | - POSTGRES_PASSWORD=postgres 9 | - POSTGRES_DB=dbt_sample_project 10 | ports: 11 | - '5433:5432' 12 | volumes: 13 | - ./postgres_schema.sql:/docker-entrypoint-initdb.d/1-schema.sql 14 | -------------------------------------------------------------------------------- /src/dbttoolkit/dbt_cloud/actions/__init__.py: -------------------------------------------------------------------------------- 1 | import typer 2 | 3 | from dbttoolkit.dbt_cloud.actions.retrieve_artifacts_time_interval import typer_app as artifacts_time_interval_typer_app 4 | from dbttoolkit.dbt_cloud.actions.retrieve_most_recent_artifact import typer_app as most_recent_artifact_typer_app 5 | 6 | dbt_cloud_typer_app = typer.Typer() 7 | dbt_cloud_typer_app.registered_commands.append(*artifacts_time_interval_typer_app.registered_commands) 8 | dbt_cloud_typer_app.registered_commands.append(*most_recent_artifact_typer_app.registered_commands) 9 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [packages] 7 | # Also need to be added to setup.py 8 | pydantic = "~= 1.9.1" 9 | requests = "~= 2.28.0" 10 | typer = "~= 0.4.2" 11 | google-cloud-storage = "~= 2.4.0" 12 | rich = "~= 13.3" 13 | 14 | [dev-packages] 15 | mypy = "== 0.961" 16 | black = "== 22.6.0" 17 | flake8 = "~= 4.0.1" 18 | pytest = "~= 7.1.2" 19 | types-requests = "~= 2.28.0" 20 | pytest-cov = "~= 3.0.0" 21 | responses = "~= 0.23" 22 | isort = "~= 5.10.1" 23 | 24 | [requires] 25 | python_version = "3.8" 26 | -------------------------------------------------------------------------------- /src/dbttoolkit/utils/logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | 4 | LOG_FORMAT = ( 5 | "%(asctime)s - %(name)s - %(threadName)s - %(levelname)s — %(filename)s:%(funcName)s:%(lineno)d - %(message)s" 6 | ) 7 | 
8 | 9 | def init_logger(level=logging.INFO): 10 | logger = get_logger() 11 | logger.setLevel(level) 12 | handler = logging.StreamHandler(sys.stdout) 13 | handler.setFormatter(logging.Formatter(LOG_FORMAT)) 14 | logger.addHandler(handler) 15 | 16 | return logger 17 | 18 | 19 | def get_logger(): 20 | return logging.getLogger("dbt-toolkit") 21 | -------------------------------------------------------------------------------- /.github/workflows/publish-package.yaml: -------------------------------------------------------------------------------- 1 | name: Publish package 2 | 3 | on: 4 | release: 5 | types: [released] 6 | 7 | jobs: 8 | publish-package: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v3 12 | 13 | - name: Set up Python 14 | uses: actions/setup-python@v4 15 | with: 16 | python-version: '3.8.x' 17 | 18 | - name: Build the distribution package 19 | run: make build 20 | 21 | - name: Publish distribution to PyPI 22 | uses: pypa/gh-action-pypi-publish@release/v1 23 | with: 24 | password: ${{ secrets.PYPI_API_TOKEN }} 25 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/README.md: -------------------------------------------------------------------------------- 1 | Welcome to your new dbt project! 2 | 3 | ### Using the starter project 4 | 5 | Try running the following commands: 6 | - dbt run 7 | - dbt test 8 | 9 | 10 | ### Resources: 11 | - Learn more about dbt [in the docs](https://docs.getdbt.com/docs/introduction) 12 | - Check out [Discourse](https://discourse.getdbt.com/) for commonly asked questions and answers 13 | - Join the [chat](https://community.getdbt.com/) on Slack for live discussions and support 14 | - Find [dbt events](https://events.getdbt.com) near you 15 | - Check out [the blog](https://blog.getdbt.com/) for the latest news on dbt's development and best practices 16 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=42", "wheel", "setuptools_scm[toml]>=3.4"] 3 | 4 | [tool.setuptools_scm] 5 | write_to = "src/dbttoolkit/_version.py" 6 | write_to_template = '__version__ = "{version}"' 7 | local_scheme = "no-local-version" 8 | fallback_version = "0+unknown.scm_missing" 9 | 10 | [tool.black] 11 | line-length = 120 12 | target-version = ['py36', 'py37', 'py38', 'py39'] 13 | include = '\.pyi?$' 14 | extend-exclude = ''' 15 | /( 16 | # The following are specific to Black, you probably don't want those. 
17 | | blib2to3 18 | | tests/data 19 | | profiling 20 | )/ 21 | ''' 22 | exclude = ''' 23 | /( 24 | | venv 25 | )/ 26 | ''' 27 | 28 | [tool.isort] 29 | line_length = 120 30 | profile = "black" 31 | -------------------------------------------------------------------------------- /tests/dbt_cloud/conftest.py: -------------------------------------------------------------------------------- 1 | from pytest import fixture 2 | 3 | 4 | @fixture 5 | def account_id(): 6 | return 123 7 | 8 | 9 | @fixture 10 | def project_id(): 11 | return 456 12 | 13 | 14 | @fixture 15 | def environment_id(): 16 | return 789 17 | 18 | 19 | @fixture 20 | def token(): 21 | return "SECRET_TOKEN" 22 | 23 | 24 | @fixture 25 | def dbt_cloud_ids_cli(account_id, project_id, environment_id, token): 26 | return [ 27 | "--account-id", 28 | account_id, 29 | "--project-id", 30 | project_id, 31 | "--environment-id", 32 | environment_id, 33 | "--token", 34 | token, 35 | ] 36 | 37 | 38 | @fixture 39 | def dbt_cloud_ids(account_id, project_id, environment_id): 40 | return dict(account_id=account_id, project_id=project_id, environment_id=environment_id) 41 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Mac stuff 10 | .DS_Store 11 | 12 | # Distribution / packaging 13 | .Python 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | wheels/ 26 | pip-wheel-metadata/ 27 | share/python-wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | _version.py 33 | 34 | # Unit test / coverage reports 35 | htmlcov/ 36 | .tox/ 37 | .nox/ 38 | .coverage 39 | .coverage.* 40 | .cache 41 | nosetests.xml 42 | coverage.xml 43 | *.cover 44 | *.py,cover 45 | .hypothesis/ 46 | .pytest_cache/ 47 | 48 | # pyenv 49 | .python-version 50 | 51 | # Environments 52 | .env 53 | .venv 54 | env/ 55 | venv/ 56 | ENV/ 57 | env.bak/ 58 | venv.bak/ 59 | 60 | # IDEs 61 | .idea/ 62 | *.iml 63 | -------------------------------------------------------------------------------- /.github/workflows/run-code-checks.yaml: -------------------------------------------------------------------------------- 1 | name: Code checks 2 | 3 | on: 4 | push: 5 | branches: main 6 | pull_request: 7 | branches: main 8 | 9 | jobs: 10 | run-code-checks: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - uses: actions/checkout@v3 15 | 16 | - name: Set up Python 17 | uses: actions/setup-python@v4 18 | with: 19 | python-version: '3.8.x' 20 | 21 | - name: Install system dependencies 22 | run: pip3 install pipenv 23 | 24 | - name: Cache Python packages 25 | uses: actions/cache@v3 26 | with: 27 | path: ~/.local/share/virtualenvs/ 28 | key: ${{ runner.os }}-python-${{ hashFiles('**/Pipfile.lock') }} 29 | 30 | - name: Install Python dependencies 31 | run: pipenv install --dev --deploy 32 | 33 | - name: Run type checking 34 | run: make type 35 | 36 | - name: Run lint tools 37 | run: make check-lint 38 | -------------------------------------------------------------------------------- /.github/workflows/run-tests.yaml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: main 6 | pull_request: 7 | branches: main 8 | 9 | jobs: 10 | run-tests: 11 | runs-on: ubuntu-latest 12 
| 13 | steps: 14 | - uses: actions/checkout@v3 15 | 16 | - name: Set up Python 17 | uses: actions/setup-python@v4 18 | with: 19 | python-version: '3.8.x' 20 | 21 | - name: Install system dependencies 22 | run: pip3 install pipenv 23 | 24 | - name: Cache Python packages 25 | uses: actions/cache@v3 26 | with: 27 | path: ~/.local/share/virtualenvs/ 28 | key: ${{ runner.os }}-python-${{ hashFiles('**/Pipfile.lock') }} 29 | 30 | - name: Install Python dependencies 31 | run: pipenv install --dev --deploy 32 | 33 | - name: Install this package 34 | run: pipenv run pip3 install . 35 | 36 | - name: Run tests (with coverage) 37 | run: make test-coverage 38 | 39 | - name: Upload coverage to Codecov 40 | uses: codecov/codecov-action@v1 41 | with: 42 | fail_ci_if_error: true 43 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | APP_NAME?=dbt-toolkit 2 | REGISTRY?=eu.gcr.io/voi-data-warehouse 3 | IMAGE=$(REGISTRY)/$(APP_NAME) 4 | SHORT_SHA?=$(shell git rev-parse --short HEAD) 5 | VERSION?=$(SHORT_SHA) 6 | 7 | .PHONY: build 8 | build: clean-build 9 | python3 setup.py sdist bdist_wheel 10 | 11 | .PHONY: clean-build 12 | clean-build: 13 | rm -rf build dist 14 | 15 | .PHONY: install-dev 16 | install-dev: 17 | @echo "Installing it locally with editable mode" 18 | @pip install -e . 19 | 20 | .PHONY: type 21 | type: 22 | @echo "Running mypy" 23 | @pipenv run mypy . 24 | 25 | .PHONY: check-lint 26 | check-lint: 27 | @echo "Running isort" 28 | @pipenv run isort --check-only . 29 | @echo "Running black" 30 | @pipenv run black --check . 31 | @echo "Running flake" 32 | @pipenv run flake8 33 | 34 | .PHONY: lint 35 | lint: 36 | @echo "Running isort" 37 | @pipenv run isort . 38 | @echo "Running black" 39 | @pipenv run black . 40 | @echo "Running flake" 41 | @pipenv run flake8 42 | 43 | .PHONY: test 44 | test: 45 | @pipenv run pytest tests/ 46 | 47 | .PHONY: test-coverage 48 | test-coverage: 49 | @pipenv run pytest tests/ --cov=dbttoolkit --cov-report=xml 50 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/models/schema.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | sources: 4 | - name: raw 5 | description: Description of the sources 6 | tables: 7 | - name: city 8 | description: Description of the city source. 9 | 10 | columns: 11 | - name: id 12 | description: Primary key of the city table in the source. 13 | 14 | - name: name 15 | description: Name column of the city table in the source. 16 | 17 | - name: user 18 | description: Description of the user source. 19 | 20 | columns: 21 | - name: id 22 | description: Primary key of the user table in the source. 23 | 24 | - name: name 25 | description: Name column of the user table in the source. 
26 | 27 | - name: height 28 | description: Height (in cm) 29 | 30 | models: 31 | - name: stg_city 32 | description: Description of the stg_city model 33 | columns: 34 | - name: id 35 | - name: name 36 | 37 | - name: stg_user 38 | description: Description of the stg_user model 39 | columns: 40 | - name: id 41 | - name: name 42 | - name: height_cm 43 | meta: 44 | original_name: height 45 | 46 | - name: mart_user 47 | columns: 48 | - name: name 49 | - name: height_cm 50 | -------------------------------------------------------------------------------- /src/dbttoolkit/documentation/presentation/formatters.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, List 2 | 3 | from dbttoolkit.documentation.models.column import ColumnDescriptionWithSource 4 | 5 | 6 | def format_node_link_in_markdown(node_id: str) -> str: 7 | """ 8 | Returns a Markdown-formatted link 9 | """ 10 | node_type = node_id.split(".")[0] 11 | 12 | return f"[{node_id}](/#!/{node_type}/{node_id})" 13 | 14 | 15 | def format_upstream_descriptions_to_human_readable(descriptions_from_upstream: ColumnDescriptionWithSource) -> str: 16 | # To achieve a deterministic outcome, we sort the descriptions by node_id 17 | sorted_descriptions = {k: v for k, v in sorted(descriptions_from_upstream.items(), key=lambda item: item[0])} 18 | 19 | # Invert the dict. Description becomes the keys and the sources the values 20 | inverted_descriptions: Dict[str, set] = {} 21 | 22 | for node_id, description in sorted_descriptions.items(): 23 | inverted_descriptions.setdefault(description, set()).add(node_id) 24 | 25 | output: List[str] = [] 26 | 27 | for description, node_ids in inverted_descriptions.items(): 28 | nodes = [format_node_link_in_markdown(node_id) for node_id in node_ids] 29 | 30 | description += f' [propagated from {", ".join(sorted(nodes))}]' 31 | output.append(description) 32 | 33 | return "\n\n".join(output) 34 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/dbt_project.yml: -------------------------------------------------------------------------------- 1 | 2 | # Name your project! Project names should contain only lowercase characters 3 | # and underscores. A good package name should reflect your organization's 4 | # name or the intended use of these models 5 | name: 'dbt_sample_project' 6 | version: '0.1.0' 7 | config-version: 2 8 | 9 | # This setting configures which "profile" dbt uses for this project. 10 | profile: 'dbt_sample_project' 11 | 12 | # These configurations specify where dbt should look for different types of files. 13 | # The `source-paths` config, for example, states that models in this project can be 14 | # found in the "models/" directory. You probably won't need to change these! 15 | source-paths: ["models"] 16 | analysis-paths: ["analysis"] 17 | test-paths: ["tests"] 18 | data-paths: ["data"] 19 | macro-paths: ["macros"] 20 | snapshot-paths: ["snapshots"] 21 | 22 | target-path: "target" # directory which will store compiled SQL files 23 | clean-targets: # directories to be removed by `dbt clean` 24 | - "target" 25 | - "dbt_modules" 26 | 27 | 28 | # Configuring models 29 | # Full documentation: https://docs.getdbt.com/docs/configuring-models 30 | 31 | # In this example config, we tell dbt to build all models in the example/ directory 32 | # as tables. These settings can be overridden in the individual model files 33 | # using the `{{ config(...) }}` macro. 
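# For example (an illustrative note, not part of the original starter project):
# a single model can override the project-wide default below by starting its
# .sql file with: {{ config(materialized='table') }}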
34 | models: 35 | dbt_sample_project: 36 | +materialized: view 37 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | 3 | from setuptools import find_packages, setup 4 | 5 | # Get the long description from the README file 6 | current_folder = pathlib.Path(__file__).parent.resolve() 7 | long_description = (current_folder / "README.md").read_text(encoding="utf-8") 8 | 9 | # Based on: https://packaging.python.org/tutorials/packaging-projects/ 10 | setup( 11 | name="dbt-toolkit", 12 | use_scm_version={ 13 | "write_to": "src/dbttoolkit/_version.py", 14 | "write_to_template": '__version__ = "{version}"\n', 15 | "local_scheme": "no-local-version", 16 | "fallback_version": "0+unknown.scm_missing", 17 | }, 18 | description="Utilities for running dbt automations", 19 | long_description=long_description, 20 | long_description_content_type="text/markdown", 21 | url="https://github.com/voi-oss/dbt-toolkit", 22 | author="Voi Technology AB", 23 | author_email="opensource@voiapp.io", 24 | license="Apache License, Version 2.0", 25 | classifiers=[ 26 | "Development Status :: 3 - Alpha", 27 | "License :: OSI Approved :: Apache Software License", 28 | "Programming Language :: Python :: 3", 29 | "Programming Language :: Python :: 3.9", 30 | "Programming Language :: Python :: 3 :: Only", 31 | ], 32 | scripts=["bin/dbt-toolkit"], 33 | package_dir={"": "src"}, 34 | packages=find_packages(where="src"), 35 | python_requires=">=3.8, <4", 36 | setup_requires=["wheel", "setuptools_scm"], 37 | install_requires=[ 38 | "requests ~= 2.28.0", 39 | "typer ~= 0.4.2", 40 | "google-cloud-storage ~= 2.4.0", 41 | "pydantic ~= 1.9.1", 42 | "rich ~= 13.3.5", 43 | ], 44 | ) 45 | -------------------------------------------------------------------------------- /src/dbttoolkit/utils/io.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pathlib import Path 3 | from typing import Mapping 4 | 5 | from google.cloud import storage 6 | 7 | from dbttoolkit.utils.logger import get_logger 8 | 9 | logger = get_logger() 10 | 11 | 12 | def persist(content: str, output_folder: Path, filename: str, *, bucket_name: str = None): 13 | """ 14 | Writes either to a local file or remotely to a bucket if "bucket_name" is provided 15 | 16 | :param content: the string to be written 17 | :param output_folder: the path to the folder 18 | :param filename: the name of the file 19 | :param bucket_name: the name of the bucket 20 | :return: None 21 | """ 22 | if bucket_name: 23 | write_to_bucket(content, bucket_name, output_folder, filename) 24 | else: 25 | write_to_file(content, output_folder, filename) 26 | 27 | 28 | def write_to_bucket(content: str, bucket_name: str, output_folder: Path, filename: str) -> None: 29 | """ 30 | Uploads files to a GCS bucket 31 | 32 | :param content: the string to be written 33 | :param bucket_name: the name of the bucket 34 | :param output_folder: the relative path in the bucket 35 | :param filename: the name of the file 36 | :return: None 37 | """ 38 | storage_client = storage.Client() 39 | bucket = storage_client.bucket(bucket_name) 40 | 41 | file_path = Path(output_folder, filename) 42 | blob = bucket.blob(str(file_path)) 43 | 44 | logger.info(f"Uploading to GCS: {bucket_name}, {file_path}") 45 | 46 | blob.upload_from_string(content) 47 | 48 | 49 | def write_to_file(content: str, output_folder: Path, filename: str) -> None: 50 
| """ 51 | Writes a string to a file. 52 | Takes care of creating the folder if necessary 53 | 54 | :param content: the string to be written 55 | :param output_folder: the path to the folder 56 | :param filename: the name of the file 57 | :return: None 58 | """ 59 | Path.mkdir(output_folder, parents=True, exist_ok=True) 60 | 61 | logger.info(f"Writing to file: {output_folder / filename}") 62 | 63 | with open(output_folder / filename, "w") as file: 64 | file.write(content) 65 | 66 | 67 | def load_json_file(path) -> dict: 68 | with open(path) as file: 69 | manifest = json.load(file) 70 | 71 | return manifest 72 | 73 | 74 | def write_json_file(json_object: Mapping, output_path: Path) -> None: 75 | with open(output_path, "w") as file: 76 | json.dump(json_object, file, indent=4) 77 | -------------------------------------------------------------------------------- /src/dbttoolkit/dbt_cloud/actions/retrieve_most_recent_artifact.py: -------------------------------------------------------------------------------- 1 | """ 2 | https://docs.getdbt.com/dbt-cloud/api#section/How-to-use-this-API 3 | 4 | curl --request GET \ 5 | --url https://cloud.getdbt.com/api/v2/accounts/ \ 6 | --header 'Content-Type: application/json' \ 7 | --header 'Authorization: Token ' 8 | """ 9 | import json 10 | from pathlib import Path 11 | 12 | import typer 13 | 14 | from dbttoolkit.dbt_cloud.actions._docs import HELP 15 | from dbttoolkit.dbt_cloud.clients.dbt_cloud_client import DbtCloudClient 16 | from dbttoolkit.dbt_cloud.models.dbt_artifact import DbtArtifact 17 | from dbttoolkit.utils.io import write_to_file 18 | from dbttoolkit.utils.logger import get_logger 19 | 20 | typer_app = typer.Typer() 21 | logger = get_logger() 22 | 23 | 24 | @typer_app.command("retrieve-most-recent-artifact") 25 | def run( 26 | artifact_name: DbtArtifact = typer.Argument(..., help="which artifact you want to retrieve"), 27 | output_folder: Path = typer.Option(..., help=HELP["output_folder"]), 28 | preferred_commit: str = typer.Option( 29 | None, 30 | help="if provided, tries to find the most recent run from " 31 | "that commit. If nothing is found, falls back to the most " 32 | "recent run overall (always respecting the job_id argument)", 33 | ), 34 | environment_id: int = typer.Option(..., envvar="DBT_CLOUD_ENVIRONMENT_ID", help=HELP["environment_id"]), 35 | account_id: int = typer.Option(..., envvar="DBT_CLOUD_ACCOUNT_ID", help=HELP["account_id"]), 36 | project_id: int = typer.Option(..., envvar="DBT_CLOUD_PROJECT_ID", help=HELP["project_id"]), 37 | job_id: int = typer.Option(..., envvar="DBT_CLOUD_JOB_ID", help=HELP["job_id"]), 38 | token: str = typer.Option(..., envvar="DBT_CLOUD_TOKEN", help=HELP["token"]), 39 | ) -> None: 40 | """ 41 | Retrieves the `artifact_name` from the latest run from a job. 
42 | """ 43 | client = DbtCloudClient(account_id=account_id, project_id=project_id, environment_id=environment_id, token=token) 44 | logger.info(f"Initializing dbt Cloud client for account {account_id}, project {project_id}, job {job_id}") 45 | 46 | run = client.retrieve_most_recent_run_for_job(job_id, preferred_commit) 47 | logger.info(f'Retrieved run {run["id"]} from {run["finished_at_humanized"]} ago (commit: {run["git_sha"]})') 48 | 49 | manifest = client.retrieve_artifact_from_run(run["id"], artifact_name) 50 | write_to_file(json.dumps(manifest, indent=2), output_folder, f"{artifact_name}.json") 51 | 52 | 53 | # Entry point for direct execution 54 | if __name__ == "__main__": 55 | typer_app() 56 | -------------------------------------------------------------------------------- /tests/documentation/presentation/test_formatters.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import patch 2 | 3 | import pytest 4 | 5 | from dbttoolkit.documentation.models.column import ColumnDescriptionWithSource 6 | from dbttoolkit.documentation.presentation.formatters import ( 7 | format_node_link_in_markdown, 8 | format_upstream_descriptions_to_human_readable, 9 | ) 10 | 11 | 12 | @pytest.mark.parametrize( 13 | "test_input,expected", 14 | [ 15 | ( 16 | "model.project.model_path.model_name", 17 | "[model.project.model_path.model_name](/#!/model/model.project.model_path.model_name)", 18 | ), 19 | ( 20 | "source.project.source_path.source_name", 21 | "[source.project.source_path.source_name](/#!/source/source.project.source_path.source_name)", 22 | ), 23 | ], 24 | ids=["model", "source"], 25 | ) 26 | def test_format_node_link_in_markdown(test_input, expected): 27 | assert format_node_link_in_markdown(test_input) == expected 28 | 29 | 30 | class TestFormatUpstreamDescriptionsToHumanReadable: 31 | @pytest.mark.parametrize( 32 | "test_input,expected_output", 33 | [ 34 | ( 35 | "description_from_upstream_simple", 36 | "description in source1 [propagated from source1]\n\ndescription in source2 [propagated from source2]", 37 | ), 38 | ("description_from_upstream_common", "common description [propagated from source1, source2]"), 39 | ], 40 | ) 41 | def test_simple(self, test_input, expected_output, request): 42 | test_input = request.getfixturevalue(test_input) 43 | 44 | output = format_upstream_descriptions_to_human_readable(test_input) 45 | assert output == expected_output 46 | 47 | @pytest.fixture() 48 | def description_from_upstream_simple(self) -> ColumnDescriptionWithSource: 49 | return { 50 | "source1": "description in source1", 51 | "source2": "description in source2", 52 | } 53 | 54 | @pytest.fixture() 55 | def description_from_upstream_common(self) -> ColumnDescriptionWithSource: 56 | return { 57 | "source1": "common description", 58 | "source2": "common description", 59 | } 60 | 61 | @pytest.fixture(autouse=True, scope="class") 62 | def mocked_link_formatter(self): 63 | """Convenience patch so the strings in the output are more readable""" 64 | with patch( 65 | "dbttoolkit.documentation.presentation.formatters.format_node_link_in_markdown", 66 | side_effect=lambda node_id: node_id, 67 | ): 68 | yield 69 | -------------------------------------------------------------------------------- /tests/dbt_cloud/actions/test_retrieve_most_recent_artifact.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pathlib import Path 3 | from unittest.mock import patch 4 | 5 | import responses 6 | from 
pytest import fixture 7 | from typer.testing import CliRunner 8 | 9 | from dbttoolkit.dbt_cloud.actions import retrieve_most_recent_artifact 10 | from dbttoolkit.dbt_cloud.actions.retrieve_most_recent_artifact import typer_app 11 | 12 | runner = CliRunner() 13 | 14 | 15 | def test_cli_has_mandatory_fields(): 16 | result = runner.invoke(typer_app, ["manifest"]) 17 | assert result.exit_code != 0 18 | assert "Missing option" in result.stdout 19 | 20 | 21 | @responses.activate 22 | def test_cli_execution(dbt_cloud_ids_cli, rest_api_run_result, manifest_json, job_id, account_id): 23 | path = Path("/tmp/") 24 | 25 | responses.add( 26 | responses.GET, 27 | f"https://cloud.getdbt.com/api/v2/accounts/{account_id}/runs", 28 | json=rest_api_run_result, 29 | status=200, 30 | ) 31 | 32 | # Should pick run id 1 because it's the successful one, even if it's not the most recent 33 | responses.add( 34 | responses.GET, 35 | f"https://cloud.getdbt.com/api/v2/accounts/{account_id}/runs/1/artifacts/manifest.json", 36 | json=manifest_json, 37 | status=200, 38 | ) 39 | 40 | with patch.object(retrieve_most_recent_artifact, "write_to_file", autospec=True) as mock: 41 | result = runner.invoke(typer_app, ["manifest", "--output-folder", path, "--job-id", job_id, *dbt_cloud_ids_cli]) 42 | 43 | assert result.exit_code == 0 44 | 45 | assert mock.call_count == 1 46 | assert mock.call_args[0] == (json.dumps(manifest_json, indent=2), path, "manifest.json") 47 | 48 | 49 | @fixture 50 | def job_id(): 51 | return 999 52 | 53 | 54 | @fixture 55 | def rest_api_run_result(job_id, dbt_cloud_ids): 56 | common = {**dbt_cloud_ids, "job_id": job_id, "finished_at_humanized": "1 hour ago", "git_sha": "abc123"} 57 | 58 | failure = { 59 | "id": 2, 60 | "is_complete": True, 61 | "is_success": False, 62 | "is_cancelled": False, 63 | "created_at": "2022-06-10 11:30:00.321339+00:00", 64 | **common, 65 | } 66 | 67 | cancelled = { 68 | "id": 3, 69 | "is_complete": True, 70 | "is_success": False, 71 | "is_cancelled": True, 72 | "created_at": "2022-06-10 11:30:00.321339+00:00", 73 | **common, 74 | } 75 | 76 | success = { 77 | "id": 1, 78 | "is_complete": True, 79 | "is_success": True, 80 | "is_cancelled": False, 81 | "created_at": "2022-06-09 11:30:00.321339+00:00", 82 | **common, 83 | } 84 | 85 | return {"data": [failure, cancelled, success]} 86 | 87 | 88 | @fixture 89 | def manifest_json(): 90 | return {"manifest": "mock"} 91 | -------------------------------------------------------------------------------- /tests/README.md: -------------------------------------------------------------------------------- 1 | # Tests 2 | 3 | ## dbt sample project 4 | 5 | In order to run tests on dbt artifacts that are as close to reality as possible, we have a sample dbt project inside 6 | the `_fixtures` folder. 7 | 8 | It uses a Postgres adapter to connect to a Postgres database provided in this repository through Docker. 9 | 10 | The steps below are not necessary for running or writing new tests; they are only needed when you have to update the generated artifacts.
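For instance, a new test under `tests/documentation/` can consume the committed artifacts directly through the `dbt_sample_project_path` fixture from that folder's `conftest.py`. This is a hypothetical example; the asserted key is just the standard top-level `nodes` key of a dbt catalog:

```python
from dbttoolkit.utils.io import load_json_file


def test_catalog_fixture_is_loadable(dbt_sample_project_path):
    # The fixture resolves to tests/_fixtures/dbt_sample_project
    catalog = load_json_file(dbt_sample_project_path / "target" / "catalog.json")

    assert "nodes" in catalog
```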
 11 | 12 | ### Updating the project 13 | 14 | ```shell 15 | $ pip install "dbt~=0.21.0" 16 | ``` 17 | 18 | Add the following entry to your dbt `~/.dbt/profiles.yml`: 19 | 20 | ```yaml 21 | dbt_sample_project: 22 | outputs: 23 | dev: 24 | type: postgres 25 | host: localhost 26 | port: 5433 27 | user: postgres 28 | password: postgres 29 | dbname: dbt_sample_project 30 | schema: dev 31 | threads: 8 32 | 33 | target: dev 34 | ``` 35 | 36 | Go to the root folder of the dbt sample project before running the next commands. 37 | 38 | To spin up the database, open up a new terminal and run: 39 | 40 | ```shell 41 | $ docker-compose up 42 | ``` 43 | 44 | And finally, test your connection by running: 45 | 46 | ```shell 47 | $ dbt debug --target dev 48 | ``` 49 | 50 | Once you confirm that the connection can be established, you can proceed with your changes in the dbt project (e.g. adding 51 | new columns or documentation). After your changes in the dbt project are done, create the artifacts and copy 52 | the `manifest` to a new file, since this is what the unit tests are expecting. 53 | 54 | Note: we use `jq` to prettify the JSON files. This tool needs to be installed separately (e.g. through `brew`). For the 55 | commands below, we need a temporary file because it's not possible to directly redirect the output to the same file we 56 | are reading. 57 | 58 | ```shell 59 | $ dbt run 60 | $ dbt docs generate 61 | $ cat target/catalog.json | jq '.' | cat > target/catalog.json.tmp && mv target/catalog.json.tmp target/catalog.json 62 | $ cat target/manifest.json | jq '.' | cat > target/manifest.json.tmp && mv target/manifest.json.tmp target/manifest.json 63 | $ cp target/manifest.json target/manifest_original.json 64 | ``` 65 | 66 | Another advantage of copying the file to a new name is that you can now run our project while still preserving the 67 | original manifest. 68 | 69 | ```shell 70 | $ python -m dbttoolkit.documentation.actions.propagate \ 71 | --artifacts-folder=./target \ 72 | --input-manifest-filename=manifest_original.json \ 73 | --output-manifest-path=./target/manifest.json 74 | ``` 75 | 76 | And finally, you can spin up the dbt docs server to inspect the modified manifest. 77 | 78 | ```shell 79 | $ dbt docs serve 80 | ``` 81 | 82 | After you are done, note that `Ctrl+c` will stop the Postgres container but not its network and storage. For a complete 83 | teardown of the `docker-compose` setup, run: 84 | 85 | ```shell 86 | $ docker-compose down 87 | ``` 88 | 89 | ### Initial setup 90 | 91 | These are the commands that were used to set up the sample project. You do not need to run them when updating the sample project. 92 | 93 | ```shell 94 | $ dbt init dbt_sample_project 95 | ``` 96 | -------------------------------------------------------------------------------- /src/dbttoolkit/documentation/presentation/stats.py: -------------------------------------------------------------------------------- 1 | from collections import namedtuple 2 | from typing import List 3 | 4 | from dbttoolkit.documentation.models.column import ColumnRegistry 5 | from dbttoolkit.utils.logger import get_logger 6 | 7 | logger = get_logger() 8 | 9 | PotentialPropagation = namedtuple("PotentialPropagation", ["parent", "children"]) 10 | 11 | 12 | def calculate_and_print(registry: ColumnRegistry) -> None: 13 | """ 14 | Calculate some basic statistics (total number of parsed columns, how many are documented, etc.) and 15 | which columns would benefit the most from being documented.
16 | """ 17 | logger.info(f"Total columns: {len(registry.data.keys())}") 18 | 19 | columns_without_docs = [column for column in registry.data.values() if not column.description] 20 | columns_with_docs = [column for column in registry.data.values() if column.description] 21 | logger.info(f"👍 With docs: {len(columns_with_docs)} ({len(columns_with_docs) / len(registry.data.keys())})") 22 | logger.info( 23 | f"👎 Without docs: {len(columns_without_docs)} ({len(columns_without_docs) / len(registry.data.keys())})" 24 | ) 25 | 26 | columns_can_receive_propagation = [ 27 | column for column in registry.data.values() if column.descriptions_from_upstream and not column.description 28 | ] 29 | logger.info(f"✅ Columns with documentation propagated: {len(columns_can_receive_propagation)}") 30 | 31 | best_columns_to_be_documented = _find_best_columns_to_be_documented(registry) 32 | logger.info(f"🕵 Can propagate documentation if documented: {len(best_columns_to_be_documented)}") 33 | 34 | if best_columns_to_be_documented: 35 | logger.info("\n=> Top 25:") 36 | 37 | logger.info( 38 | "\n".join( 39 | [ 40 | f"{potential.parent} ({len(potential.children)}) => {potential.children}" 41 | for potential in best_columns_to_be_documented[0:25] 42 | ] 43 | ) 44 | ) 45 | 46 | 47 | def _find_best_columns_to_be_documented(registry: ColumnRegistry) -> list: 48 | """ 49 | Retrieves which columns are not documented but would benefit the most if documented (i.e. propagating its 50 | documentation would affect most downstream columns). 51 | """ 52 | best_columns_to_be_documented: List[PotentialPropagation] = [] 53 | 54 | for column in registry.data.values(): 55 | if column.upstream_matches: 56 | # We only want nodes without parents, because otherwise we would be repeating nodes 57 | continue 58 | 59 | if column.description: 60 | # And not currently documented 61 | continue 62 | 63 | children = [column for column in column.downstream_matches_recursive if not column.description] 64 | 65 | if len(children) == 0: 66 | # Wouldn't be propagated anywhere 67 | continue 68 | 69 | best_columns_to_be_documented.append(PotentialPropagation(column, children)) 70 | 71 | best_columns_to_be_documented.sort(key=lambda potential: len(potential.children)) 72 | best_columns_to_be_documented.reverse() 73 | 74 | return best_columns_to_be_documented 75 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # dbt-toolkit 2 | 3 | [![PyPI version](https://badge.fury.io/py/dbt-toolkit.svg)](https://badge.fury.io/py/dbt-toolkit) 4 | [![Tests](https://github.com/voi-oss/dbt-toolkit/actions/workflows/run-tests.yaml/badge.svg)](https://github.com/voi-oss/dbt-toolkit/actions/workflows/run-tests.yaml) 5 | [![Code checks](https://github.com/voi-oss/dbt-toolkit/actions/workflows/run-code-checks.yaml/badge.svg)](https://github.com/voi-oss/dbt-toolkit/actions/workflows/run-code-checks.yaml) 6 | [![codecov](https://codecov.io/gh/voi-oss/dbt-toolkit/branch/main/graph/badge.svg?token=5JS1RLYRQF)](https://codecov.io/gh/voi-oss/dbt-toolkit) 7 | [![Apache License 2.0](https://img.shields.io/github/license/voi-oss/dbt-toolkit)](https://github.com/voi-oss/dbt-toolkit) 8 | 9 | A collection of utilities and tools for teams and organizations using dbt. 10 | 11 | > This project is in an ALPHA stage. Internal and external APIs might change between minor versions. 12 | 13 | > Please reach out if you try this at your own organization. 
Feedback is very much appreciated, and we 14 | > would love to hear if you had any issues setting this up on your own. 15 | 16 | ## Automations 17 | 18 | ### Documentation 19 | 20 | * Propagates the documentation of columns that have the same name to downstream models, improving documentation 21 | coverage while reducing manual repeated work 22 | 23 | More information can be found on the package's [README](src/dbttoolkit/documentation/README.md). 24 | 25 | ### dbt Cloud artifacts 26 | 27 | * Retrieves artifacts from a dbt Cloud project. Useful for building reports (such as test and documentation coverage) 28 | 29 | More information can be found on the package's [README](src/dbttoolkit/dbt_cloud/README.md). 30 | 31 | ## Installation 32 | 33 | This project requires Python 3.8+. You can install the latest version of this package from PyPI by running the 34 | command below. 35 | 36 | ```shell 37 | $ pip install dbt-toolkit 38 | ``` 39 | 40 | ## Development 41 | 42 | ### Contributing 43 | 44 | We are open to contributions, both as pull requests and as ideas and feedback. Don't hesitate 45 | to create an Issue on this repository if you are trying this project in your organization or have anything to share. 46 | 47 | Remember to run `make lint`, `make type` and `make test` before committing. 48 | 49 | Run `make install-dev` to install it locally in editable mode. This is necessary for running the tests. 50 | 51 | ### Architecture 52 | 53 | Groups of functionalities are encapsulated together in top-level packages, such as `dbt_cloud/` or `documentation/`. 54 | Each package that exposes CLI commands should contain an `actions` sub-package. 55 | 56 | ## Tests 57 | 58 | More information can be found on the tests' [README](tests/README.md). 59 | 60 | ## Release 61 | 62 | There is a GitHub Action that will trigger a release of this package on PyPI based on releases created on GitHub. 63 | Steps: 64 | 65 | * Loosely follow [semantic versioning](https://semver.org/) 66 | * Remember to prefix the tag name with `v` 67 | * Use the tag name as the release title on GitHub 68 | * Use the auto-generated release notes from GitHub 69 | * Append a link at the end of the release notes to the released version on PyPI 70 | 71 | ## License 72 | 73 | This project is licensed under the Apache License, Version 2.0: http://www.apache.org/licenses/LICENSE-2.0. 74 | -------------------------------------------------------------------------------- /src/dbttoolkit/documentation/README.md: -------------------------------------------------------------------------------- 1 | # dbt documentation propagation 2 | 3 | Propagates dbt documentation for columns that are documented and have the same name in downstream dependencies. 4 | 5 | > This project is in an ALPHA stage. Internal and external APIs might change between minor versions. This module has only been tested against Snowflake. 6 | 7 | > Example: A dbt model `user` has a column called `name`, which is documented. Another dbt model `user_stats` 8 | > depends on `user` and also has a column called `name`, which is not documented. This automation will 9 | > propagate (i.e., "pass along") the documentation from `user.name` to `user_stats.name`. 10 | 11 | Example: 12 | 13 |

 14 | ![Propagation example](https://raw.githubusercontent.com/voi-oss/dbt-toolkit/HEAD/docs/propagation_simple.png) 15 | 

 22 | 23 | Currently, the documentation is propagated by modifying the `manifest.json`, and not by editing the 24 | existing `schema.yml` files. 25 | 26 | ## Features 27 | 28 | * The documentation is propagated from upstream models (or sources) to downstream dependencies for columns with the same 29 | name. 30 | * The propagation happens recursively. It can go 31 | from `source (documented) -> child (undocumented) -> grandchild (undocumented)`, no matter how many levels deep. 32 | * A downstream dependency that renamed a column (e.g. from `id` to `user_id`) can specify the original column name as a 33 | meta property and still have the documentation propagated even if the name of the column has changed. 34 | * The propagated documentation automatically includes a link to the node where the documentation was defined. 35 | * If a column can inherit documentation from multiple upstream columns (because they all have the same name), all 36 | documentation will be included. 37 | * The undocumented columns with the most downstream dependencies are listed (i.e. the columns that would benefit 38 | the most from this automation if they were documented) 39 | 40 | ### Column renaming 41 | 42 | If a source `user` has a column called `id` and a downstream model `stg_user` has a column `user_id` and we want the 43 | documentation from the former to be propagated to the latter, the following meta property can be used: 44 | 45 | ```yaml 46 | models: 47 | - name: stg_user 48 | columns: 49 | - name: user_id 50 | meta: 51 | original_name: id 52 | ``` 53 | 54 | Note that if `stg_user` has multiple upstream models with documented columns with the name `id`, all of them will be 55 | inherited. 56 | 57 | ### Features roadmap 58 | 59 | * Propagation from ephemeral models to models and between macros has not been tested yet 60 | * Check if it works for columns documented in the source that do not exist in the warehouse (i.e. if we go with a 61 | raw `json` strategy) 62 | * What happens if 3 upstream columns have the same documentation? Are we repeating all 3? 63 | * Write a test for capturing the behavior of multiple inheritance on different levels: 64 | 65 | ``` 66 | Source1 (YES) ---------------> Target (NO) 67 | Source2 (YES) -> Model2 (NO) / 68 | ``` 69 | 70 | --- 71 | 72 | ## How does it work 73 | 74 | The following dbt artifacts are used as input: 75 | 76 | * `catalog.json` to get all the columns available in the objects (tables and views) in the data warehouse 77 | * `manifest.json` to get the existing project documentation 78 | 79 | They are parsed and all columns are combined into a data structure, which is recursively traversed to identify the columns 80 | with the same name that are part of the same dependency graph. The propagated column documentation is then written back 81 | to the `manifest.json`.
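The heart of that traversal can be sketched as follows. This is a simplified, free-standing version of the `descriptions_from_upstream` property on the internal `Column` model, shown only to illustrate the recursion; the real implementation lives in `src/dbttoolkit/documentation/models/column.py`:

```python
# Simplified sketch: walk a column's upstream matches (same-named columns in
# parent nodes) until documented ancestors are found, keyed by their node id.
def descriptions_from_upstream(column) -> dict:
    descriptions = {}

    for upstream in column.upstream_matches:
        if upstream.description:
            descriptions[upstream.node_id] = upstream.description
        else:
            # Keep searching further up the dependency graph
            descriptions.update(descriptions_from_upstream(upstream))

    return descriptions
```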
82 | -------------------------------------------------------------------------------- /tests/documentation/actions/test_propagate.py: -------------------------------------------------------------------------------- 1 | import json 2 | import tempfile 3 | from pathlib import Path 4 | from typing import Mapping 5 | 6 | import pytest 7 | 8 | from dbttoolkit.documentation.actions import propagate 9 | 10 | 11 | @pytest.fixture(scope="module") 12 | def transformed_artifact(dbt_sample_project_path: Path): 13 | with tempfile.NamedTemporaryFile() as tmpfile: 14 | propagate.run(dbt_sample_project_path / "target", "manifest_original.json", Path(tmpfile.name)) 15 | 16 | yield json.load(tmpfile) 17 | 18 | 19 | def test_propagation_1_level(transformed_artifact: Mapping): 20 | """ 21 | Parent (with documentation) -> child (without documentation) propagation works 22 | """ 23 | description = column_description(transformed_artifact, "stg_user", "name") 24 | 25 | assert ( 26 | description == "Name column of the user table in the source. " 27 | "[propagated from [source.dbt_sample_project.raw.user]" 28 | "(/#!/source/source.dbt_sample_project.raw.user)]" 29 | ) 30 | 31 | 32 | def test_propagation_2_level(transformed_artifact: Mapping): 33 | """ 34 | Parent (with documentation) -> child (without documentation) -> grandchild (without documentation) 35 | propagation works. The first parent is listed as source. 36 | """ 37 | description = column_description(transformed_artifact, "mart_user", "name") 38 | 39 | assert ( 40 | description == "Name column of the user table in the source. " 41 | "[propagated from [source.dbt_sample_project.raw.user]" 42 | "(/#!/source/source.dbt_sample_project.raw.user)]" 43 | ) 44 | 45 | 46 | def test_ignored_columns(transformed_artifact: Mapping): 47 | """ 48 | Some columns can be globally ignored (example: id) 49 | """ 50 | description = column_description(transformed_artifact, "stg_user", "id") 51 | 52 | assert description == "" 53 | 54 | 55 | def test_alias(transformed_artifact: Mapping): 56 | """ 57 | Children can specify a meta attribute for renamed columns 58 | """ 59 | original_description = column_description(transformed_artifact, "raw.user", "height", node_type="source") 60 | propagated_description = column_description(transformed_artifact, "stg_user", "height_cm") 61 | 62 | assert original_description is not None 63 | assert ( 64 | propagated_description 65 | == original_description 66 | + " [propagated from [source.dbt_sample_project.raw.user](/#!/source/source.dbt_sample_project.raw.user)]" 67 | ) 68 | 69 | 70 | def test_2_parents(transformed_artifact: Mapping): 71 | """ 72 | A column can inherit documentation from more than one parent. Note here that they are even on different levels 73 | """ 74 | 75 | description = column_description(transformed_artifact, "mart_user_and_city", "name") 76 | 77 | expected = ( 78 | "Name column of the city table in the source. " 79 | "[propagated from [source.dbt_sample_project.raw.city]" 80 | "(/#!/source/source.dbt_sample_project.raw.city)]" 81 | "\n\n" 82 | "Name column of the user table in the source. " 83 | "[propagated from [source.dbt_sample_project.raw.user]" 84 | "(/#!/source/source.dbt_sample_project.raw.user)]" 85 | ) 86 | 87 | assert description == expected 88 | 89 | 90 | """ 91 | Helper functions 92 | """ 93 | 94 | 95 | def column_description(artifact: Mapping, node_name: str, column_name: str, *, node_type: str = "model"): 96 | """ 97 | Retrieves the description of a column on a node given an artifact. 
 98 | The node type can be either a model (default) or a source. 99 | """ 100 | if node_type == "model": 101 | node = artifact["nodes"][f"model.dbt_sample_project.{node_name}"] 102 | elif node_type == "source": 103 | node = artifact["sources"][f"source.dbt_sample_project.{node_name}"] 104 | else: 105 | raise NotImplementedError(f"Unexpected {node_type}") 106 | 107 | return node["columns"][column_name]["description"] 108 | -------------------------------------------------------------------------------- /src/dbttoolkit/dbt_cloud/README.md: -------------------------------------------------------------------------------- 1 | # dbt Cloud REST API client 2 | 3 | A simple client for the [dbt Cloud REST API](https://docs.getdbt.com/dbt-cloud/api-v2). 4 | 5 | The main use cases implemented are related to downloading 6 | [dbt artifacts](https://docs.getdbt.com/reference/artifacts/dbt-artifacts) from runs executed in a dbt Cloud project. 7 | 8 | ## Commands 9 | 10 | ### Common options 11 | 12 | The following options are mandatory for all the commands below. 13 | They can be provided either as CLI options or environment variables. 14 | 15 | ``` 16 | --account-id INTEGER [env var: DBT_CLOUD_ACCOUNT_ID; required] 17 | --project-id INTEGER [env var: DBT_CLOUD_PROJECT_ID; required] 18 | --environment-id INTEGER [env var: DBT_CLOUD_ENVIRONMENT_ID; required] 19 | --token TEXT [env var: DBT_CLOUD_TOKEN; required] 20 | ``` 21 | 22 | In all the examples below, it is assumed that those options have been passed as environment 23 | variables. 24 | 25 | ### Retrieve most recent artifact 26 | 27 | Retrieves the most recent specified artifact from a specified job. 28 | 29 | We use this command internally to retrieve the most recent `manifest` from our main 30 | production job in order to use it for runs using dbt's 31 | [state-based](https://docs.getdbt.com/reference/node-selection/methods#the-state-method) 32 | selectors as part of our CI workflows. 33 | 34 | Basic usage: 35 | ```shell 36 | $ dbt-toolkit dbt-cloud retrieve-most-recent-artifact ARTIFACT_NAME \ 37 | --output-folder PATH [required] \ 38 | --job-id INTEGER [env var: DBT_CLOUD_JOB_ID; required] 39 | ``` 40 | 41 | Example: 42 | ```shell 43 | $ dbt-toolkit dbt-cloud retrieve-most-recent-artifact manifest \ 44 | --output-folder "/tmp/" \ 45 | --job-id 1234 46 | ``` 47 | 48 | The `PATH` (required option) must be a path to a folder and `ARTIFACT_NAME` must be one of the following: `catalog|manifest|run_results|sources`. 49 | 50 | ### Retrieve artifacts by time interval 51 | 52 | Retrieves all the artifacts from all the jobs (in the given dbt Cloud project) that finished within 53 | a specified time interval. 54 | 55 | We use this internally to retrieve all the artifacts from all our jobs for further metadata analysis, such as historical 56 | tracking of our documentation and test coverage. The time interval helps us execute this on an hourly basis in our 57 | scheduler.
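For illustration, a minimal hourly wrapper might compute the previous full hour and delegate to the CLI. This is a sketch, not part of the package; it assumes the common options above are set as environment variables and that `/tmp/artifacts` is an acceptable output folder:

```python
import subprocess
from datetime import datetime, timedelta, timezone

# Retrieve the previous full hour of artifacts (e.g. 10:00-11:00 when run at 11:xx)
end = datetime.now(timezone.utc).replace(minute=0, second=0, microsecond=0)
start = end - timedelta(hours=1)

subprocess.run(
    [
        "dbt-toolkit", "dbt-cloud", "retrieve-artifacts-time-interval",
        "--output-folder", "/tmp/artifacts",
        "--start-time", start.strftime("%Y-%m-%dT%H:%M:%S"),
        "--end-time", end.strftime("%Y-%m-%dT%H:%M:%S"),
    ],
    check=True,
)
```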
58 | 59 | Basic usage: 60 | ```shell 61 | $ dbt-toolkit dbt-cloud retrieve-artifacts-time-interval \ 62 | --output-folder PATH [required] \ 63 | --start-time [%Y-%m-%dT%H:%M:%S] [required] \ 64 | --end-time [%Y-%m-%dT%H:%M:%S] [required] 65 | ``` 66 | 67 | Example: 68 | ```shell 69 | $ dbt-toolkit dbt-cloud retrieve-artifacts-time-interval \ 70 | --output-folder "/tmp/" \ 71 | --start-time 2022-06-01T00:00:00 \ 72 | --end-time 2022-06-01T01:00:00 73 | ``` 74 | 75 | The output files are partitioned by date, hour, `job_id`, `run_id` and `step`, as in the following 76 | example: 77 | 78 | ``` 79 | date=2022-06-01 80 | └── hour=00 81 | ├── job_id=1234 82 | │ └── run_id=60060838 83 | │ ├── _run.json 84 | │ └── step=4 85 | │ ├── manifest.json 86 | │ └── test_run_results.json 87 | └── job_id=4567 88 | └── run_id=60061366 89 | ├── _run.json 90 | ├── step=4 91 | │ ├── manifest.json 92 | │ └── seed_run_results.json 93 | └── step=5 94 | ├── manifest.json 95 | └── run_run_results.json 96 | ``` 97 | 98 | On dbt Cloud, each job can have several steps. The first 3 steps are internal to dbt Cloud 99 | (1: cloning the repository, 2: creating a profile file, 3: running `dbt deps`) and are ignored. 100 | The steps defined by the dbt Cloud user start from `step=4`. 101 | 102 | For the file names: run results artifacts are prefixed with the command that produced them 103 | (eg: `run_run_results.json` for a `dbt run` execution, or `seed_run_results.json` for a `dbt seed` execution). 104 | The other artifacts (manifest, catalog, sources) have their original names maintained. 105 | 106 | A `_run.json` metadata file with the dbt Cloud API response is also persisted. 107 | 108 | There is an optional argument to this command named `gcs_bucket_name`. If provided, all the folders and files will be 109 | written in a Google Cloud Storage bucket instead of in the local file system. Both `gcs_bucket_name` and `output_folder` 110 | can be provided, in case you want to add the files to a subfolder in the bucket. 111 | -------------------------------------------------------------------------------- /src/dbttoolkit/documentation/models/column.py: -------------------------------------------------------------------------------- 1 | from collections import namedtuple 2 | from typing import Any, Dict, Mapping, Optional, Set 3 | 4 | from pydantic import BaseModel, Field, validator 5 | 6 | ColumnFqn = namedtuple("ColumnFqn", ["node_id", "name"]) 7 | 8 | ColumnDescriptionWithSource = Dict[str, str] # { 'node_id': 'description' } 9 | 10 | 11 | class Column(BaseModel): 12 | """ 13 | A column that exists in a dbt model. A common representation for both dbt catalog and dbt manifest columns. 14 | 15 | It has pointers to the node object from the artifact and also to the original column object. 16 | It also has pointers to upstream and downstream dependencies, and methods to recurse through them.
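An illustrative sketch (hypothetical variables; the mappings come straight from the dbt artifacts):

    parent = Column.build_from_dbt_manifest_column(manifest_column, parent_node)
    child = Column.build_from_dbt_catalog_column(catalog_column, child_node)
    child.add_upstream_match(parent)  # also registers child as a downstream match of parent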
17 | """ 18 | 19 | name: str = Field(..., allow_mutation=False) 20 | node: Mapping = Field(..., allow_mutation=False, repr=False) 21 | 22 | artifact_column: Mapping # Can be either the column representation in the manifest or in the catalog 23 | description: Optional[str] 24 | 25 | upstream_matches: Set = set() 26 | downstream_matches: Set = set() 27 | 28 | class Config: 29 | validate_assignment = True 30 | 31 | @validator("description") 32 | def _normalize_empty_description(cls, value): # empty string -> None; replaces a __post__init__ hook that pydantic never invokes 33 | return value or None 34 | 35 | def add_upstream_match(self, column: "Column") -> None: 36 | self.upstream_matches.add(column) 37 | 38 | if self not in column.downstream_matches: 39 | column.add_downstream_match(self) 40 | 41 | def add_downstream_match(self, column: "Column") -> None: 42 | self.downstream_matches.add(column) 43 | 44 | if self not in column.upstream_matches: 45 | column.add_upstream_match(self) 46 | 47 | @property 48 | def downstream_matches_recursive(self) -> set: 49 | output = set() 50 | 51 | for column in self.downstream_matches: 52 | output.add(column) 53 | output.update(column.downstream_matches_recursive) 54 | 55 | return output 56 | 57 | @property 58 | def descriptions_from_upstream(self) -> ColumnDescriptionWithSource: 59 | """ 60 | :return: dict with node_id as key and original description as value 61 | """ 62 | descriptions = {} 63 | 64 | for column in self.upstream_matches: 65 | if column.description: 66 | descriptions[column.node_id] = column.description 67 | else: 68 | # Only keep searching upstream if no description was found at this level 69 | for node_id, description in column.descriptions_from_upstream.items(): 70 | descriptions[node_id] = description 71 | 72 | return descriptions 73 | 74 | @property 75 | def node_id(self) -> str: 76 | return self.node["unique_id"] 77 | 78 | @property 79 | def fqn(self) -> ColumnFqn: 80 | return ColumnFqn(self.node_id, self.name) 81 | 82 | def __hash__(self): 83 | return hash(self.fqn) 84 | 85 | def __eq__(self, other: Any): 86 | return isinstance(other, self.__class__) and self.fqn == other.fqn 87 | 88 | def __repr__(self): 89 | model_type = self.fqn.node_id.split(".")[0] 90 | friendly_model_name = ".".join(self.fqn.node_id.split(".")[2:]) 91 | return f"[{model_type}] {friendly_model_name}.{self.fqn.name}" 92 | 93 | def __str__(self): 94 | return self.__repr__() 95 | 96 | @classmethod 97 | def build_from_dbt_manifest_column(cls, column: Mapping, node: Mapping) -> "Column": 98 | return cls(name=column["name"].lower(), node=node, description=column["description"], artifact_column=column) 99 | 100 | @classmethod 101 | def build_from_dbt_catalog_column(cls, column: Mapping, node: Mapping) -> "Column": 102 | return cls(name=column["name"].lower(), node=node, artifact_column=column) 103 | 104 | 105 | class ColumnRegistry(BaseModel): 106 | """ 107 | A data structure to make it easy to work with dbt column representations. 108 | 109 | It has a simple internal data structure that maps column unique identifiers to their dbt representation, and it 110 | provides a convenience method to add or retrieve column definitions both from the manifest and the catalog.
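An illustrative sketch (hypothetical variables; repeated calls for the same column FQN return the same object):

    registry = ColumnRegistry()
    column = registry.add_or_retrieve(column_in_manifest=manifest_column, node=node)
    same_column = registry.add_or_retrieve(column_in_catalog=catalog_column, node=node)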
111 | """ 112 | 113 | data: Dict[ColumnFqn, Column] = {} 114 | 115 | def add_or_retrieve(self, *, column_in_manifest: dict = None, column_in_catalog: dict = None, node) -> Column: 116 | if column_in_manifest: 117 | column = Column.build_from_dbt_manifest_column(column_in_manifest, node) 118 | elif column_in_catalog: 119 | column = Column.build_from_dbt_catalog_column(column_in_catalog, node) 120 | else: 121 | raise ValueError("Invalid column details") 122 | 123 | return self.data.setdefault(column.fqn, column) 124 | -------------------------------------------------------------------------------- /src/dbttoolkit/dbt_cloud/actions/retrieve_artifacts_time_interval.py: -------------------------------------------------------------------------------- 1 | """ 2 | https://docs.getdbt.com/dbt-cloud/api#section/How-to-use-this-API 3 | 4 | curl --request GET \ 5 | --url https://cloud.getdbt.com/api/v2/accounts/ \ 6 | --header 'Content-Type: application/json' \ 7 | --header 'Authorization: Token ' 8 | """ 9 | import json 10 | from datetime import datetime, timezone 11 | from pathlib import Path 12 | from typing import Mapping 13 | 14 | import requests 15 | import typer 16 | 17 | from dbttoolkit.dbt_cloud.actions._docs import HELP 18 | from dbttoolkit.dbt_cloud.clients.dbt_cloud_client import DbtCloudClient 19 | from dbttoolkit.dbt_cloud.models.dbt_artifact import DbtArtifact 20 | from dbttoolkit.utils.io import persist 21 | from dbttoolkit.utils.logger import get_logger 22 | 23 | typer_app = typer.Typer() 24 | logger = get_logger() 25 | 26 | RELEVANT_ARTIFACTS = [DbtArtifact.sources, DbtArtifact.run_results, DbtArtifact.manifest] 27 | 28 | 29 | def _process_run(client: DbtCloudClient, run: Mapping, output_folder: Path, bucket_name: str = None) -> None: 30 | """ 31 | Processes an individual run, by writing a run metadata file and looping through the run steps 32 | """ 33 | logger.info(f'Processing run {run["id"]} from {run["finished_at_humanized"]} ago') 34 | 35 | finished_at = datetime.fromisoformat(run["finished_at"]) 36 | 37 | folder_path = Path( 38 | output_folder, 39 | "date={}".format(finished_at.date().isoformat()), 40 | "hour={}".format(finished_at.strftime("%H")), 41 | "job_id={}".format(run["job_id"]), 42 | "run_id={}".format(run["id"]), 43 | ) 44 | 45 | persist(json.dumps(run, indent=2), folder_path, "_run.json", bucket_name=bucket_name) 46 | 47 | # Enumerate starting at 4 since dbt Cloud indexes steps starting at 1 48 | # and has 3 internal steps (clone, profile, dbt deps) 49 | for step_index, _ in enumerate(run["job"]["execute_steps"], 4): 50 | _process_step(client, run, step_index, folder_path, bucket_name) 51 | 52 | 53 | def _process_step( 54 | client: DbtCloudClient, run: Mapping, step_index: int, folder_path: Path, bucket_name: str = None 55 | ) -> None: 56 | """ 57 | Processes an individual run step, by writing the artifacts in the file system 58 | """ 59 | step_folder_path = Path(folder_path, f"step={step_index}") 60 | 61 | for artifact_enum in RELEVANT_ARTIFACTS: 62 | logger.info(f"Downloading {artifact_enum.name}") 63 | 64 | try: 65 | artifact = client.retrieve_artifact_from_run(run["id"], artifact_enum.value, step=step_index) 66 | filename = _generate_step_filename(artifact, artifact_enum) 67 | persist(json.dumps(artifact, indent=2), step_folder_path, filename, bucket_name=bucket_name) 68 | except requests.exceptions.HTTPError: 69 | logger.debug(f"Artifact not found: {artifact_enum.name}") 70 | 71 | 72 | def _generate_step_filename(artifact: Mapping, artifact_name: str): 73 | """ 
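Generates the filename under which an artifact is persisted.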
74 | If this is a run result, we append which command generated it to the filename. 75 | This makes it easier for consumers that are interested in only run results from tests 76 | or only run results from models to only open the files they need. 77 | """ 78 | if artifact_name == DbtArtifact.run_results: 79 | command_executed = artifact["args"]["which"] 80 | return f"{command_executed}_{artifact_name}.json" 81 | 82 | # For all other artifacts, just use the original artifact_name 83 | return f"{artifact_name}.json" 84 | 85 | 86 | @typer_app.command("retrieve-artifacts-time-interval") 87 | def run( 88 | output_folder: Path = typer.Option(..., help=HELP["output_folder"]), 89 | gcs_bucket_name: str = typer.Option( 90 | None, 91 | help="if provided, it will write to a GCS bucket instead of in " 92 | "the local file system. Both `gcs_bucket_name` and `output_folder` " 93 | "can be provided, in case you want to add the files to a subfolder " 94 | "in the bucket.", 95 | ), 96 | start_time: datetime = typer.Option(..., help="the start time (inclusive), in UTC"), 97 | end_time: datetime = typer.Option(..., help="the end time (not inclusive), in UTC"), 98 | environment_id: int = typer.Option(..., envvar="DBT_CLOUD_ENVIRONMENT_ID", help=HELP["environment_id"]), 99 | account_id: int = typer.Option(..., envvar="DBT_CLOUD_ACCOUNT_ID", help=HELP["account_id"]), 100 | project_id: int = typer.Option(..., envvar="DBT_CLOUD_PROJECT_ID", help=HELP["project_id"]), 101 | token: str = typer.Option(..., envvar="DBT_CLOUD_TOKEN", help=HELP["token"]), 102 | ) -> None: 103 | """ 104 | Retrieves artifacts from all runs between start_time (inclusive) and end_time (not inclusive). 105 | """ 106 | start_time = start_time.replace(tzinfo=timezone.utc) 107 | end_time = end_time.replace(tzinfo=timezone.utc) 108 | 109 | client = DbtCloudClient(account_id=account_id, project_id=project_id, environment_id=environment_id, token=token) 110 | logger.info(f"Initializing dbt Cloud client for account {account_id}, project {project_id}") 111 | 112 | runs = client.retrieve_runs_finished_between(start_time, end_time) 113 | 114 | for run in runs: 115 | _process_run(client, run, output_folder, gcs_bucket_name) 116 | 117 | 118 | # Entry point for direct execution 119 | if __name__ == "__main__": 120 | typer_app() 121 | -------------------------------------------------------------------------------- /src/dbttoolkit/dbt_cloud/clients/dbt_cloud_client.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from datetime import datetime 3 | from typing import ClassVar, Dict, Iterable, List, Mapping, Optional 4 | 5 | import requests 6 | 7 | from dbttoolkit.utils.logger import get_logger 8 | 9 | logger = get_logger() 10 | 11 | STANDARD_PAGE_SIZE = 100 12 | 13 | 14 | @dataclass 15 | class DbtCloudClient: 16 | """ 17 | A wrapper around the dbt Cloud REST API 18 | """ 19 | 20 | account_id: int 21 | project_id: int 22 | environment_id: int 23 | token: str 24 | 25 | BASE_URL: ClassVar[str] = "https://cloud.getdbt.com/api/v2" 26 | 27 | def retrieve_runs_finished_between(self, start_time: datetime, end_time: datetime) -> List[Dict]: 28 | """ 29 | Retrieves all runs that finished between start_time (inclusive) and end_time (not inclusive). 
30 | 31 | :param start_time: the start time 32 | :param end_time: the end time 33 | :return: a list of runs 34 | """ 35 | logger.info(f"Retrieving all production runs (between {start_time} and {end_time})") 36 | 37 | completed_runs = self.retrieve_completed_runs(created_after=start_time) 38 | runs = [ 39 | run 40 | for run in completed_runs 41 | if datetime.fromisoformat(run["finished_at"]) >= start_time 42 | and datetime.fromisoformat(run["finished_at"]) < end_time 43 | ] 44 | 45 | return runs 46 | 47 | def retrieve_most_recent_run_for_job(self, job_id: int, preferred_commit: str = None) -> Dict: 48 | """ 49 | Retrieves the most recent successful run of `job_id`. 50 | 51 | If a preferred_commit SHA is provided, first we try to find the latest run from that 52 | commit before falling back to the latest run overall (always respecting the job_id). 53 | 54 | :param job_id: the id of the job 55 | :param preferred_commit: an optional commit SHA 56 | :return: one individual run 57 | """ 58 | logger.info(f"Retrieving most recent run for job {job_id} (preferred commit: {preferred_commit})") 59 | 60 | completed_runs = self.retrieve_completed_runs() 61 | successful_runs = self.filter_successful_runs(completed_runs) 62 | successful_runs = [run for run in successful_runs if run["job_id"] == job_id] 63 | 64 | if preferred_commit: 65 | runs_from_preferred_commit = [run for run in successful_runs if run["git_sha"] == preferred_commit] 66 | 67 | if runs_from_preferred_commit: 68 | logger.info("Found run from preferred commit") 69 | return runs_from_preferred_commit[0] 70 | 71 | if not successful_runs: 72 | raise RuntimeError(f"No successful run found for job {job_id}") 73 | 74 | logger.info("Did not find run from preferred commit (or it was not provided). Picking the latest run instead.") 75 | return successful_runs[0] 76 | 77 | def retrieve_artifact_from_run(self, run_id: int, artifact_name: str, *, step: int = None) -> Dict: 78 | """ 79 | Returns the artifact from a given run 80 | 81 | :param run_id: the id of the run 82 | :param artifact_name: the name of the artifact 83 | :param step: step index, starting at 1 84 | :return: the parsed (dict) artifact 85 | """ 86 | 87 | params: Optional[Mapping] = None 88 | 89 | if step: 90 | params = {"step": step} 91 | 92 | response = requests.get( 93 | self.BASE_URL + f"/accounts/{self.account_id}/runs/{run_id}/artifacts/{artifact_name}.json", 94 | params=params, 95 | headers=self._default_headers(), 96 | ) 97 | response.raise_for_status() 98 | return response.json() 99 | 100 | def retrieve_completed_runs( 101 | self, *, page_size: int = STANDARD_PAGE_SIZE, created_after: datetime = None 102 | ) -> List[Dict]: 103 | """ 104 | Retrieves an arbitrary number of recent completed runs 105 | 106 | :return: a list of runs 107 | """ 108 | runs = [] 109 | page = 0 110 | 111 | fetch_next_page = True 112 | 113 | while fetch_next_page: 114 | # A bit weird, but they do expect a string with an array inside 115 | params = { 116 | "order_by": "-id", 117 | "limit": str(page_size), 118 | "offset": str(page * page_size), 119 | "include_related": '["job", "environment", "trigger"]', 120 | } 121 | 122 | response = requests.get( 123 | self.BASE_URL + f"/accounts/{self.account_id}/runs", params=params, headers=self._default_headers() 124 | ) 125 | response.raise_for_status() 126 | 127 | data = response.json()["data"] 128 | runs += data 129 | 130 | # Pagination: if the last record on this page was created after the given threshold, fetch the next page 131 | last_created_at = datetime.fromisoformat(data[-1]["created_at"]) 132 | 133 | if created_after and last_created_at >= created_after: 134 | logger.info(f"Last result on page {page} ({last_created_at}) >= created after filter ({created_after})") 135 | page += 1 136 | fetch_next_page = True 137 | else: 138 | fetch_next_page = False 139 | 140 | completed_runs = [ 141 | run 142 | for run in runs 143 | if run["is_complete"] 144 | and not run["is_cancelled"] 145 | and run["project_id"] == self.project_id 146 | and run["environment_id"] == self.environment_id 147 | ] 148 | 149 | return completed_runs 150 | 151 | @staticmethod 152 | def filter_successful_runs(runs: Iterable[Dict]) -> List[Dict]: 153 | """ 154 | Filters the given runs, keeping only the successful ones 155 | 156 | :return: a list of successful runs 157 | """ 158 | successful_runs = [run for run in runs if run["is_success"]] 159 | 160 | return successful_runs 161 | 162 | def _default_headers(self) -> Dict: 163 | """ 164 | The default headers for HTTP requests against the dbt Cloud API 165 | """ 166 | return {"Content-Type": "application/json", "Authorization": f"Token {self.token}"} 167 | -------------------------------------------------------------------------------- /src/dbttoolkit/documentation/actions/propagate.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Mapping, Optional 3 | 4 | import typer 5 | from rich import print 6 | 7 | from dbttoolkit.documentation.models.column import Column, ColumnRegistry 8 | from dbttoolkit.documentation.presentation.formatters import format_upstream_descriptions_to_human_readable 9 | from dbttoolkit.documentation.presentation.stats import calculate_and_print 10 | from dbttoolkit.utils.io import load_json_file, write_json_file 11 | from dbttoolkit.utils.logger import init_logger 12 | 13 | IGNORED_COLUMNS = ["id", "created_at", "updated_at", "_row_updated_at", "deleted_at"] 14 | 15 | typer_app = typer.Typer() 16 | logger = init_logger() 17 | 18 | 19 | def traverse_upstream(column: Column, manifest: Mapping, catalog: Mapping, registry: ColumnRegistry) -> None: 20 | """ 21 | Given our main column (expected to already have been added to the registry), traverse the project upstream 22 | looking for columns with the same name. If a match is found, make sure the upstream column is 23 | in the registry and record it as a dependency of the main column. 24 | 25 | :return: None. The results are stored in the column and registry objects.
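Illustrative example of the alias support handled below: a column named height_cm whose manifest entry carries meta {"original_name": "height"} is matched against the upstream column height (this mirrors the alias fixture used in the tests).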
26 | """ 27 | depends_on = column.node.get("depends_on", {}).get("nodes") 28 | 29 | if not depends_on: 30 | # No upstream dependencies, nothing to do 31 | return None 32 | 33 | # Sources are stored in a different place, but we want to process them together 34 | manifest_nodes = {**manifest["nodes"], **manifest["sources"]} 35 | catalog_nodes = {**catalog["nodes"], **catalog["sources"]} 36 | 37 | # Support for our "column renaming"/alias feature 38 | column_alias = column.artifact_column.get("meta", {}).get("original_name") 39 | column_name = column_alias or column.name 40 | 41 | for upstream_node_key in depends_on: 42 | # For every upstream node, look for it in the manifest and in the catalog 43 | manifest_node = manifest_nodes.get(upstream_node_key, {}) 44 | manifest_column_in_upstream_node = manifest_node.get("columns", {}).get(column_name.lower()) 45 | 46 | # Might not be in the catalog (eg: ephemeral models), so the get logic needs fallbacks 47 | # TODO: Assuming everything is uppercase in the catalog is probably Snowflake specific. Better to normalize it 48 | # from both sides (ours and from the artifact) 49 | catalog_node = catalog_nodes.get(upstream_node_key, {}) 50 | catalog_column_in_upstream_node = catalog_node.get("columns", {}).get(column_name.upper()) 51 | 52 | # If it's available in either of the two, add it to (or retrieve it from) the registry and 53 | # register it in the column as an upstream dependency 54 | if manifest_column_in_upstream_node or catalog_column_in_upstream_node: 55 | upstream_column = registry.add_or_retrieve( 56 | column_in_manifest=manifest_column_in_upstream_node, 57 | column_in_catalog=catalog_column_in_upstream_node, 58 | node=manifest_node, 59 | ) 60 | 61 | column.add_upstream_match(upstream_column) 62 | 63 | 64 | def traverse_artifacts(catalog: Mapping, manifest: Mapping, registry: ColumnRegistry) -> None: 65 | """ 66 | Traverse the catalog while using the manifest to look for information to populate the registry. 67 | 68 | * For every node in the catalog: 69 | * See if the node is available in the manifest 70 | * For every column in the catalog: 71 | * Check if the column is also represented in the manifest 72 | * Add both the catalog and manifest representation of that column to the registry 73 | * Check whether any upstream model has the same column. That method is not recursive, but 74 | `traverse_artifacts` will navigate through all nodes, covering the entire project 75 | 76 | :return: None.
The results are stored in the registry 77 | """ 78 | for node_key, node_in_catalog in catalog["nodes"].items(): 79 | node_in_manifest = manifest["nodes"][node_key] 80 | columns_in_catalog = node_in_catalog["columns"] 81 | 82 | for column_key, column_in_catalog in columns_in_catalog.items(): 83 | column_key = column_key.lower() 84 | column_in_manifest = node_in_manifest["columns"].get(column_key) 85 | 86 | column = registry.add_or_retrieve( 87 | column_in_manifest=column_in_manifest, column_in_catalog=column_in_catalog, node=node_in_manifest 88 | ) 89 | 90 | traverse_upstream(column, manifest, catalog, registry) 91 | 92 | 93 | def propagate_documentation_in_the_manifest(registry: ColumnRegistry, manifest: Mapping) -> None: 94 | for column in registry.data.values(): 95 | if column.description or not column.descriptions_from_upstream: 96 | # Skip columns that are already documented, and undocumented columns with no upstream documentation to inherit 97 | continue 98 | 99 | if column.name in IGNORED_COLUMNS: 100 | print(f"Ignoring column {column.name} in {column.node_id}") 101 | continue 102 | 103 | node_in_manifest = manifest["nodes"][column.node_id] 104 | 105 | print(f"[bold red]MISSING[/]: Column [bold]{column.node_id} → {column.name}[/] missing documentation") 106 | print( 107 | " => 🔎 [bold green]FOUND[/]: Found {} candidate(s): {}\n".format( 108 | len(column.descriptions_from_upstream.keys()), list(column.descriptions_from_upstream.keys()) 109 | ) 110 | ) 111 | 112 | formatted_description = format_upstream_descriptions_to_human_readable(column.descriptions_from_upstream) 113 | 114 | if column.name in node_in_manifest["columns"]: 115 | node_in_manifest["columns"][column.name]["description"] = formatted_description 116 | else: 117 | node_in_manifest["columns"][column.name] = dict( 118 | name=column.name, description=formatted_description, tags=["inherited-documentation"] 119 | ) 120 | 121 | 122 | @typer_app.command("propagate") 123 | def run( 124 | artifacts_folder: Path = typer.Option(..., help="The path to the artifacts folder to be used as input"), 125 | input_manifest_filename: Optional[str] = typer.Option("manifest.json", help="The name of the manifest file"), 126 | output_manifest_path: Optional[Path] = typer.Option( 127 | None, help="The full path and filename to the modified manifest file." 128 | ), 129 | ): 130 | """ 131 | If the output manifest path is not provided, the input manifest path is used as the output and the file is thus 132 | overwritten.
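An illustrative invocation (the top-level command group name is an assumption here; it depends on how cli.py registers this typer app):

    dbt-toolkit documentation propagate --artifacts-folder target/ --output-manifest-path target/manifest_propagated.json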
133 | """ 134 | # Read artifacts 135 | manifest_path = artifacts_folder / input_manifest_filename # type: ignore 136 | manifest = load_json_file(manifest_path) 137 | catalog = load_json_file(artifacts_folder / "catalog.json") 138 | 139 | # Create a data structure to hold all columns 140 | registry = ColumnRegistry() 141 | 142 | # Traverse the catalog and manifest and populate the registry 143 | traverse_artifacts(catalog, manifest, registry) 144 | 145 | # Use the registry to find which documentation can be propagated and write it back to the manifest 146 | propagate_documentation_in_the_manifest(registry, manifest) 147 | 148 | # Persist the modified manifest 149 | if output_manifest_path: 150 | output_path = output_manifest_path 151 | else: 152 | output_path = manifest_path 153 | 154 | write_json_file(manifest, output_path) 155 | 156 | # Calculate and print stats 157 | calculate_and_print(registry) 158 | 159 | 160 | # Entry point for direct execution 161 | if __name__ == "__main__": 162 | typer_app() 163 | -------------------------------------------------------------------------------- /tests/_fixtures/dbt_sample_project/target/catalog.json: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", 4 | "dbt_version": "0.21.0", 5 | "generated_at": "2022-01-31T16:06:47.212917Z", 6 | "invocation_id": "16bc053d-0df9-441e-a53f-346de08a94a1", 7 | "env": {} 8 | }, 9 | "nodes": { 10 | "model.dbt_sample_project.mart_user": { 11 | "metadata": { 12 | "type": "VIEW", 13 | "schema": "dev", 14 | "name": "mart_user", 15 | "database": "dbt_sample_project", 16 | "comment": null, 17 | "owner": "postgres" 18 | }, 19 | "columns": { 20 | "id": { 21 | "type": "character varying", 22 | "index": 1, 23 | "name": "id", 24 | "comment": null 25 | }, 26 | "name": { 27 | "type": "character varying", 28 | "index": 2, 29 | "name": "name", 30 | "comment": null 31 | }, 32 | "height_cm": { 33 | "type": "integer", 34 | "index": 3, 35 | "name": "height_cm", 36 | "comment": null 37 | }, 38 | "date_of_birth": { 39 | "type": "date", 40 | "index": 4, 41 | "name": "date_of_birth", 42 | "comment": null 43 | }, 44 | "city_id": { 45 | "type": "character varying", 46 | "index": 5, 47 | "name": "city_id", 48 | "comment": null 49 | } 50 | }, 51 | "stats": { 52 | "has_stats": { 53 | "id": "has_stats", 54 | "label": "Has Stats?", 55 | "value": false, 56 | "include": false, 57 | "description": "Indicates whether there are statistics for this table" 58 | } 59 | }, 60 | "unique_id": "model.dbt_sample_project.mart_user" 61 | }, 62 | "model.dbt_sample_project.mart_user_and_city": { 63 | "metadata": { 64 | "type": "VIEW", 65 | "schema": "dev", 66 | "name": "mart_user_and_city", 67 | "database": "dbt_sample_project", 68 | "comment": null, 69 | "owner": "postgres" 70 | }, 71 | "columns": { 72 | "user_id": { 73 | "type": "character varying", 74 | "index": 1, 75 | "name": "user_id", 76 | "comment": null 77 | }, 78 | "name": { 79 | "type": "character varying", 80 | "index": 2, 81 | "name": "name", 82 | "comment": null 83 | }, 84 | "city_id": { 85 | "type": "character varying", 86 | "index": 3, 87 | "name": "city_id", 88 | "comment": null 89 | } 90 | }, 91 | "stats": { 92 | "has_stats": { 93 | "id": "has_stats", 94 | "label": "Has Stats?", 95 | "value": false, 96 | "include": false, 97 | "description": "Indicates whether there are statistics for this table" 98 | } 99 | }, 100 | "unique_id": 
"model.dbt_sample_project.mart_user_and_city" 101 | }, 102 | "model.dbt_sample_project.stg_city": { 103 | "metadata": { 104 | "type": "VIEW", 105 | "schema": "dev", 106 | "name": "stg_city", 107 | "database": "dbt_sample_project", 108 | "comment": null, 109 | "owner": "postgres" 110 | }, 111 | "columns": { 112 | "id": { 113 | "type": "character varying", 114 | "index": 1, 115 | "name": "id", 116 | "comment": null 117 | }, 118 | "name": { 119 | "type": "character varying", 120 | "index": 2, 121 | "name": "name", 122 | "comment": null 123 | }, 124 | "population": { 125 | "type": "integer", 126 | "index": 3, 127 | "name": "population", 128 | "comment": null 129 | } 130 | }, 131 | "stats": { 132 | "has_stats": { 133 | "id": "has_stats", 134 | "label": "Has Stats?", 135 | "value": false, 136 | "include": false, 137 | "description": "Indicates whether there are statistics for this table" 138 | } 139 | }, 140 | "unique_id": "model.dbt_sample_project.stg_city" 141 | }, 142 | "model.dbt_sample_project.stg_user": { 143 | "metadata": { 144 | "type": "VIEW", 145 | "schema": "dev", 146 | "name": "stg_user", 147 | "database": "dbt_sample_project", 148 | "comment": null, 149 | "owner": "postgres" 150 | }, 151 | "columns": { 152 | "id": { 153 | "type": "character varying", 154 | "index": 1, 155 | "name": "id", 156 | "comment": null 157 | }, 158 | "name": { 159 | "type": "character varying", 160 | "index": 2, 161 | "name": "name", 162 | "comment": null 163 | }, 164 | "height_cm": { 165 | "type": "integer", 166 | "index": 3, 167 | "name": "height_cm", 168 | "comment": null 169 | }, 170 | "date_of_birth": { 171 | "type": "date", 172 | "index": 4, 173 | "name": "date_of_birth", 174 | "comment": null 175 | }, 176 | "city_id": { 177 | "type": "character varying", 178 | "index": 5, 179 | "name": "city_id", 180 | "comment": null 181 | } 182 | }, 183 | "stats": { 184 | "has_stats": { 185 | "id": "has_stats", 186 | "label": "Has Stats?", 187 | "value": false, 188 | "include": false, 189 | "description": "Indicates whether there are statistics for this table" 190 | } 191 | }, 192 | "unique_id": "model.dbt_sample_project.stg_user" 193 | } 194 | }, 195 | "sources": { 196 | "source.dbt_sample_project.raw.city": { 197 | "metadata": { 198 | "type": "BASE TABLE", 199 | "schema": "raw", 200 | "name": "city", 201 | "database": "dbt_sample_project", 202 | "comment": null, 203 | "owner": "postgres" 204 | }, 205 | "columns": { 206 | "id": { 207 | "type": "character varying", 208 | "index": 1, 209 | "name": "id", 210 | "comment": null 211 | }, 212 | "name": { 213 | "type": "character varying", 214 | "index": 2, 215 | "name": "name", 216 | "comment": null 217 | }, 218 | "population": { 219 | "type": "integer", 220 | "index": 3, 221 | "name": "population", 222 | "comment": null 223 | } 224 | }, 225 | "stats": { 226 | "has_stats": { 227 | "id": "has_stats", 228 | "label": "Has Stats?", 229 | "value": false, 230 | "include": false, 231 | "description": "Indicates whether there are statistics for this table" 232 | } 233 | }, 234 | "unique_id": "source.dbt_sample_project.raw.city" 235 | }, 236 | "source.dbt_sample_project.raw.user": { 237 | "metadata": { 238 | "type": "BASE TABLE", 239 | "schema": "raw", 240 | "name": "user", 241 | "database": "dbt_sample_project", 242 | "comment": null, 243 | "owner": "postgres" 244 | }, 245 | "columns": { 246 | "id": { 247 | "type": "character varying", 248 | "index": 1, 249 | "name": "id", 250 | "comment": null 251 | }, 252 | "name": { 253 | "type": "character varying", 254 | "index": 2, 255 | 
"name": "name", 256 | "comment": null 257 | }, 258 | "height_cm": { 259 | "type": "integer", 260 | "index": 3, 261 | "name": "height_cm", 262 | "comment": null 263 | }, 264 | "date_of_birth": { 265 | "type": "date", 266 | "index": 4, 267 | "name": "date_of_birth", 268 | "comment": null 269 | }, 270 | "city_id": { 271 | "type": "character varying", 272 | "index": 5, 273 | "name": "city_id", 274 | "comment": null 275 | } 276 | }, 277 | "stats": { 278 | "has_stats": { 279 | "id": "has_stats", 280 | "label": "Has Stats?", 281 | "value": false, 282 | "include": false, 283 | "description": "Indicates whether there are statistics for this table" 284 | } 285 | }, 286 | "unique_id": "source.dbt_sample_project.raw.user" 287 | } 288 | }, 289 | "errors": null 290 | } 291 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 
48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 
203 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "bb1fadcfb1fc208a26469f60ef56b3a4f182d8850f7b3d34c01e0bf5514e338a" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.8" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "cachetools": { 20 | "hashes": [ 21 | "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590", 22 | "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b" 23 | ], 24 | "markers": "python_version >= '3.7'", 25 | "version": "==5.3.1" 26 | }, 27 | "certifi": { 28 | "hashes": [ 29 | "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082", 30 | "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9" 31 | ], 32 | "markers": "python_version >= '3.6'", 33 | "version": "==2023.7.22" 34 | }, 35 | "charset-normalizer": { 36 | "hashes": [ 37 | "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843", 38 | "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786", 39 | "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e", 40 | "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8", 41 | "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4", 42 | "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa", 43 | "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d", 44 | "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82", 45 | "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7", 46 | "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895", 47 | "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d", 48 | "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a", 49 | "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382", 50 | "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678", 51 | "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b", 52 | "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e", 53 | "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741", 54 | "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4", 55 | "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596", 56 | "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9", 57 | "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69", 58 | "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c", 59 | "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77", 60 | "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13", 61 | "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459", 62 | "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e", 63 | "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7", 64 | "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908", 65 | "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a", 66 | "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f", 67 | 
"sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8", 68 | "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482", 69 | "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d", 70 | "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d", 71 | "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545", 72 | "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34", 73 | "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86", 74 | "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6", 75 | "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe", 76 | "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e", 77 | "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc", 78 | "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7", 79 | "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd", 80 | "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c", 81 | "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557", 82 | "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a", 83 | "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89", 84 | "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078", 85 | "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e", 86 | "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4", 87 | "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403", 88 | "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0", 89 | "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89", 90 | "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115", 91 | "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9", 92 | "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05", 93 | "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a", 94 | "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec", 95 | "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56", 96 | "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38", 97 | "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479", 98 | "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c", 99 | "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e", 100 | "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd", 101 | "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186", 102 | "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455", 103 | "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c", 104 | "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65", 105 | "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78", 106 | "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287", 107 | "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df", 108 | "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43", 109 | "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1", 110 | "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7", 111 | 
"sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989", 112 | "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a", 113 | "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63", 114 | "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884", 115 | "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649", 116 | "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810", 117 | "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828", 118 | "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4", 119 | "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2", 120 | "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd", 121 | "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5", 122 | "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe", 123 | "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293", 124 | "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e", 125 | "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e", 126 | "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8" 127 | ], 128 | "markers": "python_full_version >= '3.7.0'", 129 | "version": "==3.3.0" 130 | }, 131 | "click": { 132 | "hashes": [ 133 | "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", 134 | "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" 135 | ], 136 | "markers": "python_version >= '3.7'", 137 | "version": "==8.1.7" 138 | }, 139 | "google-api-core": { 140 | "hashes": [ 141 | "sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553", 142 | "sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160" 143 | ], 144 | "markers": "python_version >= '3.7'", 145 | "version": "==2.12.0" 146 | }, 147 | "google-auth": { 148 | "hashes": [ 149 | "sha256:5a9af4be520ba33651471a0264eead312521566f44631cbb621164bc30c8fd40", 150 | "sha256:c2e253347579d483004f17c3bd0bf92e611ef6c7ba24d41c5c59f2e7aeeaf088" 151 | ], 152 | "markers": "python_version >= '3.7'", 153 | "version": "==2.23.2" 154 | }, 155 | "google-cloud-core": { 156 | "hashes": [ 157 | "sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb", 158 | "sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863" 159 | ], 160 | "markers": "python_version >= '3.7'", 161 | "version": "==2.3.3" 162 | }, 163 | "google-cloud-storage": { 164 | "hashes": [ 165 | "sha256:5fe26f1381b30e3cc328f46e13531ca8525458f870c1e303c616bdeb7b7f5c66", 166 | "sha256:973e7f7d9afcd4805769b6ea9ac15ab9df7037530850374f1494b5a2c8f65b6b" 167 | ], 168 | "index": "pypi", 169 | "version": "==2.4.0" 170 | }, 171 | "google-crc32c": { 172 | "hashes": [ 173 | "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a", 174 | "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876", 175 | "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c", 176 | "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289", 177 | "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298", 178 | "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02", 179 | "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f", 180 | "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2", 181 | 
"sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a", 182 | "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb", 183 | "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210", 184 | "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5", 185 | "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee", 186 | "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c", 187 | "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a", 188 | "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314", 189 | "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd", 190 | "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65", 191 | "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37", 192 | "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4", 193 | "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13", 194 | "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894", 195 | "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31", 196 | "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e", 197 | "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709", 198 | "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740", 199 | "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc", 200 | "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d", 201 | "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c", 202 | "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c", 203 | "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d", 204 | "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906", 205 | "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61", 206 | "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57", 207 | "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c", 208 | "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a", 209 | "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438", 210 | "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946", 211 | "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7", 212 | "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96", 213 | "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091", 214 | "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae", 215 | "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d", 216 | "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88", 217 | "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2", 218 | "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd", 219 | "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541", 220 | "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728", 221 | "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178", 222 | "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968", 223 | "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346", 224 | 
"sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8", 225 | "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93", 226 | "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7", 227 | "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273", 228 | "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462", 229 | "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94", 230 | "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd", 231 | "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e", 232 | "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57", 233 | "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b", 234 | "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9", 235 | "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a", 236 | "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100", 237 | "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325", 238 | "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183", 239 | "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556", 240 | "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4" 241 | ], 242 | "markers": "python_version >= '3.7'", 243 | "version": "==1.5.0" 244 | }, 245 | "google-resumable-media": { 246 | "hashes": [ 247 | "sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7", 248 | "sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b" 249 | ], 250 | "markers": "python_version >= '3.7'", 251 | "version": "==2.6.0" 252 | }, 253 | "googleapis-common-protos": { 254 | "hashes": [ 255 | "sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918", 256 | "sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708" 257 | ], 258 | "markers": "python_version >= '3.7'", 259 | "version": "==1.60.0" 260 | }, 261 | "idna": { 262 | "hashes": [ 263 | "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", 264 | "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" 265 | ], 266 | "markers": "python_version >= '3.5'", 267 | "version": "==3.4" 268 | }, 269 | "markdown-it-py": { 270 | "hashes": [ 271 | "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", 272 | "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb" 273 | ], 274 | "markers": "python_version >= '3.8'", 275 | "version": "==3.0.0" 276 | }, 277 | "mdurl": { 278 | "hashes": [ 279 | "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", 280 | "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba" 281 | ], 282 | "markers": "python_version >= '3.7'", 283 | "version": "==0.1.2" 284 | }, 285 | "protobuf": { 286 | "hashes": [ 287 | "sha256:02212557a76cd99574775a81fefeba8738d0f668d6abd0c6b1d3adcc75503dbe", 288 | "sha256:1badab72aa8a3a2b812eacfede5020472e16c6b2212d737cefd685884c191085", 289 | "sha256:2fa3886dfaae6b4c5ed2730d3bf47c7a38a72b3a1f0acb4d4caf68e6874b947b", 290 | "sha256:5a70731910cd9104762161719c3d883c960151eea077134458503723b60e3667", 291 | "sha256:6b7d2e1c753715dcfe9d284a25a52d67818dd43c4932574307daf836f0071e37", 292 | "sha256:80797ce7424f8c8d2f2547e2d42bfbb6c08230ce5832d6c099a37335c9c90a92", 293 | "sha256:8e61a27f362369c2f33248a0ff6896c20dcd47b5d48239cb9720134bef6082e4", 294 | 
"sha256:9fee5e8aa20ef1b84123bb9232b3f4a5114d9897ed89b4b8142d81924e05d79b", 295 | "sha256:b493cb590960ff863743b9ff1452c413c2ee12b782f48beca77c8da3e2ffe9d9", 296 | "sha256:b77272f3e28bb416e2071186cb39efd4abbf696d682cbb5dc731308ad37fa6dd", 297 | "sha256:bffa46ad9612e6779d0e51ae586fde768339b791a50610d85eb162daeb23661e", 298 | "sha256:dbbed8a56e56cee8d9d522ce844a1379a72a70f453bde6243e3c86c30c2a3d46", 299 | "sha256:ec9912d5cb6714a5710e28e592ee1093d68c5ebfeda61983b3f40331da0b1ebb" 300 | ], 301 | "markers": "python_version >= '3.7'", 302 | "version": "==4.24.4" 303 | }, 304 | "pyasn1": { 305 | "hashes": [ 306 | "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57", 307 | "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde" 308 | ], 309 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", 310 | "version": "==0.5.0" 311 | }, 312 | "pyasn1-modules": { 313 | "hashes": [ 314 | "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c", 315 | "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d" 316 | ], 317 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", 318 | "version": "==0.3.0" 319 | }, 320 | "pydantic": { 321 | "hashes": [ 322 | "sha256:1061c6ee6204f4f5a27133126854948e3b3d51fcc16ead2e5d04378c199b2f44", 323 | "sha256:19b5686387ea0d1ea52ecc4cffb71abb21702c5e5b2ac626fd4dbaa0834aa49d", 324 | "sha256:2bd446bdb7755c3a94e56d7bdfd3ee92396070efa8ef3a34fab9579fe6aa1d84", 325 | "sha256:328558c9f2eed77bd8fffad3cef39dbbe3edc7044517f4625a769d45d4cf7555", 326 | "sha256:32e0b4fb13ad4db4058a7c3c80e2569adbd810c25e6ca3bbd8b2a9cc2cc871d7", 327 | "sha256:3ee0d69b2a5b341fc7927e92cae7ddcfd95e624dfc4870b32a85568bd65e6131", 328 | "sha256:4aafd4e55e8ad5bd1b19572ea2df546ccace7945853832bb99422a79c70ce9b8", 329 | "sha256:4b3946f87e5cef3ba2e7bd3a4eb5a20385fe36521d6cc1ebf3c08a6697c6cfb3", 330 | "sha256:4de71c718c9756d679420c69f216776c2e977459f77e8f679a4a961dc7304a56", 331 | "sha256:5565a49effe38d51882cb7bac18bda013cdb34d80ac336428e8908f0b72499b0", 332 | "sha256:5803ad846cdd1ed0d97eb00292b870c29c1f03732a010e66908ff48a762f20e4", 333 | "sha256:5da164119602212a3fe7e3bc08911a89db4710ae51444b4224c2382fd09ad453", 334 | "sha256:615661bfc37e82ac677543704437ff737418e4ea04bef9cf11c6d27346606044", 335 | "sha256:78a4d6bdfd116a559aeec9a4cfe77dda62acc6233f8b56a716edad2651023e5e", 336 | "sha256:7d0f183b305629765910eaad707800d2f47c6ac5bcfb8c6397abdc30b69eeb15", 337 | "sha256:7ead3cd020d526f75b4188e0a8d71c0dbbe1b4b6b5dc0ea775a93aca16256aeb", 338 | "sha256:84d76ecc908d917f4684b354a39fd885d69dd0491be175f3465fe4b59811c001", 339 | "sha256:8cb0bc509bfb71305d7a59d00163d5f9fc4530f0881ea32c74ff4f74c85f3d3d", 340 | "sha256:91089b2e281713f3893cd01d8e576771cd5bfdfbff5d0ed95969f47ef6d676c3", 341 | "sha256:9c9e04a6cdb7a363d7cb3ccf0efea51e0abb48e180c0d31dca8d247967d85c6e", 342 | "sha256:a8c5360a0297a713b4123608a7909e6869e1b56d0e96eb0d792c27585d40757f", 343 | "sha256:afacf6d2a41ed91fc631bade88b1d319c51ab5418870802cedb590b709c5ae3c", 344 | "sha256:b34ba24f3e2d0b39b43f0ca62008f7ba962cff51efa56e64ee25c4af6eed987b", 345 | "sha256:bd67cb2c2d9602ad159389c29e4ca964b86fa2f35c2faef54c3eb28b4efd36c8", 346 | "sha256:c0f5e142ef8217019e3eef6ae1b6b55f09a7a15972958d44fbd228214cede567", 347 | "sha256:cdb4272678db803ddf94caa4f94f8672e9a46bae4a44f167095e4d06fec12979", 348 | "sha256:d70916235d478404a3fa8c997b003b5f33aeac4686ac1baa767234a0f8ac2326", 349 | 
"sha256:d8ce3fb0841763a89322ea0432f1f59a2d3feae07a63ea2c958b2315e1ae8adb", 350 | "sha256:e0b214e57623a535936005797567231a12d0da0c29711eb3514bc2b3cd008d0f", 351 | "sha256:e631c70c9280e3129f071635b81207cad85e6c08e253539467e4ead0e5b219aa", 352 | "sha256:e78578f0c7481c850d1c969aca9a65405887003484d24f6110458fb02cca7747", 353 | "sha256:f0ca86b525264daa5f6b192f216a0d1e860b7383e3da1c65a1908f9c02f42801", 354 | "sha256:f1a68f4f65a9ee64b6ccccb5bf7e17db07caebd2730109cb8a95863cfa9c4e55", 355 | "sha256:fafe841be1103f340a24977f61dee76172e4ae5f647ab9e7fd1e1fca51524f08", 356 | "sha256:ff68fc85355532ea77559ede81f35fff79a6a5543477e168ab3a381887caea76" 357 | ], 358 | "index": "pypi", 359 | "version": "==1.9.2" 360 | }, 361 | "pygments": { 362 | "hashes": [ 363 | "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692", 364 | "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29" 365 | ], 366 | "markers": "python_version >= '3.7'", 367 | "version": "==2.16.1" 368 | }, 369 | "requests": { 370 | "hashes": [ 371 | "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa", 372 | "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf" 373 | ], 374 | "index": "pypi", 375 | "version": "==2.28.2" 376 | }, 377 | "rich": { 378 | "hashes": [ 379 | "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245", 380 | "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef" 381 | ], 382 | "index": "pypi", 383 | "version": "==13.6.0" 384 | }, 385 | "rsa": { 386 | "hashes": [ 387 | "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7", 388 | "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21" 389 | ], 390 | "markers": "python_version >= '3.6' and python_version < '4'", 391 | "version": "==4.9" 392 | }, 393 | "typer": { 394 | "hashes": [ 395 | "sha256:023bae00d1baf358a6cc7cea45851639360bb716de687b42b0a4641cd99173f1", 396 | "sha256:b8261c6c0152dd73478b5ba96ba677e5d6948c715c310f7c91079f311f62ec03" 397 | ], 398 | "index": "pypi", 399 | "version": "==0.4.2" 400 | }, 401 | "typing-extensions": { 402 | "hashes": [ 403 | "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0", 404 | "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef" 405 | ], 406 | "markers": "python_version >= '3.8'", 407 | "version": "==4.8.0" 408 | }, 409 | "urllib3": { 410 | "hashes": [ 411 | "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21", 412 | "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b" 413 | ], 414 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", 415 | "version": "==1.26.17" 416 | } 417 | }, 418 | "develop": { 419 | "attrs": { 420 | "hashes": [ 421 | "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04", 422 | "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015" 423 | ], 424 | "markers": "python_version >= '3.7'", 425 | "version": "==23.1.0" 426 | }, 427 | "black": { 428 | "hashes": [ 429 | "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90", 430 | "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c", 431 | "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78", 432 | "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4", 433 | "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee", 434 | 
"sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e", 435 | "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e", 436 | "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6", 437 | "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9", 438 | "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c", 439 | "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256", 440 | "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f", 441 | "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2", 442 | "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c", 443 | "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b", 444 | "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807", 445 | "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf", 446 | "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def", 447 | "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad", 448 | "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d", 449 | "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849", 450 | "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69", 451 | "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666" 452 | ], 453 | "index": "pypi", 454 | "version": "== 22.6.0" 455 | }, 456 | "certifi": { 457 | "hashes": [ 458 | "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082", 459 | "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9" 460 | ], 461 | "markers": "python_version >= '3.6'", 462 | "version": "==2023.7.22" 463 | }, 464 | "charset-normalizer": { 465 | "hashes": [ 466 | "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843", 467 | "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786", 468 | "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e", 469 | "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8", 470 | "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4", 471 | "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa", 472 | "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d", 473 | "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82", 474 | "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7", 475 | "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895", 476 | "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d", 477 | "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a", 478 | "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382", 479 | "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678", 480 | "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b", 481 | "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e", 482 | "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741", 483 | "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4", 484 | "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596", 485 | "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9", 486 | 
"sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69", 487 | "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c", 488 | "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77", 489 | "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13", 490 | "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459", 491 | "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e", 492 | "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7", 493 | "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908", 494 | "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a", 495 | "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f", 496 | "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8", 497 | "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482", 498 | "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d", 499 | "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d", 500 | "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545", 501 | "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34", 502 | "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86", 503 | "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6", 504 | "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe", 505 | "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e", 506 | "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc", 507 | "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7", 508 | "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd", 509 | "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c", 510 | "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557", 511 | "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a", 512 | "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89", 513 | "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078", 514 | "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e", 515 | "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4", 516 | "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403", 517 | "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0", 518 | "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89", 519 | "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115", 520 | "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9", 521 | "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05", 522 | "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a", 523 | "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec", 524 | "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56", 525 | "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38", 526 | "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479", 527 | "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c", 528 | "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e", 529 | 
"sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd", 530 | "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186", 531 | "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455", 532 | "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c", 533 | "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65", 534 | "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78", 535 | "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287", 536 | "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df", 537 | "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43", 538 | "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1", 539 | "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7", 540 | "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989", 541 | "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a", 542 | "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63", 543 | "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884", 544 | "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649", 545 | "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810", 546 | "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828", 547 | "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4", 548 | "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2", 549 | "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd", 550 | "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5", 551 | "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe", 552 | "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293", 553 | "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e", 554 | "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e", 555 | "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8" 556 | ], 557 | "markers": "python_full_version >= '3.7.0'", 558 | "version": "==3.3.0" 559 | }, 560 | "click": { 561 | "hashes": [ 562 | "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", 563 | "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" 564 | ], 565 | "markers": "python_version >= '3.7'", 566 | "version": "==8.1.7" 567 | }, 568 | "coverage": { 569 | "extras": [ 570 | "toml" 571 | ], 572 | "hashes": [ 573 | "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1", 574 | "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63", 575 | "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9", 576 | "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312", 577 | "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3", 578 | "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb", 579 | "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25", 580 | "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92", 581 | "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda", 582 | "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148", 583 | "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6", 584 | 
"sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216", 585 | "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a", 586 | "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640", 587 | "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836", 588 | "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c", 589 | "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f", 590 | "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2", 591 | "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901", 592 | "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed", 593 | "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a", 594 | "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074", 595 | "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc", 596 | "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84", 597 | "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083", 598 | "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f", 599 | "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c", 600 | "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c", 601 | "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637", 602 | "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2", 603 | "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82", 604 | "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f", 605 | "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce", 606 | "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef", 607 | "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f", 608 | "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611", 609 | "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c", 610 | "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76", 611 | "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9", 612 | "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce", 613 | "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9", 614 | "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf", 615 | "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf", 616 | "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9", 617 | "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6", 618 | "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2", 619 | "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a", 620 | "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a", 621 | "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf", 622 | "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738", 623 | "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a", 624 | "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4" 625 | ], 626 | "markers": "python_version >= '3.8'", 627 | "version": "==7.3.2" 628 | }, 629 | "flake8": { 630 | "hashes": [ 631 | "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d", 632 | 
"sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d" 633 | ], 634 | "index": "pypi", 635 | "version": "==4.0.1" 636 | }, 637 | "idna": { 638 | "hashes": [ 639 | "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", 640 | "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" 641 | ], 642 | "markers": "python_version >= '3.5'", 643 | "version": "==3.4" 644 | }, 645 | "iniconfig": { 646 | "hashes": [ 647 | "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", 648 | "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" 649 | ], 650 | "markers": "python_version >= '3.7'", 651 | "version": "==2.0.0" 652 | }, 653 | "isort": { 654 | "hashes": [ 655 | "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7", 656 | "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951" 657 | ], 658 | "index": "pypi", 659 | "version": "==5.10.1" 660 | }, 661 | "mccabe": { 662 | "hashes": [ 663 | "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", 664 | "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" 665 | ], 666 | "version": "==0.6.1" 667 | }, 668 | "mypy": { 669 | "hashes": [ 670 | "sha256:006be38474216b833eca29ff6b73e143386f352e10e9c2fbe76aa8549e5554f5", 671 | "sha256:03c6cc893e7563e7b2949b969e63f02c000b32502a1b4d1314cabe391aa87d66", 672 | "sha256:0e9f70df36405c25cc530a86eeda1e0867863d9471fe76d1273c783df3d35c2e", 673 | "sha256:1ece702f29270ec6af25db8cf6185c04c02311c6bb21a69f423d40e527b75c56", 674 | "sha256:3e09f1f983a71d0672bbc97ae33ee3709d10c779beb613febc36805a6e28bb4e", 675 | "sha256:439c726a3b3da7ca84a0199a8ab444cd8896d95012c4a6c4a0d808e3147abf5d", 676 | "sha256:5a0b53747f713f490affdceef835d8f0cb7285187a6a44c33821b6d1f46ed813", 677 | "sha256:5f1332964963d4832a94bebc10f13d3279be3ce8f6c64da563d6ee6e2eeda932", 678 | "sha256:63e85a03770ebf403291ec50097954cc5caf2a9205c888ce3a61bd3f82e17569", 679 | "sha256:64759a273d590040a592e0f4186539858c948302c653c2eac840c7a3cd29e51b", 680 | "sha256:697540876638ce349b01b6786bc6094ccdaba88af446a9abb967293ce6eaa2b0", 681 | "sha256:9940e6916ed9371809b35b2154baf1f684acba935cd09928952310fbddaba648", 682 | "sha256:9f5f5a74085d9a81a1f9c78081d60a0040c3efb3f28e5c9912b900adf59a16e6", 683 | "sha256:a5ea0875a049de1b63b972456542f04643daf320d27dc592d7c3d9cd5d9bf950", 684 | "sha256:b117650592e1782819829605a193360a08aa99f1fc23d1d71e1a75a142dc7e15", 685 | "sha256:b24be97351084b11582fef18d79004b3e4db572219deee0212078f7cf6352723", 686 | "sha256:b88f784e9e35dcaa075519096dc947a388319cb86811b6af621e3523980f1c8a", 687 | "sha256:bdd5ca340beffb8c44cb9dc26697628d1b88c6bddf5c2f6eb308c46f269bb6f3", 688 | "sha256:d5aaf1edaa7692490f72bdb9fbd941fbf2e201713523bdb3f4038be0af8846c6", 689 | "sha256:e999229b9f3198c0c880d5e269f9f8129c8862451ce53a011326cad38b9ccd24", 690 | "sha256:f4a21d01fc0ba4e31d82f0fff195682e29f9401a8bdb7173891070eb260aeb3b", 691 | "sha256:f4b794db44168a4fc886e3450201365c9526a522c46ba089b55e1f11c163750d", 692 | "sha256:f730d56cb924d371c26b8eaddeea3cc07d78ff51c521c6d04899ac6904b75492" 693 | ], 694 | "index": "pypi", 695 | "version": "== 0.961" 696 | }, 697 | "mypy-extensions": { 698 | "hashes": [ 699 | "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", 700 | "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782" 701 | ], 702 | "markers": "python_version >= '3.5'", 703 | "version": "==1.0.0" 704 | }, 705 | "packaging": { 706 | "hashes": [ 707 | 
"sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", 708 | "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7" 709 | ], 710 | "markers": "python_version >= '3.7'", 711 | "version": "==23.2" 712 | }, 713 | "pathspec": { 714 | "hashes": [ 715 | "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20", 716 | "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3" 717 | ], 718 | "markers": "python_version >= '3.7'", 719 | "version": "==0.11.2" 720 | }, 721 | "platformdirs": { 722 | "hashes": [ 723 | "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3", 724 | "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e" 725 | ], 726 | "markers": "python_version >= '3.7'", 727 | "version": "==3.11.0" 728 | }, 729 | "pluggy": { 730 | "hashes": [ 731 | "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12", 732 | "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7" 733 | ], 734 | "markers": "python_version >= '3.8'", 735 | "version": "==1.3.0" 736 | }, 737 | "py": { 738 | "hashes": [ 739 | "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", 740 | "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378" 741 | ], 742 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 743 | "version": "==1.11.0" 744 | }, 745 | "pycodestyle": { 746 | "hashes": [ 747 | "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20", 748 | "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f" 749 | ], 750 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 751 | "version": "==2.8.0" 752 | }, 753 | "pyflakes": { 754 | "hashes": [ 755 | "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c", 756 | "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e" 757 | ], 758 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 759 | "version": "==2.4.0" 760 | }, 761 | "pytest": { 762 | "hashes": [ 763 | "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7", 764 | "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39" 765 | ], 766 | "index": "pypi", 767 | "version": "==7.1.3" 768 | }, 769 | "pytest-cov": { 770 | "hashes": [ 771 | "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6", 772 | "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470" 773 | ], 774 | "index": "pypi", 775 | "version": "==3.0.0" 776 | }, 777 | "pyyaml": { 778 | "hashes": [ 779 | "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5", 780 | "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc", 781 | "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df", 782 | "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741", 783 | "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206", 784 | "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27", 785 | "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595", 786 | "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62", 787 | "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98", 788 | "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696", 789 | 
"sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290", 790 | "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9", 791 | "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d", 792 | "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6", 793 | "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867", 794 | "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47", 795 | "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486", 796 | "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6", 797 | "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3", 798 | "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007", 799 | "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938", 800 | "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0", 801 | "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c", 802 | "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735", 803 | "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d", 804 | "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28", 805 | "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4", 806 | "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba", 807 | "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8", 808 | "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5", 809 | "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd", 810 | "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3", 811 | "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0", 812 | "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515", 813 | "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c", 814 | "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c", 815 | "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924", 816 | "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34", 817 | "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43", 818 | "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859", 819 | "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673", 820 | "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54", 821 | "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a", 822 | "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b", 823 | "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab", 824 | "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa", 825 | "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c", 826 | "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585", 827 | "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d", 828 | "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f" 829 | ], 830 | "markers": "python_version >= '3.6'", 831 | "version": "==6.0.1" 832 | }, 833 | "requests": { 834 | "hashes": [ 835 | "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa", 836 | "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf" 837 | ], 838 | "index": "pypi", 
839 | "version": "==2.28.2" 840 | }, 841 | "responses": { 842 | "hashes": [ 843 | "sha256:8a3a5915713483bf353b6f4079ba8b2a29029d1d1090a503c70b0dc5d9d0c7bd", 844 | "sha256:c4d9aa9fc888188f0c673eff79a8dadbe2e75b7fe879dc80a221a06e0a68138f" 845 | ], 846 | "index": "pypi", 847 | "version": "==0.23.1" 848 | }, 849 | "tomli": { 850 | "hashes": [ 851 | "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", 852 | "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" 853 | ], 854 | "markers": "python_full_version < '3.11.0a7'", 855 | "version": "==2.0.1" 856 | }, 857 | "types-pyyaml": { 858 | "hashes": [ 859 | "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062", 860 | "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24" 861 | ], 862 | "version": "==6.0.12.12" 863 | }, 864 | "types-requests": { 865 | "hashes": [ 866 | "sha256:0d580652ce903f643f8c3b494dd01d29367ea57cea0c7ad7f65cf3169092edb0", 867 | "sha256:cc1aba862575019306b2ed134eb1ea994cab1c887a22e18d3383e6dd42e9789b" 868 | ], 869 | "index": "pypi", 870 | "version": "==2.28.11.17" 871 | }, 872 | "types-urllib3": { 873 | "hashes": [ 874 | "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f", 875 | "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e" 876 | ], 877 | "version": "==1.26.25.14" 878 | }, 879 | "typing-extensions": { 880 | "hashes": [ 881 | "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0", 882 | "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef" 883 | ], 884 | "markers": "python_version >= '3.8'", 885 | "version": "==4.8.0" 886 | }, 887 | "urllib3": { 888 | "hashes": [ 889 | "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21", 890 | "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b" 891 | ], 892 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", 893 | "version": "==1.26.17" 894 | } 895 | } 896 | } 897 | --------------------------------------------------------------------------------