├── beam ├── aws │ ├── __init__.py │ ├── models.py │ ├── bastion.py │ └── utils.py ├── settings.toml ├── __main__.py ├── __init__.py ├── config.py ├── exceptions.py ├── hosts.py ├── config_loader.py ├── runner.py ├── eks.py ├── ssm.py ├── selector.py ├── utils.py ├── main.py └── _version.py ├── .bandit ├── poetry.toml ├── .gitattributes ├── setup.py ├── mypy.ini ├── .pylintrc ├── CHANGELOG.md ├── setup.cfg ├── .github └── workflows │ └── release.yml ├── .flake8 ├── pyproject.toml ├── README.md ├── requirements.txt ├── requirements-dev.txt ├── .gitignore ├── CODE_OF_CONDUCT.md ├── LICENSE └── versioneer.py /beam/aws/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /beam/settings.toml: -------------------------------------------------------------------------------- 1 | DEBUG = false 2 | -------------------------------------------------------------------------------- /.bandit: -------------------------------------------------------------------------------- 1 | [bandit] 2 | exclude = tests,.venv,venv 3 | -------------------------------------------------------------------------------- /poetry.toml: -------------------------------------------------------------------------------- 1 | [virtualenvs] 2 | in-project = true 3 | -------------------------------------------------------------------------------- /beam/__main__.py: -------------------------------------------------------------------------------- 1 | from beam.main import main 2 | 3 | main() 4 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.sh eol=lf 2 | beam/_version.py export-subst 3 | -------------------------------------------------------------------------------- /beam/__init__.py: -------------------------------------------------------------------------------- 1 | from .config import settings 2 | 3 | from . 
import _version 4 | __version__ = _version.get_versions()['version'] 5 | -------------------------------------------------------------------------------- /beam/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from dynaconf import Dynaconf 4 | 5 | ROOT = os.path.dirname(__file__) 6 | 7 | settings = Dynaconf( 8 | root_path=os.path.dirname(ROOT), 9 | envvar_prefix='BEAM', 10 | settings_files=['beam/settings.toml'], 11 | load_dotenv=True, 12 | ) 13 | -------------------------------------------------------------------------------- /beam/exceptions.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | 4 | class AdministratorRequiredError(PermissionError): 5 | """Raised when an administrator is required to perform an action.""" 6 | 7 | def __init__(self, message: Optional[str] = None) -> None: 8 | super().__init__(message or 'Administrator privileges are required to perform this action.') 9 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # type: ignore 2 | import setuptools 3 | 4 | import versioneer 5 | 6 | with open('requirements.txt', 'r', encoding='utf-8') as fh: 7 | requirements = [line.strip() for line in fh] 8 | 9 | setuptools.setup( 10 | name='beam', 11 | version=versioneer.get_version(), 12 | cmdclass=versioneer.get_cmdclass(), 13 | install_requires=requirements, 14 | ) 15 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | follow_imports = skip 3 | ignore_missing_imports = True 4 | # Disallow dynamic typing 5 | # disallow_any_unimported = True 6 | # disallow_any_expr = True 7 | # disallow_any_decorated = True 8 | # disallow_any_generics = True 9 | # disallow_any_explicit = True 10 | # disallow_subclassing_any = True 11 | 12 | # Disallow untyped definitions and calls 13 | # disallow_untyped_calls = True 14 | disallow_untyped_defs = True 15 | disallow_incomplete_defs = True 16 | check_untyped_defs = True 17 | # disallow_untyped_decorators = True 18 | 19 | # Configuring warnings 20 | warn_unused_ignores = True 21 | warn_no_return = True 22 | # warn_return_any = True 23 | warn_redundant_casts = True 24 | 25 | [mypy-tests.*] 26 | allow_untyped_defs = True 27 | 28 | [mypy-get_changed_files.*] 29 | ignore_errors = True 30 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | init-hook='import sys, os; sys.path.append(f"{os.getcwd()}/beam")' 3 | ignore=.venv,venv,_version.py,versioneer.py 4 | 5 | [MESSAGES CONTROL] 6 | disable=W0511, 7 | R, 8 | missing-timeout, 9 | line-too-long, 10 | too-few-public-methods, 11 | logging-fstring-interpolation, 12 | missing-module-docstring, 13 | missing-class-docstring, 14 | missing-function-docstring, 15 | too-many-instance-attributes, 16 | too-many-return-statements, 17 | trailing-whitespace, 18 | unspecified-encoding, 19 | import-error, 20 | use-dict-literal, 21 | broad-exception-caught, 22 | broad-exception-raised, 23 | wildcard-import 24 | 25 | [FORMAT] 26 | max-line-length=240 27 | 28 | good-names=id,dn,i,e,ex,db,vm 29 | -------------------------------------------------------------------------------- /CHANGELOG.md: 
-------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | ## [0.1.3] - 2023-11-19 4 | 5 | - Fixed: RDS Cluster port-forwarding 6 | - Improve bastion to resources (EKS, RDS) resource matching based on VPC 7 | 8 | ## [0.1.2] - 2023-11-15 9 | 10 | - Improve AWS rate limit handling by starting AWS SSM port-forwarding session using boto3 client instead of AWS CLI 11 | 12 | ## [0.1.1] - 2023-09-04 13 | 14 | - Added basic documentation 15 | 16 | ## [0.1.0] - 2023-08-31 17 | 18 | Initial release 19 | 20 | ### Added 21 | 22 | - 23 | 24 | ### Changed 25 | 26 | - 27 | 28 | ### Fixed 29 | 30 | - 31 | 32 | [Unreleased]: https://github.com/entitleio/beam/compare/0.1.0...master 33 | [0.1.0]: https://github.com/entitleio/beam/releases/tag/0.1.0 34 | [0.1.1]: https://github.com/entitleio/beam/releases/tag/0.1.1 35 | [0.1.2]: https://github.com/entitleio/beam/releases/tag/0.1.2 36 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = beam 3 | license = Apache-2.0 4 | author = Entitle I.O, Avi Zetser, Dennis Zagiansky 5 | author_email = beam@entitle.io, avi@entitle.io, dennis@entitle.io 6 | maintainer = Avi Zetser, Dennis Zagiansky 7 | maintainer_email = avi@entitle.io, dennis@entitle.io 8 | description = AWS SSM Tool 9 | long_description = file: README.md 10 | classifiers = 11 | Programming Language :: Python :: 3 12 | Intended Audience :: Developers 13 | Intended Audience :: System Administrators 14 | Development Status :: 4 - Beta 15 | Operating System :: OS Independent 16 | Topic :: System :: Systems Administration 17 | Topic :: Utilities 18 | License :: OSI Approved :: Apache Software License 19 | 20 | [options] 21 | packages = find: 22 | python_requires = >=3.9 23 | include_package_data = True 24 | 25 | [options.packages.find] 26 | exclude = 27 | test 28 | test.* 29 | 30 | # Version Management for application 31 | # Read more at https://jacobtomlinson.dev/posts/2020/versioning-and-formatting-your-python-code/ 32 | [versioneer] 33 | VCS = git 34 | style = pep440 35 | versionfile_source = beam/_version.py 36 | versionfile_build = beam/_version.py 37 | tag_prefix = 38 | parentdir_prefix = beam- 39 | -------------------------------------------------------------------------------- /beam/aws/models.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | from typing import Optional 3 | 4 | import aws_sso_lib 5 | import boto3 6 | from dataclasses_json import DataClassJsonMixin, config 7 | 8 | from beam.utils import hash_val 9 | 10 | 11 | @dataclass 12 | class Boto3SessionConfig(DataClassJsonMixin): 13 | account_id: str 14 | sso_start_url: str 15 | sso_region: str 16 | role_name: str 17 | region: str 18 | _session: Optional[boto3.Session] = field(init=False, default=None, metadata=config(exclude=lambda x: True, 19 | encoder=lambda x: None, 20 | decoder=lambda x: None)) 21 | vpc_id: Optional[str] = None 22 | 23 | def get_session(self) -> boto3.Session: 24 | if self._session is None: 25 | self._session = aws_sso_lib.get_boto3_session(self.sso_start_url, account_id=self.account_id, 26 | role_name=self.role_name, 27 | sso_region=self.sso_region, region=self.region) 28 | return self._session 29 | 30 | 31 | @dataclass 32 | class AwsEksInstance: 33 | name: str 34 | endpoint: str 35 | arn: str 36 | vpc_id: Optional[str] = None 37 | 38 | 39 | 
@dataclass 40 | class AwsAccount: 41 | id: str 42 | name: str 43 | 44 | 45 | @dataclass 46 | class AwsRdsInstance: 47 | identifier: str 48 | endpoint: str 49 | port: int 50 | vpc_id: Optional[str] = None 51 | 52 | @property 53 | def local_port(self) -> int: 54 | return hash_val(self.endpoint) + 1024 * 16 55 | -------------------------------------------------------------------------------- /beam/hosts.py: -------------------------------------------------------------------------------- 1 | import platform 2 | import re 3 | 4 | from beam.exceptions import AdministratorRequiredError 5 | from beam.utils import logger 6 | 7 | 8 | def get_hosts_path() -> str: 9 | system = platform.system() 10 | if system == 'Windows': 11 | return r'C:\Windows\System32\drivers\etc\hosts' 12 | elif system in ['Linux', 'Darwin']: 13 | return '/etc/hosts' 14 | else: 15 | raise OSError(f'Unsupported system: {system}') 16 | 17 | 18 | def edit_hosts_entry(host: str, hostname: str = '127.0.0.1') -> bool: 19 | hosts_path = get_hosts_path() 20 | logger.debug(f'Appending to hosts file ({hosts_path}): {hostname} {host}') 21 | 22 | try: 23 | # first try to open with read-only to check if editing is required 24 | # if yes, open with write permissions 25 | with open(hosts_path, 'r') as file: 26 | if re.search(rf'({hostname})\s({host})', file.read()): 27 | return True 28 | 29 | logger.debug(f"Host '{host}' not found in hosts file, adding it") 30 | 31 | with open(hosts_path, 'a') as file: 32 | file.write(f'{hostname} {host}\n') 33 | logger.debug(f"Host '{host}' added to hosts file") 34 | except PermissionError as e: 35 | logger.exception(f'Permission error while editing the hosts file ({hosts_path})') 36 | raise AdministratorRequiredError('Unable to edit hosts file.' 37 | 'Please allow write permissions to the hosts file: sudo chmod 666 /etc/hosts' 38 | 'or running as administrator (e.g. 
sudo beam run)') from e 39 | except IOError: 40 | logger.exception('Error while editing the hosts file') 41 | return False 42 | 43 | return True 44 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*.*.*' 7 | 8 | workflow_dispatch: 9 | 10 | jobs: 11 | release: 12 | name: Release 13 | permissions: 14 | contents: write 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Checkout code 18 | uses: actions/checkout@v3 19 | 20 | - name: Set up Python 21 | uses: actions/setup-python@v4 22 | with: 23 | python-version: "3.11" 24 | 25 | - name: Install Poetry 26 | uses: snok/install-poetry@v1 27 | with: 28 | version: 1.5.1 29 | virtualenvs-create: true 30 | virtualenvs-in-project: true 31 | installer-parallel: true 32 | 33 | - name: Run poetry install 34 | run: | 35 | set -ex 36 | poetry install 37 | 38 | - name: Build project for distribution 39 | run: poetry build 40 | 41 | - name: Check Version 42 | id: check-version 43 | run: | 44 | echo version=$(poetry version --short) 45 | echo version=$(poetry version --short) >> $GITHUB_OUTPUT 46 | [[ "$(poetry version --short)" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]] || echo prerelease=true >> $GITHUB_OUTPUT 47 | 48 | - name: Create Release 49 | uses: ncipollo/release-action@v1 50 | with: 51 | artifacts: "dist/*" 52 | omitName: true 53 | token: ${{ secrets.github_token }} 54 | draft: false 55 | prerelease: steps.check-version.outputs.prerelease == 'true' 56 | tag: ${{ steps.check-version.outputs.version }} 57 | 58 | # - name: Publish to PyPI 59 | # env: 60 | # POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }} 61 | # run: poetry publish 62 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | min_python_version = 3.11.0 3 | max-line-length = 240 4 | ban-relative-imports = true 5 | # flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy 6 | format-greedy = 1 7 | inline-quotes = single 8 | # Allow omission of a return type hint for __init__ if at least one argument is annotated 9 | # used by flake8-annotations 10 | mypy-init-return = true 11 | enable-extensions = TC, TC1 12 | type-checking-exempt-modules = typing, typing-extensions 13 | eradicate-whitelist-extend = ^-.*; 14 | extend-ignore = 15 | # E203: Whitespace before ':' (pycqa/pycodestyle#373) 16 | E203, 17 | # SIM106: Handle error-cases first 18 | SIM106, 19 | # ANN101: Missing type annotation for self in method 20 | ANN101, 21 | # ANN102: Missing type annotation for cls in classmethod 22 | ANN102, 23 | F403, 24 | # E125: continuation line with same indent as next logical line 25 | E125, 26 | 27 | per-file-ignores = 28 | # F401: Module imported by unused (non-implicit modules) 29 | # TC002: Move third-party import '...' 
into a type-checking block 30 | __init__.py:F401,TC002, 31 | # ANN201: Missing return type annotation for public function 32 | # E501: line too long 33 | tests/**:E501,Q000 34 | tests/test_*:ANN201 35 | tests/**/test_*:ANN201,E501,Q000 36 | 37 | 38 | exclude = 39 | .git, 40 | __pycache__, 41 | docs/source/conf.py, 42 | old, 43 | build, 44 | dist, 45 | .venv, 46 | venv, 47 | var 48 | versioneer.py, 49 | **/_version.py, 50 | 51 | extend-exclude = 52 | # Frozen and not subject to change in this repo: 53 | get-poetry.py, 54 | install-poetry.py, 55 | # External to the project's coding standards: 56 | tests/fixtures/*, 57 | tests/**/fixtures/*, 58 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "beam" 3 | version = "0.1.3" 4 | description = "AWS SSM made easy" 5 | authors = ["Entitle I.O", "Avi Zetser ", "Dennis Zagiansky "] 6 | maintainers = ["Avi Zetser ", "Dennis Zagiansky "] 7 | license = "Apache-2.0" 8 | readme = "README.md" 9 | homepage = "https://beam.entitle.io" 10 | repository = "https://github.com/entitleio/beam" 11 | 12 | classifiers = [ 13 | "Programming Language :: Python :: 3", 14 | "Intended Audience :: Developers", 15 | "Intended Audience :: System Administrators", 16 | "Development Status :: 4 - Beta", 17 | "Operating System :: OS Independent", 18 | "Topic :: System :: Systems Administration", 19 | "Topic :: Utilities", 20 | "License :: OSI Approved :: Apache Software License", 21 | ] 22 | 23 | [tool.poetry.dependencies] 24 | python = "^3.11" 25 | colorlog = "^6.7.0" 26 | boto3 = "^1.28.12" 27 | botocore = "^1.31.12" 28 | types-boto3 = "^1.0.2" 29 | aws-sso-lib = "^1.14.0" 30 | pyyaml = "^6.0.1" 31 | questionary = "^1.10.0" 32 | yamldataclassconfig = "^1.5.0" 33 | click = "^8.1.6" 34 | validators = "^0.20.0" 35 | rich = "^13.4.2" 36 | dynaconf = "^3.2.0" 37 | versioneer = "^0.29" 38 | 39 | [tool.poetry.group.dev.dependencies] 40 | pylint = "^2.13.8" 41 | parameterized = "^0.8.1" 42 | coverage = "^7.0" 43 | pytest = "^7.2.0" 44 | flake8 = "^6.0.0" 45 | mypy = "^1.0.0" 46 | types-requests = "^2.28.11.7" 47 | types-urllib3 = "^1.26.25.4" 48 | types-deprecated = "^1.2.9" 49 | types-retry = "^0.9.9" 50 | pre-commit = "^3.0.4" 51 | types-cachetools = "^5.3.0.2" 52 | bandit = "^1.7.5" 53 | flake8-quotes = "^3.3.2" 54 | types-pyyaml = "^6.0.12.9" 55 | 56 | [build-system] 57 | requires = ["setuptools", "poetry-core>=1.0.0", "versioneer[toml]"] 58 | build-backend = "poetry.core.masonry.api" 59 | 60 | [tool.poetry.scripts] 61 | beam = 'beam.main:main' 62 | 63 | [tool.versioneer] 64 | VCS = "git" 65 | style = "pep440" 66 | versionfile_source = "beam/_version.py" 67 | versionfile_build = "beam/_version.py" 68 | tag_prefix = "" 69 | parentdir_prefix = "beam-" 70 | -------------------------------------------------------------------------------- /beam/config_loader.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | from typing import Optional 3 | 4 | import yaml 5 | from dataclasses_json import DataClassJsonMixin 6 | 7 | from beam.aws.bastion import AwsBastion 8 | 9 | 10 | @dataclass 11 | class BeamEksConfig(DataClassJsonMixin): 12 | enabled: Optional[bool] = True 13 | tags: dict[str, str] = field(default_factory=dict) 14 | 15 | 16 | @dataclass 17 | class BeamRdsConfig(DataClassJsonMixin): 18 | enabled: Optional[bool] = True 19 | tags: dict[str, str] = 
field(default_factory=dict) 20 | 21 | 22 | @dataclass 23 | class BeamAwsConfig(DataClassJsonMixin): 24 | sso_url: str 25 | sso_region: str 26 | role: str 27 | accounts: list[str] = field(default_factory=list) 28 | regions: list[str] = field(default_factory=list) 29 | 30 | 31 | @dataclass 32 | class BeamBastionConfig(DataClassJsonMixin): 33 | tags: dict[str, str] = field(default_factory=lambda: {'Name': '*bastion*'}) 34 | 35 | @property 36 | def name(self) -> str: 37 | return self.tags.get('Name', '*bastion*') 38 | 39 | @property 40 | def other_tags(self) -> dict[str, str]: 41 | tags_without_name = self.tags.copy() 42 | tags_without_name.pop('Name') 43 | return tags_without_name 44 | 45 | 46 | @dataclass 47 | class BeamKubernetesConfig(DataClassJsonMixin): 48 | namespace: Optional[str] = 'default' 49 | 50 | 51 | @dataclass 52 | class BeamConfig(DataClassJsonMixin): 53 | # aws 54 | aws: BeamAwsConfig 55 | bastion: BeamBastionConfig 56 | kubernetes: BeamKubernetesConfig = field(default_factory=BeamKubernetesConfig) 57 | eks: BeamEksConfig = field(default_factory=BeamEksConfig) 58 | rds: BeamRdsConfig = field(default_factory=BeamRdsConfig) 59 | 60 | bastions: list[AwsBastion] = field(default_factory=list) 61 | 62 | @staticmethod 63 | def _parse_config(config: dict) -> 'BeamConfig': 64 | return BeamConfig( 65 | aws=BeamAwsConfig.from_dict(config['aws']), 66 | bastion=BeamBastionConfig.from_dict(config['bastion']), 67 | kubernetes=BeamKubernetesConfig.from_dict(config['kubernetes']), 68 | eks=BeamEksConfig.from_dict(config['eks']), 69 | rds=BeamRdsConfig.from_dict(config['rds']), 70 | ) 71 | 72 | @staticmethod 73 | def load_config(config_path: str) -> 'BeamConfig': 74 | with open(config_path, 'r') as file: 75 | config = yaml.safe_load(file) 76 | 77 | try: 78 | return BeamConfig.from_dict(config) 79 | except KeyError as e: 80 | raise KeyError(f'Missing key in config file: {e}') from e 81 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 | Beam Logo 4 | 5 |
6 | 7 | # Beam: Securely Connect to Your Infrastructure 8 | 9 | Beam helps you connect easily and securely to internal AWS resources using AWS SSM Session Manager. 10 | 11 | **Currently supported infrastructure:** 12 | * AWS: SSM, EKS, RDS 13 | * _GCP: Coming soon 🎉_ 14 | 15 | ## Installation and initial configuration 16 | 17 | #### Step 1: Install Beam 18 | Start by installing Beam: 19 | ```shell 20 | pip install https://github.com/entitleio/beam/releases/latest/download/beam.tar.gz 21 | ``` 22 | 23 | #### Step 2: Configure SSO 24 | Run the following command to configure Single Sign-On (SSO): 25 | 26 | ```shell 27 | beam configure --sso-url SSO_URL --sso-region SSO_REGION 28 | ``` 29 | Follow the Single Sign-On (SSO) and Multi-Factor Authentication (MFA) prompts until you approve. 30 | 31 | #### Step 3: Select Accounts and Permissions 32 | - Select the accounts you want to access. 33 | - Choose the permission sets you require. 34 | 35 | #### Step 4: Specify Regions and Infrastructure 36 | - Select the regions where your infrastructure is located. 37 | - Specify the regular expression (regex) for your bastion host. 38 | - Choose your default Kubernetes namespace. 39 | - Decide if you want to use Amazon Elastic Kubernetes Service (EKS) and specify the regex. 40 | - Decide if you want to connect to Amazon Relational Database Service (RDS). 41 | 42 | #### Step 5: Approve Configuration 43 | Approve the configuration. This will generate a configuration file in your current user folder; an illustrative example is shown in the Example configuration section below. 44 | 45 | #### Step 6: Run Beam 46 | Now you can run the following command: 47 | 48 | ```shell 49 | sudo beam run 50 | ``` 51 | *Note: The first run will take some time as it scans your entire infrastructure.* 52 | 53 | *Note: Beam requires sudo because it edits the hosts file.* 54 | 55 | Congratulations! You have successfully configured your DevOps environment. 56 | 57 | ## Documentation 58 | 59 | [Documentation] for the current version of Beam is available from the [official website]. 60 | 61 | ## Contribute 62 | 63 | Follow the [contributing guidelines](CONTRIBUTING.md) if you want to propose a change in Beam. 
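## Example configuration

The layout of the generated file follows the `BeamConfig` dataclasses in `beam/config_loader.py` (`aws`, `bastion`, `kubernetes`, `eks`, `rds`). The snippet below is a minimal illustrative sketch rather than output copied from a real run; every value (SSO start URL, account ID, region, role name, tag patterns) is a placeholder to replace with your own:

```yaml
aws:
  sso_url: https://example.awsapps.com/start  # placeholder SSO start URL
  sso_region: us-east-1
  role: ExampleRoleName                       # the Permission Set chosen during `beam configure`
  accounts:
    - "111111111111"                          # placeholder account ID
  regions:
    - us-east-1
bastion:
  tags:
    Name: "*bastion*"   # pattern used to detect the bastion host (Step 4)
kubernetes:
  namespace: default    # default namespace written into kubeconfig contexts
eks:
  enabled: true
  tags: {}              # empty means "detect all EKS clusters"
rds:
  enabled: true
  tags: {}              # empty means "detect all RDS instances"
```

After a scan, Beam also writes the discovered bastions back into this file under a `bastions` key (see `BeamRunner.scan_resources` in `beam/runner.py`), so the scan results are cached between runs.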
64 | 65 | ## Resources 66 | 67 | * [Releases][PyPI Releases] 68 | * [Official Website] 69 | * [Documentation] 70 | * [Issue Tracker] 71 | 72 | [PyPI]: https://pypi.org/project/beam/ 73 | [PyPI Releases]: https://pypi.org/project/beam/#history 74 | [Official Website]: https://beam.entitle.io 75 | [Documentation]: https://beam.entitle.io/docs 76 | [Issue Tracker]: https://github.com/entitleio/beam/issues 77 | [Contributing Documentation]: CONTRIBUTING.md 78 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | aws-error-utils==2.7.0 ; python_version >= "3.11" and python_version < "4" 2 | aws-sso-lib==1.14.0 ; python_version >= "3.11" and python_version < "4.0" 3 | boto3-stubs==1.29.0 ; python_version >= "3.11" and python_version < "4.0" 4 | boto3==1.29.0 ; python_version >= "3.11" and python_version < "4.0" 5 | botocore-stubs==1.32.0 ; python_version >= "3.11" and python_version < "4.0" 6 | botocore==1.32.0 ; python_version >= "3.11" and python_version < "4.0" 7 | click==8.1.7 ; python_version >= "3.11" and python_version < "4.0" 8 | colorama==0.4.6 ; python_version >= "3.11" and python_version < "4.0" and (sys_platform == "win32" or platform_system == "Windows") 9 | colorlog==6.7.0 ; python_version >= "3.11" and python_version < "4.0" 10 | dataclasses-json==0.6.2 ; python_version >= "3.11" and python_version < "4.0" 11 | decorator==5.1.1 ; python_version >= "3.11" and python_version < "4.0" 12 | dynaconf==3.2.4 ; python_version >= "3.11" and python_version < "4.0" 13 | jmespath==1.0.1 ; python_version >= "3.11" and python_version < "4.0" 14 | markdown-it-py==3.0.0 ; python_version >= "3.11" and python_version < "4.0" 15 | marshmallow==3.20.1 ; python_version >= "3.11" and python_version < "4.0" 16 | mdurl==0.1.2 ; python_version >= "3.11" and python_version < "4.0" 17 | mypy-extensions==1.0.0 ; python_version >= "3.11" and python_version < "4.0" 18 | packaging==23.2 ; python_version >= "3.11" and python_version < "4.0" 19 | prompt-toolkit==3.0.41 ; python_version >= "3.11" and python_version < "4.0" 20 | pygments==2.16.1 ; python_version >= "3.11" and python_version < "4.0" 21 | python-dateutil==2.8.2 ; python_version >= "3.11" and python_version < "4.0" 22 | pyyaml==6.0.1 ; python_version >= "3.11" and python_version < "4.0" 23 | questionary==1.10.0 ; python_version >= "3.11" and python_version < "4.0" 24 | rich==13.6.0 ; python_version >= "3.11" and python_version < "4.0" 25 | s3transfer==0.7.0 ; python_version >= "3.11" and python_version < "4.0" 26 | six==1.16.0 ; python_version >= "3.11" and python_version < "4.0" 27 | types-awscrt==0.19.12 ; python_version >= "3.11" and python_version < "4.0" 28 | types-boto3==1.0.2 ; python_version >= "3.11" and python_version < "4.0" 29 | types-s3transfer==0.7.0 ; python_version >= "3.11" and python_version < "4.0" 30 | typing-extensions==4.8.0 ; python_version >= "3.11" and python_version < "4.0" 31 | typing-inspect==0.9.0 ; python_version >= "3.11" and python_version < "4.0" 32 | urllib3==2.0.7 ; python_version >= "3.11" and python_version < "4.0" 33 | validators==0.20.0 ; python_version >= "3.11" and python_version < "4.0" 34 | versioneer==0.29 ; python_version >= "3.11" and python_version < "4.0" 35 | wcwidth==0.2.10 ; python_version >= "3.11" and python_version < "4.0" 36 | yamldataclassconfig==1.5.0 ; python_version >= "3.11" and python_version < "4.0" 37 | 
-------------------------------------------------------------------------------- /beam/runner.py: -------------------------------------------------------------------------------- 1 | import concurrent 2 | import os 3 | import subprocess 4 | from concurrent.futures import ThreadPoolExecutor 5 | 6 | import yaml 7 | from rich import print # pylint: disable=redefined-builtin 8 | 9 | from beam.aws.bastion import AwsBastion 10 | from beam.aws.utils import AwsOrganization 11 | from beam.config_loader import BeamConfig 12 | from beam.utils import logger 13 | 14 | 15 | class BeamRunner: 16 | def __init__(self, beam_config: BeamConfig, beam_config_path: str, organization: AwsOrganization, permission_set: str) -> None: 17 | self.beam_config = beam_config 18 | self.beam_config_path = beam_config_path 19 | self.aws_organization = organization 20 | self.permission_set = permission_set 21 | 22 | def scan_resources(self) -> list[AwsBastion]: 23 | bastions: list[AwsBastion] = [] 24 | 25 | for account in self.aws_organization.get_accounts(): 26 | if account.id not in self.beam_config.aws.accounts: 27 | logger.debug(f'Skipping account {account} as it is not in config') 28 | continue 29 | roles = {role[2] for role in self.aws_organization.get_all_roles(account)} 30 | if self.beam_config.aws.role not in roles: 31 | logger.debug(f'Skipping account {account} as role {self.beam_config.aws.role} is not in {roles}') 32 | continue 33 | logger.info(f'Found {len(roles)} roles: {roles}') 34 | logger.info(f'Processing account {account}') 35 | 36 | with ThreadPoolExecutor(max_workers=10) as executor: 37 | futures = [ 38 | executor.submit(self.aws_organization.process_account, account, self.permission_set, self.beam_config) 39 | ] 40 | for future in concurrent.futures.as_completed(futures): 41 | if res := future.result(): 42 | logger.info(f'Found {len(res)} bastions: {res}') 43 | bastions.extend(res) 44 | 45 | logger.info(f'Found {len(bastions)} bastions: {bastions}') 46 | 47 | # save bastions to local config to cache the scan 48 | self.beam_config.bastions = bastions 49 | beam_config_dict = self.beam_config.to_dict() 50 | os.makedirs(os.path.dirname(self.beam_config_path), exist_ok=True) 51 | with open(self.beam_config_path, 'w') as file: 52 | yaml.safe_dump(beam_config_dict, file, default_flow_style=False) 53 | 54 | return bastions 55 | 56 | def connect_to_resources(self, bastions: list[AwsBastion], is_eks_enabled: bool, is_rds_enabled: bool) -> list[subprocess.Popen]: 57 | processes: list[subprocess.Popen] = [] 58 | 59 | for bastion in bastions: 60 | try: 61 | logger.debug(f'Connecting to Bastion {bastion}') 62 | 63 | if is_eks_enabled: 64 | for eks_instance in bastion.eks_instances: 65 | logger.debug(f'Processing EKS {eks_instance}') 66 | if process := bastion.connect_to_eks(eks_instance, default_namespace=self.beam_config.kubernetes.namespace or 'default'): 67 | processes.append(process) 68 | 69 | if is_rds_enabled: 70 | for rds_instance in bastion.rds_instances: 71 | logger.debug(f'Processing RDS {rds_instance}') 72 | if process := bastion.connect_to_rds(rds_instance): 73 | processes.append(process) 74 | except PermissionError as e: 75 | print(f'[bold red]ERROR: {e}[/bold red]') 76 | 77 | return processes 78 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | astroid==2.15.8 ; python_version >= "3.11" and python_version < "4.0" 2 | bandit==1.7.5 ; python_version >= "3.11" and 
python_version < "4.0" 3 | cfgv==3.4.0 ; python_version >= "3.11" and python_version < "4.0" 4 | colorama==0.4.6 ; python_version >= "3.11" and python_version < "4.0" and (sys_platform == "win32" or platform_system == "Windows") 5 | coverage==7.3.2 ; python_version >= "3.11" and python_version < "4.0" 6 | dill==0.3.7 ; python_version >= "3.11" and python_version < "4.0" 7 | distlib==0.3.7 ; python_version >= "3.11" and python_version < "4.0" 8 | filelock==3.13.1 ; python_version >= "3.11" and python_version < "4.0" 9 | flake8-quotes==3.3.2 ; python_version >= "3.11" and python_version < "4.0" 10 | flake8==6.1.0 ; python_version >= "3.11" and python_version < "4.0" 11 | gitdb==4.0.11 ; python_version >= "3.11" and python_version < "4.0" 12 | gitpython==3.1.40 ; python_version >= "3.11" and python_version < "4.0" 13 | identify==2.5.31 ; python_version >= "3.11" and python_version < "4.0" 14 | iniconfig==2.0.0 ; python_version >= "3.11" and python_version < "4.0" 15 | isort==5.12.0 ; python_version >= "3.11" and python_version < "4.0" 16 | lazy-object-proxy==1.9.0 ; python_version >= "3.11" and python_version < "4.0" 17 | markdown-it-py==3.0.0 ; python_version >= "3.11" and python_version < "4.0" 18 | mccabe==0.7.0 ; python_version >= "3.11" and python_version < "4.0" 19 | mdurl==0.1.2 ; python_version >= "3.11" and python_version < "4.0" 20 | mypy-extensions==1.0.0 ; python_version >= "3.11" and python_version < "4.0" 21 | mypy==1.7.0 ; python_version >= "3.11" and python_version < "4.0" 22 | nodeenv==1.8.0 ; python_version >= "3.11" and python_version < "4.0" 23 | packaging==23.2 ; python_version >= "3.11" and python_version < "4.0" 24 | parameterized==0.8.1 ; python_version >= "3.11" and python_version < "4.0" 25 | pbr==6.0.0 ; python_version >= "3.11" and python_version < "4.0" 26 | platformdirs==3.11.0 ; python_version >= "3.11" and python_version < "4.0" 27 | pluggy==1.3.0 ; python_version >= "3.11" and python_version < "4.0" 28 | pre-commit==3.5.0 ; python_version >= "3.11" and python_version < "4.0" 29 | pycodestyle==2.11.1 ; python_version >= "3.11" and python_version < "4.0" 30 | pyflakes==3.1.0 ; python_version >= "3.11" and python_version < "4.0" 31 | pygments==2.16.1 ; python_version >= "3.11" and python_version < "4.0" 32 | pylint==2.17.7 ; python_version >= "3.11" and python_version < "4.0" 33 | pytest==7.4.3 ; python_version >= "3.11" and python_version < "4.0" 34 | pyyaml==6.0.1 ; python_version >= "3.11" and python_version < "4.0" 35 | rich==13.6.0 ; python_version >= "3.11" and python_version < "4.0" 36 | setuptools==68.2.2 ; python_version >= "3.11" and python_version < "4.0" 37 | smmap==5.0.1 ; python_version >= "3.11" and python_version < "4.0" 38 | stevedore==5.1.0 ; python_version >= "3.11" and python_version < "4.0" 39 | tomlkit==0.12.3 ; python_version >= "3.11" and python_version < "4.0" 40 | types-cachetools==5.3.0.7 ; python_version >= "3.11" and python_version < "4.0" 41 | types-deprecated==1.2.9.3 ; python_version >= "3.11" and python_version < "4.0" 42 | types-pyyaml==6.0.12.12 ; python_version >= "3.11" and python_version < "4.0" 43 | types-requests==2.31.0.10 ; python_version >= "3.11" and python_version < "4.0" 44 | types-retry==0.9.9.4 ; python_version >= "3.11" and python_version < "4.0" 45 | types-urllib3==1.26.25.14 ; python_version >= "3.11" and python_version < "4.0" 46 | typing-extensions==4.8.0 ; python_version >= "3.11" and python_version < "4.0" 47 | urllib3==2.0.7 ; python_version >= "3.11" and python_version < "4.0" 48 | virtualenv==20.24.6 ; 
python_version >= "3.11" and python_version < "4.0" 49 | wrapt==1.16.0 ; python_version >= "3.11" and python_version < "4.0" 50 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # PyCharm 2 | .idea/* 3 | !.idea/runConfigurations 4 | 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | share/python-wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .nox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | nosetests.xml 51 | coverage.xml 52 | *.cover 53 | *.py,cover 54 | .hypothesis/ 55 | .pytest_cache/ 56 | cover/ 57 | 58 | # Translations 59 | *.mo 60 | *.pot 61 | 62 | # Django stuff: 63 | *.log 64 | local_settings.py 65 | db.sqlite3 66 | db.sqlite3-journal 67 | 68 | # Flask stuff: 69 | instance/ 70 | .webassets-cache 71 | 72 | # Scrapy stuff: 73 | .scrapy 74 | 75 | # Sphinx documentation 76 | docs/_build/ 77 | 78 | # PyBuilder 79 | .pybuilder/ 80 | target/ 81 | 82 | # Jupyter Notebook 83 | .ipynb_checkpoints 84 | 85 | # IPython 86 | profile_default/ 87 | ipython_config.py 88 | 89 | # pyenv 90 | # For a library or package, you might want to ignore these files since the code is 91 | # intended to run in multiple environments; otherwise, check them in: 92 | # .python-version 93 | 94 | # pipenv 95 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 96 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 97 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 98 | # install all needed dependencies. 99 | #Pipfile.lock 100 | 101 | # poetry 102 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 103 | # This is especially recommended for binary packages to ensure reproducibility, and is more 104 | # commonly ignored for libraries. 105 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 106 | #poetry.lock 107 | 108 | # pdm 109 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 110 | #pdm.lock 111 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 112 | # in version control. 113 | # https://pdm.fming.dev/#use-with-ide 114 | .pdm.toml 115 | 116 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 117 | __pypackages__/ 118 | 119 | # Celery stuff 120 | celerybeat-schedule 121 | celerybeat.pid 122 | 123 | # SageMath parsed files 124 | *.sage.py 125 | 126 | # Environments 127 | .env 128 | .venv 129 | env/ 130 | venv/ 131 | ENV/ 132 | env.bak/ 133 | venv.bak/ 134 | 135 | # Spyder project settings 136 | .spyderproject 137 | .spyproject 138 | 139 | # Rope project settings 140 | .ropeproject 141 | 142 | # mkdocs documentation 143 | /site 144 | 145 | # mypy 146 | .mypy_cache/ 147 | .dmypy.json 148 | dmypy.json 149 | 150 | # Pyre type checker 151 | .pyre/ 152 | 153 | # pytype static type analyzer 154 | .pytype/ 155 | 156 | # Cython debug symbols 157 | cython_debug/ 158 | 159 | # PyCharm 160 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 161 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 162 | # and can be added to the global gitignore or merged into this file. For a more nuclear 163 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 164 | #.idea/ 165 | -------------------------------------------------------------------------------- /beam/aws/bastion.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=import-outside-toplevel 2 | import subprocess 3 | from dataclasses import dataclass, field 4 | from typing import Optional 5 | 6 | from dataclasses_json import DataClassJsonMixin 7 | 8 | from beam.aws.models import Boto3SessionConfig, AwsRdsInstance, AwsEksInstance 9 | from beam.eks import update_kubeconfig 10 | from beam.hosts import edit_hosts_entry 11 | from beam.ssm import start_ssm_forwarding_session 12 | from beam.utils import hash_val, logger 13 | 14 | 15 | @dataclass 16 | class AwsBastion(DataClassJsonMixin): 17 | # session details 18 | boto3_session_config: Boto3SessionConfig 19 | 20 | # bastion details 21 | instance_id: str 22 | name: str 23 | vpc_id: str 24 | rds_instances: list[AwsRdsInstance] = field(default_factory=list, init=True) 25 | eks_instances: list[AwsEksInstance] = field(default_factory=list, init=True) 26 | 27 | def get_eks_clusters(self) -> list[AwsEksInstance]: 28 | from beam.aws.utils import get_all_eks_clusters # local import is required to avoid circular imports 29 | eks_clusters = get_all_eks_clusters(self.boto3_session_config.get_session()) 30 | logger.debug(f"Found {len(eks_clusters)} EKS clusters for bastion='{self.name}': {eks_clusters}") 31 | 32 | return eks_clusters 33 | 34 | def connect_to_eks(self, eks_instance: AwsEksInstance, default_namespace: str = 'default') -> Optional[subprocess.Popen]: 35 | from beam.aws.utils import get_profile_name # local import is required to avoid circular imports 36 | 37 | session = self.boto3_session_config.get_session() 38 | account_id = self.boto3_session_config.account_id 39 | region = self.boto3_session_config.region 40 | bastion = self 41 | role = self.boto3_session_config.role_name 42 | 43 | logger.info(f'Connecting to EKS cluster {eks_instance.name}') 44 | cluster_endpoint_api = eks_instance.endpoint.replace('https://', '') 45 | edit_hosts_entry(cluster_endpoint_api) 46 | profile_name = get_profile_name(account_id, role) 47 | 48 | local_port = hash_val(eks_instance.endpoint) + (1024 * 16) 49 | 50 | update_kubeconfig(session, eks_instance.name, region, profile_name, local_port, default_namespace=default_namespace) 51 | process = start_ssm_forwarding_session(region, 
bastion.instance_id, 52 | cluster_endpoint_api, 443, 53 | local_port, profile_name) 54 | 55 | return process 56 | 57 | def connect_to_rds(self, rds_instance: AwsRdsInstance) -> Optional[subprocess.Popen]: 58 | from beam.aws.utils import get_profile_name # local import is required to avoid circular imports 59 | session = self.boto3_session_config.get_session() 60 | edit_hosts_entry(rds_instance.endpoint) 61 | 62 | profile_name = get_profile_name(self.boto3_session_config.account_id, self.boto3_session_config.role_name) 63 | logger.info(f"Connecting to RDS instance '{rds_instance.identifier}' ({rds_instance.endpoint}:{rds_instance.local_port})") 64 | process = start_ssm_forwarding_session(session.region_name, 65 | self.instance_id, 66 | rds_instance.endpoint, 67 | rds_instance.port, 68 | rds_instance.local_port, 69 | profile_name) 70 | 71 | return process 72 | 73 | def add_rds_instance(self, rds_instance: AwsRdsInstance) -> None: 74 | self.rds_instances.append(rds_instance) 75 | 76 | def add_eks_instance(self, eks_instance: AwsEksInstance) -> None: 77 | self.eks_instances.append(eks_instance) 78 | 79 | def __str__(self) -> str: 80 | return f"{self.name} (id='{self.instance_id}', region='{self.boto3_session_config.region}', vpc='{self.vpc_id}')" 81 | 82 | def __repr__(self) -> str: 83 | return self.__str__() 84 | -------------------------------------------------------------------------------- /beam/eks.py: -------------------------------------------------------------------------------- 1 | import os 2 | import platform 3 | from pathlib import Path 4 | from typing import Optional 5 | 6 | import boto3 7 | import yaml 8 | 9 | 10 | def update_kubeconfig(boto3_session: boto3.Session, 11 | cluster_name: str, 12 | cluster_region: str, 13 | cluster_profile: str, 14 | local_api_server_port: int, 15 | kubeconfig_path: Optional[str] = None, 16 | default_namespace: str = 'default') -> None: 17 | if not kubeconfig_path: 18 | kubeconfig_path = str(Path.home() / '.kube' / 'config') 19 | eks_client = boto3_session.client('eks') 20 | eks_cluster = eks_client.describe_cluster(name=cluster_name)['cluster'] 21 | 22 | if os.path.isfile(kubeconfig_path): 23 | with open(kubeconfig_path, 'r') as file: 24 | kubeconfig = yaml.safe_load(file) or {} 25 | else: 26 | os.makedirs(os.path.dirname(kubeconfig_path), exist_ok=True) 27 | with open(kubeconfig_path, 'w+'): 28 | # set permissions to 600 and ownership to (real) current user 29 | os.chmod(kubeconfig_path, 0o600) 30 | username = os.getlogin() 31 | if platform.system() != 'Windows': 32 | from pwd import getpwnam # this import doesn't work on Windows # pylint: disable=import-outside-toplevel 33 | uid = getpwnam(username).pw_uid 34 | gid = getpwnam(username).pw_uid 35 | os.chown(kubeconfig_path, uid, gid) 36 | kubeconfig = {} 37 | 38 | # clusters 39 | kubeconfig_clusters = kubeconfig.get('clusters', []) 40 | existing_cluster_without_target = [cluster for cluster in kubeconfig_clusters if 41 | not cluster.get('cluster', {}).get('server', '').startswith(eks_cluster['endpoint'])] 42 | 43 | account_id = eks_cluster['arn'].split(':')[4] 44 | cluster_name_in_kubeconfig = f'{account_id}:{cluster_region}:{cluster_name}' 45 | new_cluster = { 46 | 'cluster': { 47 | 'server': f"{eks_cluster['endpoint']}:{local_api_server_port}", 48 | 'certificate-authority-data': eks_cluster['certificateAuthority']['data'] 49 | }, 50 | 'name': cluster_name_in_kubeconfig 51 | } 52 | 53 | new_clusters = existing_cluster_without_target + [new_cluster] 54 | 55 | # contexts 56 | kubeconfig_contexts = 
kubeconfig.get('contexts', []) 57 | existing_contexts_without_target = [context for context in kubeconfig_contexts if 58 | not context.get('context', {}).get('cluster', '').startswith(cluster_name_in_kubeconfig)] 59 | existing_context: Optional[dict] = next((context for context in kubeconfig_contexts if 60 | context.get('context', {}).get('cluster', '').startswith(cluster_name_in_kubeconfig)), None) 61 | new_context = { 62 | 'context': { 63 | 'cluster': cluster_name_in_kubeconfig, 64 | 'user': cluster_name_in_kubeconfig, 65 | 'namespace': default_namespace, 66 | }, 67 | 'name': cluster_name_in_kubeconfig 68 | } 69 | 70 | # copy current namespace if set 71 | if namespace := existing_context and existing_context.get('context', {}).get('namespace'): 72 | new_context['context']['namespace'] = namespace # type: ignore 73 | 74 | new_contexts = existing_contexts_without_target + [new_context] 75 | 76 | # users 77 | kubeconfig_users = kubeconfig.get('users', []) 78 | existing_users_without_target = [user for user in kubeconfig_users if not user.get('name').startswith(cluster_name_in_kubeconfig)] 79 | new_user = { 80 | 'name': cluster_name_in_kubeconfig, 81 | 'user': { 82 | 'exec': { 83 | 'apiVersion': 'client.authentication.k8s.io/v1beta1', 84 | 'command': 'aws', 85 | 'args': [ 86 | '--region', cluster_region, 'eks', 'get-token', '--cluster-name', cluster_name, '--output', 'json' 87 | ], 88 | 'env': [ 89 | {'name': 'AWS_PROFILE', 'value': cluster_profile} 90 | ] 91 | } 92 | } 93 | } 94 | new_users = existing_users_without_target + [new_user] 95 | 96 | new_kubeconfig_file = {} 97 | kubeconfig_declarations = { 98 | 'apiVersion': 'v1', 99 | 'kind': 'Config', 100 | 'current-context': cluster_name_in_kubeconfig, 101 | 'preferences': {}, 102 | } 103 | new_kubeconfig_file.update(kubeconfig_declarations) 104 | new_kubeconfig_file['clusters'] = new_clusters 105 | new_kubeconfig_file['contexts'] = new_contexts 106 | new_kubeconfig_file['users'] = new_users 107 | 108 | config_text = yaml.dump(new_kubeconfig_file, default_flow_style=False) 109 | with open(kubeconfig_path, 'w+') as file: 110 | file.write(config_text) 111 | -------------------------------------------------------------------------------- /beam/ssm.py: -------------------------------------------------------------------------------- 1 | import json 2 | import subprocess 3 | from typing import Optional 4 | 5 | import boto3 6 | 7 | from beam.utils import logger, execute 8 | 9 | 10 | def start_ssm_forwarding_session(region: str, instance_id: str, host: str, remote_port: int, 11 | local_port: int, profile: str) -> Optional[subprocess.Popen]: 12 | """ 13 | Start a Secure Shell (SSM) session to an AWS EC2 instance and forward a local port to a remote port on the instance. 14 | 15 | Args: 16 | region (str): The AWS region where the instance is located. 17 | instance_id (str): The identifier of the AWS EC2 instance to connect to. 18 | host (str): The remote host or IP address on the AWS EC2 instance to forward traffic to. 19 | remote_port (int): The remote port on the AWS EC2 instance to which traffic will be forwarded. 20 | local_port (int): The local port on the user's machine from which traffic will be forwarded. 21 | profile (str): The AWS CLI profile to be used for the SSM session. 22 | 23 | Returns: 24 | bool: True if the SSM session and port forwarding were successfully initiated, False otherwise. 25 | 26 | Raises: 27 | TypeError: If any of the arguments are not of the expected type. 
28 | ValueError: If remote_port or local_port are not within the valid port range (1-65535). 29 | ValueError: If the instance_id is not a valid AWS EC2 instance identifier. 30 | ValueError: If the provided host is not a valid hostname or IP address. 31 | 32 | Note: 33 | This function requires AWS CLI and boto3 to be properly installed and configured with valid credentials. 34 | 35 | Example: 36 | start_ssm_forwarding_session('us-west-2', 'i-0123456789abcdef0', 'localhost', 22, 2222, 'my-profile') 37 | """ 38 | logger.debug(f"Starting SSM session (instance_id='{instance_id}', remote_port={remote_port}, local_port={local_port})") 39 | 40 | if not isinstance(region, str): 41 | raise TypeError('region must be a string') 42 | if not isinstance(instance_id, str): 43 | raise TypeError('instance_id must be a string') 44 | if not isinstance(host, str): 45 | raise TypeError('host must be a string') 46 | if not isinstance(remote_port, int): 47 | raise TypeError('remote_port must be an integer') 48 | if not isinstance(local_port, int): 49 | raise TypeError('local_port must be an integer') 50 | if remote_port < 1 or remote_port > 65535: 51 | raise ValueError('remote_port must be between 1 and 65535') 52 | if local_port < 1 or local_port > 65535: 53 | raise ValueError('local_port must be between 1 and 65535') 54 | 55 | logger.debug( 56 | f' Starting SSM session to instance_id {instance_id} on port {remote_port} and local port {local_port}') 57 | try: 58 | session = boto3.Session(profile_name=profile, region_name=region) 59 | ssm_client = session.client('ssm') 60 | ssm_parameters = { 61 | 'host': [host], 62 | 'portNumber': [str(remote_port)], 63 | 'localPortNumber': [str(local_port)], 64 | } 65 | response = ssm_client.start_session( 66 | Target=instance_id, 67 | DocumentName='AWS-StartPortForwardingSessionToRemoteHost', 68 | Parameters=ssm_parameters 69 | ) 70 | 71 | return start_aws_ssm_plugin(response, ssm_parameters, profile, region, instance_id) 72 | except subprocess.CalledProcessError as e: 73 | logger.error(f'Error executing command: {e.cmd}. Return code: {e.returncode}. Output: {e.output}') 74 | 75 | return None 76 | 77 | 78 | def start_aws_ssm_plugin(create_session_response: dict, parameters: dict, profile: str, region: str, instance_id: str) \ 79 | -> Optional[subprocess.Popen]: 80 | """ 81 | Start the AWS SSM plugin to create a session and forward a local port to a remote port on an EC2 instance. 82 | 83 | Args: 84 | create_session_response: The response from creating an SSM session. 85 | parameters: The parameters for the SSM session. 86 | profile: The AWS CLI profile to be used for the SSM session. 87 | region: The AWS region where the instance is located. 88 | instance_id: The identifier of the EC2 instance to connect to. 89 | 90 | Returns: 91 | subprocess.Popen: The process for the SSM plugin command. 92 | 93 | Raises: 94 | subprocess.CalledProcessError: If there is an error executing the SSM plugin command. 
95 | """ 96 | plugin_parameters = { 97 | 'Target': instance_id, 98 | 'DocumentName': 'AWS-StartPortForwardingSessionToRemoteHost', 99 | 'Parameters': parameters 100 | } 101 | 102 | command = [ 103 | 'session-manager-plugin', 104 | f"'{json.dumps(create_session_response)}'", 105 | region, 106 | 'StartSession', 107 | profile, 108 | f"'{json.dumps(plugin_parameters)}'", 109 | f'https://ssm.{region}.amazonaws.com' 110 | ] 111 | 112 | try: 113 | process = execute(' '.join(command)) 114 | return process 115 | except subprocess.CalledProcessError as e: 116 | logger.exception(f'Error executing command: {e.cmd} (return code: {e.returncode}) | Output: {e.output}') 117 | 118 | return None 119 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | 2 | # Contributor Covenant Code of Conduct 3 | 4 | ## Our Pledge 5 | 6 | We as members, contributors, and leaders pledge to make participation in our 7 | community a harassment-free experience for everyone, regardless of age, body 8 | size, visible or invisible disability, ethnicity, sex characteristics, gender 9 | identity and expression, level of experience, education, socio-economic status, 10 | nationality, personal appearance, race, caste, color, religion, or sexual 11 | identity and orientation. 12 | 13 | We pledge to act and interact in ways that contribute to an open, welcoming, 14 | diverse, inclusive, and healthy community. 15 | 16 | ## Our Standards 17 | 18 | Examples of behavior that contributes to a positive environment for our 19 | community include: 20 | 21 | * Demonstrating empathy and kindness toward other people 22 | * Being respectful of differing opinions, viewpoints, and experiences 23 | * Giving and gracefully accepting constructive feedback 24 | * Accepting responsibility and apologizing to those affected by our mistakes, 25 | and learning from the experience 26 | * Focusing on what is best not just for us as individuals, but for the overall 27 | community 28 | 29 | Examples of unacceptable behavior include: 30 | 31 | * The use of sexualized language or imagery, and sexual attention or advances of 32 | any kind 33 | * Trolling, insulting or derogatory comments, and personal or political attacks 34 | * Public or private harassment 35 | * Publishing others' private information, such as a physical or email address, 36 | without their explicit permission 37 | * Other conduct which could reasonably be considered inappropriate in a 38 | professional setting 39 | 40 | ## Enforcement Responsibilities 41 | 42 | Community leaders are responsible for clarifying and enforcing our standards of 43 | acceptable behavior and will take appropriate and fair corrective action in 44 | response to any behavior that they deem inappropriate, threatening, offensive, 45 | or harmful. 46 | 47 | Community leaders have the right and responsibility to remove, edit, or reject 48 | comments, commits, code, wiki edits, issues, and other contributions that are 49 | not aligned to this Code of Conduct, and will communicate reasons for moderation 50 | decisions when appropriate. 51 | 52 | ## Scope 53 | 54 | This Code of Conduct applies within all community spaces, and also applies when 55 | an individual is officially representing the community in public spaces. 
56 | Examples of representing our community include using an official e-mail address, 57 | posting via an official social media account, or acting as an appointed 58 | representative at an online or offline event. 59 | 60 | ## Enforcement 61 | 62 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 63 | reported to the community leaders responsible for enforcement at 64 | [INSERT CONTACT METHOD]. 65 | All complaints will be reviewed and investigated promptly and fairly. 66 | 67 | All community leaders are obligated to respect the privacy and security of the 68 | reporter of any incident. 69 | 70 | ## Enforcement Guidelines 71 | 72 | Community leaders will follow these Community Impact Guidelines in determining 73 | the consequences for any action they deem in violation of this Code of Conduct: 74 | 75 | ### 1. Correction 76 | 77 | **Community Impact**: Use of inappropriate language or other behavior deemed 78 | unprofessional or unwelcome in the community. 79 | 80 | **Consequence**: A private, written warning from community leaders, providing 81 | clarity around the nature of the violation and an explanation of why the 82 | behavior was inappropriate. A public apology may be requested. 83 | 84 | ### 2. Warning 85 | 86 | **Community Impact**: A violation through a single incident or series of 87 | actions. 88 | 89 | **Consequence**: A warning with consequences for continued behavior. No 90 | interaction with the people involved, including unsolicited interaction with 91 | those enforcing the Code of Conduct, for a specified period of time. This 92 | includes avoiding interactions in community spaces as well as external channels 93 | like social media. Violating these terms may lead to a temporary or permanent 94 | ban. 95 | 96 | ### 3. Temporary Ban 97 | 98 | **Community Impact**: A serious violation of community standards, including 99 | sustained inappropriate behavior. 100 | 101 | **Consequence**: A temporary ban from any sort of interaction or public 102 | communication with the community for a specified period of time. No public or 103 | private interaction with the people involved, including unsolicited interaction 104 | with those enforcing the Code of Conduct, is allowed during this period. 105 | Violating these terms may lead to a permanent ban. 106 | 107 | ### 4. Permanent Ban 108 | 109 | **Community Impact**: Demonstrating a pattern of violation of community 110 | standards, including sustained inappropriate behavior, harassment of an 111 | individual, or aggression toward or disparagement of classes of individuals. 112 | 113 | **Consequence**: A permanent ban from any sort of public interaction within the 114 | community. 115 | 116 | ## Attribution 117 | 118 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 119 | version 2.1, available at 120 | [https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. 121 | 122 | Community Impact Guidelines were inspired by 123 | [Mozilla's code of conduct enforcement ladder][Mozilla CoC]. 124 | 125 | For answers to common questions about this code of conduct, see the FAQ at 126 | [https://www.contributor-covenant.org/faq][FAQ]. Translations are available at 127 | [https://www.contributor-covenant.org/translations][translations]. 
128 | 129 | [homepage]: https://www.contributor-covenant.org 130 | [v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html 131 | [Mozilla CoC]: https://github.com/mozilla/diversity 132 | [FAQ]: https://www.contributor-covenant.org/faq 133 | [translations]: https://www.contributor-covenant.org/translations 134 | -------------------------------------------------------------------------------- /beam/selector.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | import aws_sso_lib 4 | import questionary 5 | import validators 6 | import yaml 7 | from questionary import Choice 8 | from rich import print # pylint: disable=redefined-builtin 9 | from rich.panel import Panel 10 | 11 | from beam.aws.utils import AwsOrganization 12 | from beam.config_loader import BeamConfig, BeamAwsConfig, BeamBastionConfig, BeamKubernetesConfig, BeamEksConfig, BeamRdsConfig 13 | 14 | AWS_REGIONS = [ 15 | 'us-east-1', 16 | 'us-east-2', 17 | 'us-west-1', 18 | 'us-west-2', 19 | 'ap-south-1', 20 | 'ap-south-2', 21 | 'ap-northeast-3', 22 | 'ap-northeast-2', 23 | 'ap-southeast-1', 24 | 'ap-southeast-2', 25 | 'ap-southeast-3', 26 | 'ap-southeast-4', 27 | 'ap-northeast-1', 28 | 'ap-east-1', 29 | 'ca-central-1', 30 | 'eu-central-1', 31 | 'eu-central-2', 32 | 'eu-west-1', 33 | 'eu-west-2', 34 | 'eu-west-3', 35 | 'eu-south-1', 36 | 'eu-south-2', 37 | 'eu-north-1', 38 | 'sa-east-1', 39 | 'me-central-1', 40 | 'il-central-1', 41 | 'af-south-1', 42 | 'me-south-1', 43 | ] 44 | 45 | 46 | def get_available_aws_regions(organization: AwsOrganization, role_name: str) -> list[str]: 47 | available_regions = organization.get_all_regions(role_name) 48 | return sorted(available_regions) or AWS_REGIONS 49 | 50 | 51 | def _validate_aws_sso_url(url: str) -> bool | str: 52 | if not validators.url(url): 53 | return 'Invalid URL' 54 | 55 | return True 56 | 57 | 58 | def ask_for_config(config_path: str, sso_url: str, sso_region: str) -> Optional[BeamConfig]: 59 | """ 60 | :param config_path: 61 | :param sso_url: 62 | :param sso_region: 63 | :return: BeamConfig 64 | """ 65 | 66 | if not (sso_url and sso_region): 67 | sso_url = questionary.text('What is your SSO URL?', validate=_validate_aws_sso_url).unsafe_ask() 68 | sso_region = questionary.select('What is your SSO region?', choices=AWS_REGIONS).unsafe_ask() 69 | 70 | aws_sso_lib.login(sso_url, sso_region) 71 | 72 | organization = AwsOrganization(sso_url, sso_region) 73 | available_aws_accounts = organization.get_accounts() 74 | 75 | aws_accounts = questionary.checkbox('What are your AWS accounts?', 76 | choices=[Choice(title=account.name, value=str(account.id)) for account in available_aws_accounts], 77 | validate=lambda x: True if bool(x) else 'Please select at least one account', 78 | ).unsafe_ask() 79 | 80 | all_available_aws_roles = {role[2] for account in available_aws_accounts for role in organization.get_all_roles(account)} 81 | print( 82 | 'Please choose your preferred Permission Set. 
' 83 | '[bold bright_yellow]Notice that this Permission Set will be used to connect to all your accounts.[/bold bright_yellow]') 84 | aws_role = questionary.select('Permission Set', 85 | choices=all_available_aws_roles, 86 | ).unsafe_ask() 87 | 88 | aws_regions = questionary.checkbox('Please choose your regions', 89 | # choices=get_available_aws_regions(organization, aws_role), 90 | choices=AWS_REGIONS, 91 | validate=lambda x: True if bool(x) else 'Please select at least one region', 92 | ).unsafe_ask() 93 | 94 | aws = BeamAwsConfig(sso_url, sso_region, aws_role, aws_accounts, aws_regions) 95 | 96 | bastion_regex = questionary.text('What is the regex to detect your bastion?', '*bastion*').unsafe_ask() 97 | tags = dict(Name=bastion_regex) if bastion_regex else {} 98 | bastion = BeamBastionConfig(tags=tags) 99 | 100 | kubernetes_namespace = questionary.text('What is your preferred kubernetes namespace?', default='default').unsafe_ask() 101 | kubernetes = BeamKubernetesConfig(namespace=kubernetes_namespace) 102 | 103 | eks_enabled = questionary.confirm('Do you want to enable EKS?').unsafe_ask() 104 | eks = BeamEksConfig(enabled=eks_enabled) 105 | if eks_enabled: 106 | eks_regex = questionary.text('What is the regex to detect your EKS cluster? Leave empty to detect all').unsafe_ask() 107 | eks.tags = {'Name': eks_regex} if eks_regex else {} 108 | 109 | rds_enabled = questionary.confirm('Do you want to enable RDS?').unsafe_ask() 110 | rds = BeamRdsConfig(enabled=rds_enabled) 111 | if rds_enabled: 112 | rds_regex = questionary.text('What is the regex to detect your RDS instances? Leave empty to detect all').unsafe_ask() 113 | rds.tags = {'Name': rds_regex} if rds_regex else {} 114 | 115 | beam_config = BeamConfig(aws=aws, 116 | bastion=bastion, 117 | kubernetes=kubernetes, 118 | eks=eks, 119 | rds=rds, 120 | ) 121 | 122 | yaml_config = yaml.dump(beam_config.to_dict(), default_flow_style=False) 123 | 124 | yaml_config = '\n'.join([' ' + line for line in yaml_config.split('\n')]) # add \t before each line 125 | 126 | print('\n') 127 | print('\t[red]Please approve the following config:[/red]\n') 128 | print(Panel.fit(f'{yaml_config}', title=config_path, border_style='red')) 129 | print('[bold red]This will override your current config![/bold red]\n') 130 | 131 | approve = questionary.confirm('Approve?', 132 | auto_enter=False, 133 | default=False, 134 | style=questionary.Style([ 135 | ('question', 'fg:#cc5454 bold'), 136 | ]) 137 | ).unsafe_ask() 138 | if not approve: 139 | print('Doing nothing') 140 | return None 141 | 142 | return beam_config 143 | -------------------------------------------------------------------------------- /beam/utils.py: -------------------------------------------------------------------------------- 1 | import configparser 2 | import logging 3 | import os 4 | import platform 5 | import subprocess 6 | import urllib.request 7 | from pathlib import Path 8 | from typing import Optional 9 | 10 | import colorlog 11 | 12 | logger = logging.getLogger('beam') 13 | logger.setLevel(logging.DEBUG) 14 | 15 | # DEFAULT_FORMAT = '%(asctime)s %(log_color)s[%(levelname)s]%(reset)s [%(name)s] %(module)s:\t%(log_color)s%(message)s%(reset)s' 16 | DEFAULT_FORMAT = '%(log_color)s[%(levelname)s]%(reset)s [%(name)s] %(module)s: %(log_color)s%(message)s%(reset)s' 17 | 18 | console_handler = colorlog.StreamHandler() 19 | console_handler.setFormatter(colorlog.ColoredFormatter(DEFAULT_FORMAT, reset=True)) 20 | logger.addHandler(console_handler) 21 | 22 | 23 | def execute(command: str) -> 
subprocess.Popen: 24 | logger.debug(f'Executing command: {command}') 25 | process = subprocess.Popen(command, shell=True) 26 | return process 27 | 28 | 29 | def hash_val(input_string: str, siz: int = 1024) -> int: 30 | """Calculate the hash value of a string. 31 | 32 | Args: 33 | input_string (str): The string to be hashed. 34 | siz (int, optional): The size of the hash table. Defaults to 1024. 35 | 36 | Returns: 37 | int: The hash value of the string. 38 | """ 39 | hash_value = sum(ord(x) for x in input_string) 40 | return hash_value % siz 41 | 42 | 43 | def add_profile_to_aws_config(account_id: str, role: str, sso_url: str, default_region: str, sso_region: str, 44 | dont_override: bool = False, 45 | config_file_path: Optional[str] = None) -> str: 46 | """ 47 | Add a profile to the AWS config file. 48 | 49 | Args: 50 | account_id (str): The AWS account ID. 51 | role (str): The role name. 52 | sso_url (str): The SSO start URL. 53 | default_region (str): The default region. 54 | sso_region (str): The SSO region. 55 | dont_override (bool, optional): Whether to override an existing profile. Defaults to False. 56 | config_file_path (str, optional): The path to the AWS config file. Defaults to '~/.aws/config'. 57 | 58 | Returns: 59 | str: The profile name. 60 | """ 61 | config_file_path = config_file_path or os.path.join(str(Path.home()), '.aws', 'config') 62 | logger.debug(f'Adding profile to AWS config file: {account_id}-{role}') 63 | if not isinstance(account_id, str): 64 | raise TypeError("'account_id' must be a string.") 65 | if not isinstance(role, str): 66 | raise TypeError("'role' must be a string.") 67 | if not isinstance(sso_url, str): 68 | raise TypeError("'sso_url' must be a string.") 69 | if not isinstance(default_region, str): 70 | raise TypeError("'default_region' must be a string.") 71 | if not isinstance(sso_region, str): 72 | raise TypeError("'sso_region' must be a string.") 73 | 74 | if not os.path.exists(config_file_path): 75 | os.makedirs(os.path.dirname(config_file_path), exist_ok=True) 76 | 77 | parser = configparser.ConfigParser() 78 | parser.read(config_file_path) 79 | 80 | profile_name = f'{account_id}-{role}' 81 | section_name = f'profile {profile_name}' 82 | 83 | if section_name in parser.sections(): 84 | if dont_override: 85 | return profile_name 86 | else: 87 | parser.add_section(section_name) 88 | 89 | parser.set(section_name, 'sso_start_url', sso_url) 90 | parser.set(section_name, 'sso_region', sso_region) 91 | parser.set(section_name, 'sso_account_id', account_id) 92 | parser.set(section_name, 'sso_role_name', role) 93 | parser.set(section_name, 'region', default_region) 94 | parser.set(section_name, 'output', 'json') 95 | 96 | with open(config_file_path, 'w') as config_file: 97 | parser.write(config_file) 98 | 99 | return profile_name 100 | 101 | 102 | def get_home_directory() -> str: 103 | """Get the home directory. 104 | 105 | Returns: 106 | str: The home directory. 
107 | """ 108 | system = platform.system() 109 | if system == 'Linux' and os.getenv('SUDO_USER'): 110 | return os.path.expanduser(f'~{os.getenv("SUDO_USER")}') 111 | return str(Path.home()) 112 | 113 | 114 | def get_username() -> str: 115 | system = platform.system() 116 | if system == 'Linux': 117 | if username := os.getenv('SUDO_USER') or os.getenv('USER') or os.getenv('USERNAME'): 118 | return username 119 | 120 | return os.getlogin() 121 | 122 | 123 | def validate_aws_installation() -> None: 124 | # pylint: disable=raise-missing-from 125 | logger.debug('Validating AWS CLI installation') 126 | try: 127 | aws_version = subprocess.check_output('aws --version', shell=True).decode('ascii').strip() 128 | except subprocess.CalledProcessError: 129 | raise Exception( 130 | 'Please install aws-cli: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html') 131 | 132 | if not aws_version.startswith('aws-cli/2.'): 133 | raise Exception('Please update aws-cli to version 2 or higher: ' 134 | 'https://docs.aws.amazon.com/cli/latest/userguide/cliv2-migration-instructions.html') 135 | elif int(aws_version.split('.')[1]) < 8: 136 | raise Exception('AWS cli version under 2.8 please update to the latest version: ' 137 | 'https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html') 138 | 139 | 140 | def validate_ssm_installation() -> None: 141 | # pylint: disable=raise-missing-from 142 | logger.debug('Validating SSM installation') 143 | try: 144 | ssm_plugin_output = subprocess.check_output('session-manager-plugin', shell=True).decode('ascii').strip() 145 | if not ssm_plugin_output.startswith('The Session Manager plugin was installed successfully'): 146 | raise Exception('Session Manager plugin not installed') 147 | except subprocess.CalledProcessError: 148 | raise Exception('Please install the Session Manager plugin for the AWS CLI: ' 149 | 'https://docs.aws.amazon.com/systems-manager/latest/userguide/session-manager-working-with-install-plugin.html') 150 | 151 | 152 | def validate_internet_connection() -> bool: 153 | logger.debug('Validating internet connection') 154 | try: 155 | urllib.request.urlopen('https://www.google.com') 156 | return True 157 | except Exception as e: 158 | raise Exception('No internet connection') from e 159 | 160 | 161 | def validate_prerequisites() -> None: 162 | validate_internet_connection() 163 | validate_aws_installation() 164 | validate_ssm_installation() 165 | -------------------------------------------------------------------------------- /beam/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | import functools 3 | import logging 4 | import os 5 | import sys 6 | from typing import Any, Callable 7 | 8 | import aws_sso_lib 9 | import click 10 | import yaml 11 | from click import Context 12 | from rich import print # pylint: disable=redefined-builtin 13 | from rich.align import Align 14 | from rich.panel import Panel 15 | from rich.pretty import Pretty 16 | 17 | from beam import settings, __version__ 18 | from beam.aws.utils import AwsOrganization 19 | from beam.config_loader import BeamConfig 20 | from beam.runner import BeamRunner 21 | from beam.selector import ask_for_config 22 | from beam.utils import logger, get_home_directory, validate_prerequisites 23 | 24 | DEFAULT_CONFIG_DIRECTORY = os.path.join(get_home_directory(), '.beam', 'config.yaml') 25 | 26 | 27 | def common_params(func: Callable) -> Callable: 28 | @click.option('--debug', '--verbose', default=False, 
help='Print debug logs', is_flag=True) 29 | @functools.wraps(func) 30 | def wrapper(*args, **kwargs) -> Any: # type: ignore 31 | if 'debug' in kwargs: 32 | if kwargs.get('debug', False): 33 | logger.setLevel(logging.DEBUG) 34 | settings.debug = True 35 | 36 | else: 37 | logger.setLevel(logging.WARNING) 38 | kwargs.pop('debug') 39 | 40 | return func(*args, **kwargs) 41 | 42 | return wrapper 43 | 44 | 45 | @click.group(invoke_without_command=True) 46 | @click.option('--version', help='Show version', is_flag=True) 47 | @click.pass_context 48 | def cli(ctx: Context = None, version: bool = False) -> None: # pylint: disable=redefined-outer-name 49 | if version: 50 | print_version() 51 | return 52 | 53 | if ctx.invoked_subcommand is None: 54 | ctx = click.get_current_context() 55 | click.echo(ctx.get_help()) 56 | ctx.exit() 57 | 58 | 59 | @cli.command() 60 | @click.option('--config', '-c', default=DEFAULT_CONFIG_DIRECTORY, help='Path to config file to generate') 61 | @click.option('--sso-url', help='AWS SSO URL') 62 | @click.option('--sso-region', help='AWS SSO Region') 63 | @common_params 64 | def configure(config: str, sso_url: str, sso_region: str) -> None: 65 | validate_prerequisites_and_exit() 66 | 67 | config = os.path.realpath(config) 68 | try: 69 | beam_config = ask_for_config(config_path=config, sso_url=sso_url, sso_region=sso_region) 70 | except KeyboardInterrupt: 71 | return 72 | 73 | if not beam_config: 74 | return 75 | 76 | os.makedirs(os.path.dirname(config), exist_ok=True) 77 | with open(config, 'w') as file: 78 | yaml.dump(beam_config.to_dict(), file) 79 | 80 | print(f'[bold green]:heavy_check_mark:[/bold green] Config saved to [bold italic bright_cyan]{config}[/bold italic bright_cyan]\n') 81 | 82 | 83 | @cli.command() 84 | @click.option('--config', '-c', default=DEFAULT_CONFIG_DIRECTORY, help='Path to config file') 85 | @click.option('--force-scan', '-f', default=False, help='Force scan of all accounts', is_flag=True) 86 | @click.option('--eks/--no-eks', default=True, help='Connect to EKS clusters') 87 | @click.option('--rds/--no-rds', default=True, help='Connect to RDS clusters') 88 | @common_params 89 | def run(config: str, force_scan: bool, eks: bool, rds: bool) -> None: 90 | validate_prerequisites_and_exit() 91 | 92 | config = os.path.realpath(config) 93 | print(Panel( 94 | Align.center( 95 | '[bold yellow3]Beam[/bold yellow3] by [bold magenta]Entitle[/bold magenta] :comet:' 96 | ), border_style='yellow3')) 97 | 98 | # TODO: Create custom Command class and manage logs from there, support multi log levels 99 | # see here https://github.com/pallets/click/issues/66#issuecomment-674322963 100 | 101 | try: 102 | with open(config, 'r') as file: 103 | config_dict = yaml.safe_load(file) 104 | beam_config = BeamConfig.from_dict(config_dict) 105 | except FileNotFoundError: 106 | logger.error(f'Config file not found: {config}', exc_info=True if settings.debug else None) 107 | return 108 | except Exception as e: 109 | logger.error(f'Failed to load Beam config: {e}', exc_info=True if settings.debug else None) 110 | return 111 | 112 | if settings.debug: 113 | print(Panel.fit(Pretty(beam_config), title=config, border_style='white')) 114 | 115 | organization = AwsOrganization(beam_config.aws.sso_url, beam_config.aws.sso_region) 116 | permission_set = beam_config.aws.role # TODO: ADD TO SELECTOR TO SELECT PS FOR EACH ACCOUNT OR ALL OF THEM 117 | 118 | aws_sso_lib.login(beam_config.aws.sso_url, beam_config.aws.sso_region) 119 | 120 | beam_runner = BeamRunner(beam_config, config, 
organization, permission_set) 121 | 122 | if force_scan or not beam_config.bastions: 123 | bastions = beam_runner.scan_resources() 124 | else: 125 | bastions = beam_config.bastions 126 | 127 | # print endpoints 128 | rds_endpoints = [f'{rds_instance.endpoint}:{rds_instance.local_port}' for bastion in bastions for rds_instance in bastion.rds_instances] 129 | endpoints_str = '\n'.join([f'[bold green]{endpoint}[/bold green]' for endpoint in rds_endpoints]) 130 | print(Panel.fit(f'{endpoints_str}', title='RDS (Database) Endpoints', border_style='green', padding=(1, 2, 1, 2))) 131 | 132 | processes = beam_runner.connect_to_resources(bastions, eks, rds) 133 | 134 | logger.debug('Finished Beam') 135 | 136 | try: 137 | [process.wait() for process in processes] 138 | except KeyboardInterrupt: 139 | logger.debug('Exiting Beam') 140 | for process in processes: 141 | process.kill() 142 | 143 | 144 | @cli.command() 145 | @common_params 146 | def stop() -> None: 147 | pass 148 | 149 | 150 | @cli.command() 151 | def version() -> None: 152 | print_version() 153 | 154 | 155 | def print_version() -> None: 156 | print(f'[bold yellow3]Beam[/bold yellow3] [white]{__version__}[/white] by [bold magenta]Entitle[/bold magenta] :comet:') 157 | 158 | 159 | def validate_prerequisites_and_exit() -> None: 160 | try: 161 | validate_prerequisites() 162 | except Exception as e: 163 | if settings.debug: 164 | logger.exception(f'Failed to validate prerequisites: {e}') 165 | else: 166 | print(f'[bold red]Error:[/bold red] {e}') 167 | sys.exit(1) 168 | 169 | 170 | def setup_yaml() -> None: 171 | def represent_none(self, _) -> Any: # type: ignore 172 | return self.represent_scalar('tag:yaml.org,2002:null', '') 173 | 174 | yaml.add_representer(type(None), represent_none) 175 | 176 | 177 | def main() -> None: 178 | setup_yaml() 179 | cli() 180 | 181 | 182 | if __name__ == '__main__': 183 | cli() 184 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /beam/aws/utils.py: -------------------------------------------------------------------------------- 1 | import concurrent 2 | import fnmatch 3 | from concurrent.futures import ThreadPoolExecutor 4 | from typing import List, Optional 5 | 6 | import aws_sso_lib 7 | import boto3 8 | 9 | from beam.aws.bastion import AwsBastion 10 | from beam.aws.models import AwsEksInstance, Boto3SessionConfig, AwsAccount, AwsRdsInstance 11 | from beam.config_loader import BeamConfig 12 | from beam.utils import add_profile_to_aws_config, logger 13 | 14 | 15 | class AwsOrganization: 16 | def __init__(self, sso_start_url: str, sso_region: str) -> None: 17 | self.sso_start_url = sso_start_url 18 | self.sso_region = sso_region 19 | self.accounts: list[tuple[str, str]] = [] 20 | 21 | def get_accounts(self) -> list[AwsAccount]: 22 | self.accounts = list(aws_sso_lib.list_available_accounts(self.sso_start_url, self.sso_region)) 23 | logger.debug(f'Found {len(self.accounts)} accounts: {self.accounts}') 24 | return [AwsAccount(account[0], account[1]) for account in self.accounts] 25 | 26 | def get_all_roles(self, account: AwsAccount) -> list[tuple[str, str, str]]: 27 | account_id = account.id 28 | roles = list(aws_sso_lib.list_available_roles(self.sso_start_url, self.sso_region, account_id)) 29 | logger.debug(f'Found {len(roles)} roles in account {account_id}: {roles}') 30 | return roles 31 | 32 | def get_session(self, account_id: str, permission_set_name: str, region: str) -> boto3.Session: 33 | return aws_sso_lib.get_boto3_session(self.sso_start_url, account_id=account_id, 34 | role_name=permission_set_name, 35 | sso_region=self.sso_region, region=region) 36 | 37 | def process_account(self, account: AwsAccount, role: str, beam_config: BeamConfig) -> list[AwsBastion]: 38 | logger.info(f'Processing account {account.id}') 39 | account_id = account.id 40 | session = aws_sso_lib.get_boto3_session(self.sso_start_url, account_id=account_id, 41 | role_name=role, 42 | sso_region=self.sso_region, region=self.sso_region) 43 | add_profile_to_aws_config(account_id, role, self.sso_start_url, session.region_name, session.region_name, False) 44 | 45 | bastions = [] 46 | 47 | for region in beam_config.aws.regions: 48 | session_config = Boto3SessionConfig(account_id, self.sso_start_url, self.sso_region, role, region) 49 | 50 | with ThreadPoolExecutor(max_workers=10) as executor: 51 | futures = [executor.submit(process_region, session_config, region, beam_config)] 52 | for future in concurrent.futures.as_completed(futures): 53 | if future.result(): 54 | bastions.extend(future.result()) 55 | 56 | return bastions 57 | 58 | def get_all_regions(self, permission_set_name: str) -> list[str]: 59 | regions: set[str] = set() 60 | for account in self.get_accounts(): 61 | session = aws_sso_lib.get_boto3_session(self.sso_start_url, self.sso_region, account.id, permission_set_name, region=self.sso_region) 62 | ec2_client = session.client('ec2') 63 | logger.debug(f'Retrieving AWS regions for account {account.id}') 64 | 65 | try: 66 | response = ec2_client.describe_regions() 67 | regions.update({region['RegionName'] for region in response['Regions']}) 68 | except Exception as e: 69 | logger.exception(f'An error occurred while retrieving AWS regions: {e}') 70 | 71 | return list(regions) 72 | 73 | 74 | def get_all_eks_clusters(session: boto3.Session, tags: Optional[dict[str, str]] = None) -> List[AwsEksInstance]: 75 | """ 76 | Retrieves a list of all EKS clusters in 
the account. 77 | :param session: boto3 session 78 | :param tags: tags to match, 'Name' can be used with wildcards 79 | :return: list of eks clusters 80 | """ 81 | tags = tags or {} 82 | client = session.client('eks') 83 | paginator = client.get_paginator('list_clusters') 84 | response_iterator = paginator.paginate() 85 | cluster_names = [] 86 | for response in response_iterator: 87 | cluster_names.extend(response['clusters']) 88 | 89 | eks_list = [] 90 | 91 | for cluster_name in cluster_names: 92 | try: 93 | response = client.describe_cluster(name=cluster_name) 94 | cluster = response['cluster'] 95 | 96 | if not match_tags(cluster['tags'], dict(tags)): # pass a copy: match_tags pops the 'Name' key 97 | continue 98 | 99 | eks_list.append(AwsEksInstance(cluster['name'], cluster['endpoint'], cluster['arn'], cluster['resourcesVpcConfig']['vpcId'])) 100 | except (client.exceptions.ResourceNotFoundException, client.exceptions.InvalidParameterException): 101 | logger.exception(f'Error describing cluster {cluster_name}') 102 | except Exception as e: 103 | logger.exception( 104 | f'Error describing cluster {cluster_name}') # decide whether to continue or stop execution based on the type of exception 105 | raise e 106 | 107 | return eks_list 108 | 109 | 110 | def get_all_rds_resources(session: boto3.Session, tags: Optional[dict[str, str]] = None) -> list[AwsRdsInstance]: 111 | """ 112 | Retrieves all RDS resources (instances and clusters) 113 | :param session: boto3 session 114 | :param tags: tags to match, 'Name' can be used with wildcards 115 | """ 116 | tags = dict(tags) if tags else {} # copy before popping 'Name' below, so the caller's dict is not mutated 117 | client = session.client('rds') 118 | 119 | # Retrieve instances using paginator 120 | paginator = client.get_paginator('describe_db_instances') 121 | response_iterator = paginator.paginate() 122 | instances = [instance for response in response_iterator for instance in response['DBInstances']] 123 | 124 | # Retrieve clusters using paginator 125 | paginator = client.get_paginator('describe_db_clusters') 126 | response_iterator = paginator.paginate() 127 | clusters = [cluster for response in response_iterator for cluster in response['DBClusters']] 128 | 129 | # Filter resources based on status 130 | available_instances = [i for i in instances if i['DBInstanceStatus'] == 'available'] 131 | available_clusters = [c for c in clusters if c['Status'] == 'available'] 132 | 133 | # Collect matching RDS instances and clusters 134 | instance_resources: list[AwsRdsInstance] = [] 135 | 136 | name_regex = tags.pop('Name', None) 137 | 138 | for instance in available_instances: 139 | # apply user filtering 140 | if name_regex: 141 | if not fnmatch.fnmatch(instance['DBInstanceIdentifier'], name_regex): 142 | continue 143 | if not match_key_value_tags(instance['TagList'], tags): 144 | continue 145 | instance_resources.append(AwsRdsInstance(instance['DBInstanceIdentifier'], 146 | instance['Endpoint']['Address'], 147 | int(instance['Endpoint']['Port']), instance['DBSubnetGroup']['VpcId']) 148 | ) 149 | cluster_resources = [] 150 | for cluster in available_clusters: 151 | # apply user filtering 152 | if name_regex: 153 | if not fnmatch.fnmatch(cluster['DBClusterIdentifier'], name_regex): 154 | continue 155 | 156 | if not match_key_value_tags(cluster['TagList'], tags): # TagList is a list of Key/Value pairs, like the instance branch above 157 | continue 158 | 159 | cluster_resources.append(AwsRdsInstance(cluster['DBClusterIdentifier'], 160 | cluster['Endpoint'], 161 | int(cluster['Port'])) 162 | ) 163 | 164 | # Return the combined list of resources 165 | return instance_resources + cluster_resources 166 | 167 | 168 | def get_matching_ec2_instance(session: 
boto3.Session, session_config: Boto3SessionConfig, 169 | name_regex: Optional[str], 170 | filter_tags: Optional[dict[str, str]] = None) -> list[AwsBastion]: 171 | """ 172 | Retrieves a list of EC2 instances that match the given criteria. 173 | 174 | :param session: An instance of boto3.session.Session. 175 | :param session_config: An instance of Boto3SessionConfig. 176 | :param name_regex: A string representing the regular expression for matching instance names. 177 | :param filter_tags: A dictionary of tags to filter by. 178 | :returns: A list of bastion instances that match the given criteria. 179 | """ 180 | # TODO: should support multi-bastion per-region (e.g. for multiple VPCs) 181 | filter_tags = filter_tags or {} 182 | 183 | bastions = [] 184 | try: 185 | ec2_client = session.resource('ec2', region_name=session_config.region) 186 | filters = [{'Name': 'tag:' + tag_key, 'Values': [tag_value]} for tag_key, tag_value in filter_tags.items()] 187 | filters.append({'Name': 'instance-state-name', 'Values': ['running']}) # running instances only 188 | instances = list(ec2_client.instances.filter(Filters=filters)) 189 | 190 | for instance in instances: 191 | logger.debug(f'Found EC2 instance: {instance.instance_id}') 192 | instance_tags = {tag['Key']: tag['Value'] for tag in instance.tags} 193 | instance_name = instance_tags.get('Name', '') 194 | 195 | if not fnmatch.fnmatchcase(instance_name, name_regex): 196 | continue 197 | 198 | logger.debug(f'EC2 instance matched: {instance_name}') 199 | 200 | bastions.append( 201 | AwsBastion( 202 | boto3_session_config=session_config, 203 | instance_id=instance.instance_id, name=instance_name, vpc_id=instance.vpc_id) 204 | ) 205 | except Exception as e: 206 | logger.exception(f'Could not retrieve EC2 instances in region {session_config.region}: {e}') 207 | 208 | return bastions 209 | 210 | 211 | def get_profile_name(account_id: str, permission_set_name: str) -> str: 212 | return f'{account_id}-{permission_set_name}' 213 | 214 | 215 | def process_region(session_config: Boto3SessionConfig, region: str, beam_config: BeamConfig) -> list[AwsBastion]: 216 | logger.info(f'Processing account {session_config.account_id} in region {region}') 217 | boto3_session = session_config.get_session() 218 | 219 | ekss = get_all_eks_clusters(boto3_session, beam_config.eks.tags) 220 | rdss = get_all_rds_resources(boto3_session, beam_config.rds.tags) 221 | region_bastions = get_matching_ec2_instance(boto3_session, session_config, beam_config.bastion.name, beam_config.bastion.other_tags) 222 | 223 | if not region_bastions: 224 | return [] 225 | 226 | for bastion in region_bastions: 227 | for eks in ekss: 228 | if eks.vpc_id == bastion.vpc_id: 229 | bastion.add_eks_instance(eks) 230 | 231 | for rds in rdss: 232 | if rds.vpc_id == bastion.vpc_id: 233 | bastion.add_rds_instance(rds) 234 | 235 | return region_bastions 236 | 237 | 238 | def match_key_value_tags(actual_tags: list, desired_tags: dict) -> bool: 239 | for expected_tag_key, expected_tag_value in desired_tags.items(): 240 | if not any(actual_tag['Key'] == expected_tag_key and actual_tag['Value'] == expected_tag_value for actual_tag in actual_tags): 241 | return False 242 | 243 | return True 244 | 245 | 246 | def match_tags(actual_tags: dict[str, str], desired_tags: dict[str, str]) -> bool: 247 | if name_regex := desired_tags.get('Name'): 248 | if not fnmatch.fnmatchcase(actual_tags.get('Name', ''), name_regex): 249 | return False 250 | desired_tags.pop('Name') 251 | 252 | for expected_tag_key, expected_tag_value in 
desired_tags.items(): 253 | if not actual_tags.get(expected_tag_key) == expected_tag_value: 254 | return False 255 | 256 | return True 257 | -------------------------------------------------------------------------------- /beam/_version.py: -------------------------------------------------------------------------------- 1 | 2 | # This file helps to compute a version number in source trees obtained from 3 | # git-archive tarball (such as those provided by githubs download-from-tag 4 | # feature). Distribution tarballs (built by setup.py sdist) and build 5 | # directories (produced by setup.py build) will contain a much shorter file 6 | # that just contains the computed version number. 7 | 8 | # This file is released into the public domain. 9 | # Generated by versioneer-0.29 10 | # https://github.com/python-versioneer/python-versioneer 11 | 12 | """Git implementation of _version.py.""" 13 | 14 | import errno 15 | import os 16 | import re 17 | import subprocess 18 | import sys 19 | from typing import Any, Callable, Dict, List, Optional, Tuple 20 | import functools 21 | 22 | 23 | def get_keywords() -> Dict[str, str]: 24 | """Get the keywords needed to look up the version information.""" 25 | # these strings will be replaced by git during git-archive. 26 | # setup.py/versioneer.py will grep for the variable names, so they must 27 | # each be defined on a line of their own. _version.py will just call 28 | # get_keywords(). 29 | git_refnames = " (HEAD -> master, tag: 0.1.3)" 30 | git_full = "83ee6163a3ffcc87aff8aad6c74d95766b1b58eb" 31 | git_date = "2023-11-19 16:19:25 +0200" 32 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 33 | return keywords 34 | 35 | 36 | class VersioneerConfig: 37 | """Container for Versioneer configuration parameters.""" 38 | 39 | VCS: str 40 | style: str 41 | tag_prefix: str 42 | parentdir_prefix: str 43 | versionfile_source: str 44 | verbose: bool 45 | 46 | 47 | def get_config() -> VersioneerConfig: 48 | """Create, populate and return the VersioneerConfig() object.""" 49 | # these strings are filled in when 'setup.py versioneer' creates 50 | # _version.py 51 | cfg = VersioneerConfig() 52 | cfg.VCS = "git" 53 | cfg.style = "pep440" 54 | cfg.tag_prefix = "" 55 | cfg.parentdir_prefix = "" 56 | cfg.versionfile_source = "beam/_version.py" 57 | cfg.verbose = False 58 | return cfg 59 | 60 | 61 | class NotThisMethod(Exception): 62 | """Exception raised if a method is not valid for the current scenario.""" 63 | 64 | 65 | LONG_VERSION_PY: Dict[str, str] = {} 66 | HANDLERS: Dict[str, Dict[str, Callable]] = {} 67 | 68 | 69 | def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator 70 | """Create decorator to mark a method as the handler of a VCS.""" 71 | def decorate(f: Callable) -> Callable: 72 | """Store f in HANDLERS[vcs][method].""" 73 | if vcs not in HANDLERS: 74 | HANDLERS[vcs] = {} 75 | HANDLERS[vcs][method] = f 76 | return f 77 | return decorate 78 | 79 | 80 | def run_command( 81 | commands: List[str], 82 | args: List[str], 83 | cwd: Optional[str] = None, 84 | verbose: bool = False, 85 | hide_stderr: bool = False, 86 | env: Optional[Dict[str, str]] = None, 87 | ) -> Tuple[Optional[str], Optional[int]]: 88 | """Call the given command(s).""" 89 | assert isinstance(commands, list) 90 | process = None 91 | 92 | popen_kwargs: Dict[str, Any] = {} 93 | if sys.platform == "win32": 94 | # This hides the console window if pythonw.exe is used 95 | startupinfo = subprocess.STARTUPINFO() 96 | startupinfo.dwFlags |= 
subprocess.STARTF_USESHOWWINDOW 97 | popen_kwargs["startupinfo"] = startupinfo 98 | 99 | for command in commands: 100 | try: 101 | dispcmd = str([command] + args) 102 | # remember shell=False, so use git.cmd on windows, not just git 103 | process = subprocess.Popen([command] + args, cwd=cwd, env=env, 104 | stdout=subprocess.PIPE, 105 | stderr=(subprocess.PIPE if hide_stderr 106 | else None), **popen_kwargs) 107 | break 108 | except OSError as e: 109 | if e.errno == errno.ENOENT: 110 | continue 111 | if verbose: 112 | print("unable to run %s" % dispcmd) 113 | print(e) 114 | return None, None 115 | else: 116 | if verbose: 117 | print("unable to find command, tried %s" % (commands,)) 118 | return None, None 119 | stdout = process.communicate()[0].strip().decode() 120 | if process.returncode != 0: 121 | if verbose: 122 | print("unable to run %s (error)" % dispcmd) 123 | print("stdout was %s" % stdout) 124 | return None, process.returncode 125 | return stdout, process.returncode 126 | 127 | 128 | def versions_from_parentdir( 129 | parentdir_prefix: str, 130 | root: str, 131 | verbose: bool, 132 | ) -> Dict[str, Any]: 133 | """Try to determine the version from the parent directory name. 134 | 135 | Source tarballs conventionally unpack into a directory that includes both 136 | the project name and a version string. We will also support searching up 137 | two directory levels for an appropriately named parent directory 138 | """ 139 | rootdirs = [] 140 | 141 | for _ in range(3): 142 | dirname = os.path.basename(root) 143 | if dirname.startswith(parentdir_prefix): 144 | return {"version": dirname[len(parentdir_prefix):], 145 | "full-revisionid": None, 146 | "dirty": False, "error": None, "date": None} 147 | rootdirs.append(root) 148 | root = os.path.dirname(root) # up a level 149 | 150 | if verbose: 151 | print("Tried directories %s but none started with prefix %s" % 152 | (str(rootdirs), parentdir_prefix)) 153 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 154 | 155 | 156 | @register_vcs_handler("git", "get_keywords") 157 | def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: 158 | """Extract version information from the given file.""" 159 | # the code embedded in _version.py can just fetch the value of these 160 | # keywords. When used from setup.py, we don't want to import _version.py, 161 | # so we do it with a regexp instead. This function is not used from 162 | # _version.py. 163 | keywords: Dict[str, str] = {} 164 | try: 165 | with open(versionfile_abs, "r") as fobj: 166 | for line in fobj: 167 | if line.strip().startswith("git_refnames ="): 168 | mo = re.search(r'=\s*"(.*)"', line) 169 | if mo: 170 | keywords["refnames"] = mo.group(1) 171 | if line.strip().startswith("git_full ="): 172 | mo = re.search(r'=\s*"(.*)"', line) 173 | if mo: 174 | keywords["full"] = mo.group(1) 175 | if line.strip().startswith("git_date ="): 176 | mo = re.search(r'=\s*"(.*)"', line) 177 | if mo: 178 | keywords["date"] = mo.group(1) 179 | except OSError: 180 | pass 181 | return keywords 182 | 183 | 184 | @register_vcs_handler("git", "keywords") 185 | def git_versions_from_keywords( 186 | keywords: Dict[str, str], 187 | tag_prefix: str, 188 | verbose: bool, 189 | ) -> Dict[str, Any]: 190 | """Get version information from git keywords.""" 191 | if "refnames" not in keywords: 192 | raise NotThisMethod("Short version file found") 193 | date = keywords.get("date") 194 | if date is not None: 195 | # Use only the last line. Previous lines may contain GPG signature 196 | # information. 
197 | date = date.splitlines()[-1] 198 | 199 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 200 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 201 | # -like" string, which we must then edit to make compliant), because 202 | # it's been around since git-1.5.3, and it's too difficult to 203 | # discover which version we're using, or to work around using an 204 | # older one. 205 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 206 | refnames = keywords["refnames"].strip() 207 | if refnames.startswith("$Format"): 208 | if verbose: 209 | print("keywords are unexpanded, not using") 210 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 211 | refs = {r.strip() for r in refnames.strip("()").split(",")} 212 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 213 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 214 | TAG = "tag: " 215 | tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} 216 | if not tags: 217 | # Either we're using git < 1.8.3, or there really are no tags. We use 218 | # a heuristic: assume all version tags have a digit. The old git %d 219 | # expansion behaves like git log --decorate=short and strips out the 220 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 221 | # between branches and tags. By ignoring refnames without digits, we 222 | # filter out many common branch names like "release" and 223 | # "stabilization", as well as "HEAD" and "master". 224 | tags = {r for r in refs if re.search(r'\d', r)} 225 | if verbose: 226 | print("discarding '%s', no digits" % ",".join(refs - tags)) 227 | if verbose: 228 | print("likely tags: %s" % ",".join(sorted(tags))) 229 | for ref in sorted(tags): 230 | # sorting will prefer e.g. "2.0" over "2.0rc1" 231 | if ref.startswith(tag_prefix): 232 | r = ref[len(tag_prefix):] 233 | # Filter out refs that exactly match prefix or that don't start 234 | # with a number once the prefix is stripped (mostly a concern 235 | # when prefix is '') 236 | if not re.match(r'\d', r): 237 | continue 238 | if verbose: 239 | print("picking %s" % r) 240 | return {"version": r, 241 | "full-revisionid": keywords["full"].strip(), 242 | "dirty": False, "error": None, 243 | "date": date} 244 | # no suitable tags, so version is "0+unknown", but full hex is still there 245 | if verbose: 246 | print("no suitable tags, using unknown + full revision id") 247 | return {"version": "0+unknown", 248 | "full-revisionid": keywords["full"].strip(), 249 | "dirty": False, "error": "no suitable tags", "date": None} 250 | 251 | 252 | @register_vcs_handler("git", "pieces_from_vcs") 253 | def git_pieces_from_vcs( 254 | tag_prefix: str, 255 | root: str, 256 | verbose: bool, 257 | runner: Callable = run_command 258 | ) -> Dict[str, Any]: 259 | """Get version from 'git describe' in the root of the source tree. 260 | 261 | This only gets called if the git-archive 'subst' keywords were *not* 262 | expanded, and _version.py hasn't already been rewritten with a short 263 | version string, meaning we're inside a checked out source tree. 264 | """ 265 | GITS = ["git"] 266 | if sys.platform == "win32": 267 | GITS = ["git.cmd", "git.exe"] 268 | 269 | # GIT_DIR can interfere with correct operation of Versioneer. 270 | # It may be intended to be passed to the Versioneer-versioned project, 271 | # but that should not change where we get our version from. 
272 | env = os.environ.copy() 273 | env.pop("GIT_DIR", None) 274 | runner = functools.partial(runner, env=env) 275 | 276 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, 277 | hide_stderr=not verbose) 278 | if rc != 0: 279 | if verbose: 280 | print("Directory %s not under git control" % root) 281 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 282 | 283 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 284 | # if there isn't one, this yields HEX[-dirty] (no NUM) 285 | describe_out, rc = runner(GITS, [ 286 | "describe", "--tags", "--dirty", "--always", "--long", 287 | "--match", f"{tag_prefix}[[:digit:]]*" 288 | ], cwd=root) 289 | # --long was added in git-1.5.5 290 | if describe_out is None: 291 | raise NotThisMethod("'git describe' failed") 292 | describe_out = describe_out.strip() 293 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) 294 | if full_out is None: 295 | raise NotThisMethod("'git rev-parse' failed") 296 | full_out = full_out.strip() 297 | 298 | pieces: Dict[str, Any] = {} 299 | pieces["long"] = full_out 300 | pieces["short"] = full_out[:7] # maybe improved later 301 | pieces["error"] = None 302 | 303 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], 304 | cwd=root) 305 | # --abbrev-ref was added in git-1.6.3 306 | if rc != 0 or branch_name is None: 307 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") 308 | branch_name = branch_name.strip() 309 | 310 | if branch_name == "HEAD": 311 | # If we aren't exactly on a branch, pick a branch which represents 312 | # the current commit. If all else fails, we are on a branchless 313 | # commit. 314 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) 315 | # --contains was added in git-1.5.4 316 | if rc != 0 or branches is None: 317 | raise NotThisMethod("'git branch --contains' returned error") 318 | branches = branches.split("\n") 319 | 320 | # Remove the first line if we're running detached 321 | if "(" in branches[0]: 322 | branches.pop(0) 323 | 324 | # Strip off the leading "* " from the list of branches. 325 | branches = [branch[2:] for branch in branches] 326 | if "master" in branches: 327 | branch_name = "master" 328 | elif not branches: 329 | branch_name = None 330 | else: 331 | # Pick the first branch that is returned. Good or bad. 332 | branch_name = branches[0] 333 | 334 | pieces["branch"] = branch_name 335 | 336 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 337 | # TAG might have hyphens. 338 | git_describe = describe_out 339 | 340 | # look for -dirty suffix 341 | dirty = git_describe.endswith("-dirty") 342 | pieces["dirty"] = dirty 343 | if dirty: 344 | git_describe = git_describe[:git_describe.rindex("-dirty")] 345 | 346 | # now we have TAG-NUM-gHEX or HEX 347 | 348 | if "-" in git_describe: 349 | # TAG-NUM-gHEX 350 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 351 | if not mo: 352 | # unparsable. Maybe git-describe is misbehaving? 
353 | pieces["error"] = ("unable to parse git-describe output: '%s'" 354 | % describe_out) 355 | return pieces 356 | 357 | # tag 358 | full_tag = mo.group(1) 359 | if not full_tag.startswith(tag_prefix): 360 | if verbose: 361 | fmt = "tag '%s' doesn't start with prefix '%s'" 362 | print(fmt % (full_tag, tag_prefix)) 363 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 364 | % (full_tag, tag_prefix)) 365 | return pieces 366 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 367 | 368 | # distance: number of commits since tag 369 | pieces["distance"] = int(mo.group(2)) 370 | 371 | # commit: short hex revision ID 372 | pieces["short"] = mo.group(3) 373 | 374 | else: 375 | # HEX: no tags 376 | pieces["closest-tag"] = None 377 | out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) 378 | pieces["distance"] = len(out.split()) # total number of commits 379 | 380 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 381 | date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() 382 | # Use only the last line. Previous lines may contain GPG signature 383 | # information. 384 | date = date.splitlines()[-1] 385 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 386 | 387 | return pieces 388 | 389 | 390 | def plus_or_dot(pieces: Dict[str, Any]) -> str: 391 | """Return a + if we don't already have one, else return a .""" 392 | if "+" in pieces.get("closest-tag", ""): 393 | return "." 394 | return "+" 395 | 396 | 397 | def render_pep440(pieces: Dict[str, Any]) -> str: 398 | """Build up version string, with post-release "local version identifier". 399 | 400 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 401 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 402 | 403 | Exceptions: 404 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 405 | """ 406 | if pieces["closest-tag"]: 407 | rendered = pieces["closest-tag"] 408 | if pieces["distance"] or pieces["dirty"]: 409 | rendered += plus_or_dot(pieces) 410 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 411 | if pieces["dirty"]: 412 | rendered += ".dirty" 413 | else: 414 | # exception #1 415 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 416 | pieces["short"]) 417 | if pieces["dirty"]: 418 | rendered += ".dirty" 419 | return rendered 420 | 421 | 422 | def render_pep440_branch(pieces: Dict[str, Any]) -> str: 423 | """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . 424 | 425 | The ".dev0" means not master branch. Note that .dev0 sorts backwards 426 | (a feature branch will appear "older" than the master branch). 427 | 428 | Exceptions: 429 | 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] 430 | """ 431 | if pieces["closest-tag"]: 432 | rendered = pieces["closest-tag"] 433 | if pieces["distance"] or pieces["dirty"]: 434 | if pieces["branch"] != "master": 435 | rendered += ".dev0" 436 | rendered += plus_or_dot(pieces) 437 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 438 | if pieces["dirty"]: 439 | rendered += ".dirty" 440 | else: 441 | # exception #1 442 | rendered = "0" 443 | if pieces["branch"] != "master": 444 | rendered += ".dev0" 445 | rendered += "+untagged.%d.g%s" % (pieces["distance"], 446 | pieces["short"]) 447 | if pieces["dirty"]: 448 | rendered += ".dirty" 449 | return rendered 450 | 451 | 452 | def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: 453 | """Split pep440 version string at the post-release segment. 
454 | 455 | Returns the release segments before the post-release and the 456 | post-release version number (or -1 if no post-release segment is present). 457 | """ 458 | vc = str.split(ver, ".post") 459 | return vc[0], int(vc[1] or 0) if len(vc) == 2 else None 460 | 461 | 462 | def render_pep440_pre(pieces: Dict[str, Any]) -> str: 463 | """TAG[.postN.devDISTANCE] -- No -dirty. 464 | 465 | Exceptions: 466 | 1: no tags. 0.post0.devDISTANCE 467 | """ 468 | if pieces["closest-tag"]: 469 | if pieces["distance"]: 470 | # update the post release segment 471 | tag_version, post_version = pep440_split_post(pieces["closest-tag"]) 472 | rendered = tag_version 473 | if post_version is not None: 474 | rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) 475 | else: 476 | rendered += ".post0.dev%d" % (pieces["distance"]) 477 | else: 478 | # no commits, use the tag as the version 479 | rendered = pieces["closest-tag"] 480 | else: 481 | # exception #1 482 | rendered = "0.post0.dev%d" % pieces["distance"] 483 | return rendered 484 | 485 | 486 | def render_pep440_post(pieces: Dict[str, Any]) -> str: 487 | """TAG[.postDISTANCE[.dev0]+gHEX] . 488 | 489 | The ".dev0" means dirty. Note that .dev0 sorts backwards 490 | (a dirty tree will appear "older" than the corresponding clean one), 491 | but you shouldn't be releasing software with -dirty anyways. 492 | 493 | Exceptions: 494 | 1: no tags. 0.postDISTANCE[.dev0] 495 | """ 496 | if pieces["closest-tag"]: 497 | rendered = pieces["closest-tag"] 498 | if pieces["distance"] or pieces["dirty"]: 499 | rendered += ".post%d" % pieces["distance"] 500 | if pieces["dirty"]: 501 | rendered += ".dev0" 502 | rendered += plus_or_dot(pieces) 503 | rendered += "g%s" % pieces["short"] 504 | else: 505 | # exception #1 506 | rendered = "0.post%d" % pieces["distance"] 507 | if pieces["dirty"]: 508 | rendered += ".dev0" 509 | rendered += "+g%s" % pieces["short"] 510 | return rendered 511 | 512 | 513 | def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: 514 | """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . 515 | 516 | The ".dev0" means not master branch. 517 | 518 | Exceptions: 519 | 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] 520 | """ 521 | if pieces["closest-tag"]: 522 | rendered = pieces["closest-tag"] 523 | if pieces["distance"] or pieces["dirty"]: 524 | rendered += ".post%d" % pieces["distance"] 525 | if pieces["branch"] != "master": 526 | rendered += ".dev0" 527 | rendered += plus_or_dot(pieces) 528 | rendered += "g%s" % pieces["short"] 529 | if pieces["dirty"]: 530 | rendered += ".dirty" 531 | else: 532 | # exception #1 533 | rendered = "0.post%d" % pieces["distance"] 534 | if pieces["branch"] != "master": 535 | rendered += ".dev0" 536 | rendered += "+g%s" % pieces["short"] 537 | if pieces["dirty"]: 538 | rendered += ".dirty" 539 | return rendered 540 | 541 | 542 | def render_pep440_old(pieces: Dict[str, Any]) -> str: 543 | """TAG[.postDISTANCE[.dev0]] . 544 | 545 | The ".dev0" means dirty. 546 | 547 | Exceptions: 548 | 1: no tags. 0.postDISTANCE[.dev0] 549 | """ 550 | if pieces["closest-tag"]: 551 | rendered = pieces["closest-tag"] 552 | if pieces["distance"] or pieces["dirty"]: 553 | rendered += ".post%d" % pieces["distance"] 554 | if pieces["dirty"]: 555 | rendered += ".dev0" 556 | else: 557 | # exception #1 558 | rendered = "0.post%d" % pieces["distance"] 559 | if pieces["dirty"]: 560 | rendered += ".dev0" 561 | return rendered 562 | 563 | 564 | def render_git_describe(pieces: Dict[str, Any]) -> str: 565 | """TAG[-DISTANCE-gHEX][-dirty]. 
566 | 567 | Like 'git describe --tags --dirty --always'. 568 | 569 | Exceptions: 570 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 571 | """ 572 | if pieces["closest-tag"]: 573 | rendered = pieces["closest-tag"] 574 | if pieces["distance"]: 575 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 576 | else: 577 | # exception #1 578 | rendered = pieces["short"] 579 | if pieces["dirty"]: 580 | rendered += "-dirty" 581 | return rendered 582 | 583 | 584 | def render_git_describe_long(pieces: Dict[str, Any]) -> str: 585 | """TAG-DISTANCE-gHEX[-dirty]. 586 | 587 | Like 'git describe --tags --dirty --always -long'. 588 | The distance/hash is unconditional. 589 | 590 | Exceptions: 591 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 592 | """ 593 | if pieces["closest-tag"]: 594 | rendered = pieces["closest-tag"] 595 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 596 | else: 597 | # exception #1 598 | rendered = pieces["short"] 599 | if pieces["dirty"]: 600 | rendered += "-dirty" 601 | return rendered 602 | 603 | 604 | def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: 605 | """Render the given version pieces into the requested style.""" 606 | if pieces["error"]: 607 | return {"version": "unknown", 608 | "full-revisionid": pieces.get("long"), 609 | "dirty": None, 610 | "error": pieces["error"], 611 | "date": None} 612 | 613 | if not style or style == "default": 614 | style = "pep440" # the default 615 | 616 | if style == "pep440": 617 | rendered = render_pep440(pieces) 618 | elif style == "pep440-branch": 619 | rendered = render_pep440_branch(pieces) 620 | elif style == "pep440-pre": 621 | rendered = render_pep440_pre(pieces) 622 | elif style == "pep440-post": 623 | rendered = render_pep440_post(pieces) 624 | elif style == "pep440-post-branch": 625 | rendered = render_pep440_post_branch(pieces) 626 | elif style == "pep440-old": 627 | rendered = render_pep440_old(pieces) 628 | elif style == "git-describe": 629 | rendered = render_git_describe(pieces) 630 | elif style == "git-describe-long": 631 | rendered = render_git_describe_long(pieces) 632 | else: 633 | raise ValueError("unknown style '%s'" % style) 634 | 635 | return {"version": rendered, "full-revisionid": pieces["long"], 636 | "dirty": pieces["dirty"], "error": None, 637 | "date": pieces.get("date")} 638 | 639 | 640 | def get_versions() -> Dict[str, Any]: 641 | """Get version information or return default if unable to do so.""" 642 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 643 | # __file__, we can work backwards from there to the root. Some 644 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 645 | # case we can only use expanded keywords. 646 | 647 | cfg = get_config() 648 | verbose = cfg.verbose 649 | 650 | try: 651 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 652 | verbose) 653 | except NotThisMethod: 654 | pass 655 | 656 | try: 657 | root = os.path.realpath(__file__) 658 | # versionfile_source is the relative path from the top of the source 659 | # tree (where the .git directory might live) to this file. Invert 660 | # this to find the root from __file__. 
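        # Illustrative note (an assumption about this project's layout): with
        # versionfile_source set to e.g. "beam/_version.py", the loop below
        # calls os.path.dirname() twice, turning ".../repo/beam/_version.py"
        # into ".../repo".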
661 | for _ in cfg.versionfile_source.split('/'): 662 | root = os.path.dirname(root) 663 | except NameError: 664 | return {"version": "0+unknown", "full-revisionid": None, 665 | "dirty": None, 666 | "error": "unable to find root of source tree", 667 | "date": None} 668 | 669 | try: 670 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 671 | return render(pieces, cfg.style) 672 | except NotThisMethod: 673 | pass 674 | 675 | try: 676 | if cfg.parentdir_prefix: 677 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 678 | except NotThisMethod: 679 | pass 680 | 681 | return {"version": "0+unknown", "full-revisionid": None, 682 | "dirty": None, 683 | "error": "unable to compute version", "date": None} 684 | -------------------------------------------------------------------------------- /versioneer.py: -------------------------------------------------------------------------------- 1 | 2 | # Version: 0.29 3 | 4 | """The Versioneer - like a rocketeer, but for versions. 5 | 6 | The Versioneer 7 | ============== 8 | 9 | * like a rocketeer, but for versions! 10 | * https://github.com/python-versioneer/python-versioneer 11 | * Brian Warner 12 | * License: Public Domain (Unlicense) 13 | * Compatible with: Python 3.7, 3.8, 3.9, 3.10, 3.11 and pypy3 14 | * [![Latest Version][pypi-image]][pypi-url] 15 | * [![Build Status][travis-image]][travis-url] 16 | 17 | This is a tool for managing a recorded version number in setuptools-based 18 | python projects. The goal is to remove the tedious and error-prone "update 19 | the embedded version string" step from your release process. Making a new 20 | release should be as easy as recording a new tag in your version-control 21 | system, and maybe making new tarballs. 22 | 23 | 24 | ## Quick Install 25 | 26 | Versioneer provides two installation modes. The "classic" vendored mode installs 27 | a copy of versioneer into your repository. The experimental build-time dependency mode 28 | is intended to allow you to skip this step and simplify the process of upgrading. 
29 | 30 | ### Vendored mode 31 | 32 | * `pip install versioneer` to somewhere in your $PATH 33 | * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is 34 | available, so you can also use `conda install -c conda-forge versioneer` 35 | * add a `[tool.versioneer]` section to your `pyproject.toml` or a 36 | `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) 37 | * Note that you will need to add `tomli; python_version < "3.11"` to your 38 | build-time dependencies if you use `pyproject.toml` 39 | * run `versioneer install --vendor` in your source tree, commit the results 40 | * verify version information with `python setup.py version` 41 | 42 | ### Build-time dependency mode 43 | 44 | * `pip install versioneer` to somewhere in your $PATH 45 | * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is 46 | available, so you can also use `conda install -c conda-forge versioneer` 47 | * add a `[tool.versioneer]` section to your `pyproject.toml` or a 48 | `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) 49 | * add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`) 50 | to the `requires` key of the `build-system` table in `pyproject.toml`: 51 | ```toml 52 | [build-system] 53 | requires = ["setuptools", "versioneer[toml]"] 54 | build-backend = "setuptools.build_meta" 55 | ``` 56 | * run `versioneer install --no-vendor` in your source tree, commit the results 57 | * verify version information with `python setup.py version` 58 | 59 | ## Version Identifiers 60 | 61 | Source trees come from a variety of places: 62 | 63 | * a version-control system checkout (mostly used by developers) 64 | * a nightly tarball, produced by build automation 65 | * a snapshot tarball, produced by a web-based VCS browser, like github's 66 | "tarball from tag" feature 67 | * a release tarball, produced by "setup.py sdist", distributed through PyPI 68 | 69 | Within each source tree, the version identifier (either a string or a number, 70 | this tool is format-agnostic) can come from a variety of places: 71 | 72 | * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows 73 | about recent "tags" and an absolute revision-id 74 | * the name of the directory into which the tarball was unpacked 75 | * an expanded VCS keyword ($Id$, etc) 76 | * a `_version.py` created by some earlier build step 77 | 78 | For released software, the version identifier is closely related to a VCS 79 | tag. Some projects use tag names that include more than just the version 80 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool 81 | needs to strip the tag prefix to extract the version identifier. For 82 | unreleased software (between tags), the version identifier should provide 83 | enough information to help developers recreate the same tree, while also 84 | giving them an idea of roughly how old the tree is (after version 1.2, before 85 | version 1.3). Many VCS systems can report a description that captures this, 86 | for example `git describe --tags --dirty --always` reports things like 87 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 88 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has 89 | uncommitted changes). 
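
As a quick illustration (a sketch, not code that Versioneer generates at this
point), such a describe string can be taken apart with a regex much like the
one used later in this file:

    import re
    desc = "0.7-1-g574ab98-dirty"
    dirty = desc.endswith("-dirty")
    if dirty:
        desc = desc[:-len("-dirty")]
    tag, distance, short = re.match('^(.+)-([0-9]+)-g([0-9a-f]+)$', desc).groups()
    # tag == "0.7", distance == "1", short == "574ab98", dirty is True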
90 | 91 | The version identifier is used for multiple purposes: 92 | 93 | * to allow the module to self-identify its version: `myproject.__version__` 94 | * to choose a name and prefix for a 'setup.py sdist' tarball 95 | 96 | ## Theory of Operation 97 | 98 | Versioneer works by adding a special `_version.py` file into your source 99 | tree, where your `__init__.py` can import it. This `_version.py` knows how to 100 | dynamically ask the VCS tool for version information at import time. 101 | 102 | `_version.py` also contains `$Revision$` markers, and the installation 103 | process marks `_version.py` to have this marker rewritten with a tag name 104 | during the `git archive` command. As a result, generated tarballs will 105 | contain enough information to get the proper version. 106 | 107 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to 108 | the top level of your source tree, next to `setup.py` and the `setup.cfg` 109 | that configures it. This overrides several distutils/setuptools commands to 110 | compute the version when invoked, and changes `setup.py build` and `setup.py 111 | sdist` to replace `_version.py` with a small static file that contains just 112 | the generated version data. 113 | 114 | ## Installation 115 | 116 | See [INSTALL.md](./INSTALL.md) for detailed installation instructions. 117 | 118 | ## Version-String Flavors 119 | 120 | Code which uses Versioneer can learn about its version string at runtime by 121 | importing `_version` from your main `__init__.py` file and running the 122 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can 123 | import the top-level `versioneer.py` and run `get_versions()`. 124 | 125 | Both functions return a dictionary with different flavors of version 126 | information: 127 | 128 | * `['version']`: A condensed version string, rendered using the selected 129 | style. This is the most commonly used value for the project's version 130 | string. The default "pep440" style yields strings like `0.11`, 131 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section 132 | below for alternative styles. 133 | 134 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the 135 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". 136 | 137 | * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the 138 | commit date in ISO 8601 format. This will be None if the date is not 139 | available. 140 | 141 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that 142 | this is only accurate if run in a VCS checkout, otherwise it is likely to 143 | be False or None 144 | 145 | * `['error']`: if the version string could not be computed, this will be set 146 | to a string describing the problem, otherwise it will be None. It may be 147 | useful to throw an exception in setup.py if this is set, to avoid e.g. 148 | creating tarballs with a version string of "unknown". 149 | 150 | Some variants are more useful than others. Including `full-revisionid` in a 151 | bug report should allow developers to reconstruct the exact code being tested 152 | (or indicate the presence of local changes that should be shared with the 153 | developers). `version` is suitable for display in an "about" box or a CLI 154 | `--version` output: it can be easily compared against release notes and lists 155 | of bugs fixed in various releases. 
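
For example, a `setup.py` that wants to refuse to build when the version
cannot be computed might do something like this (a sketch, not text that
Versioneer installs for you):

    import versioneer

    info = versioneer.get_versions()
    if info['error']:
        raise SystemExit("cannot determine version: %s" % info['error'])
    version = info['version']  # e.g. "0.11+2.g1076c97" or "0.11+2.g1076c97.dirty"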
156 | 157 | The installer adds the following text to your `__init__.py` to place a basic 158 | version in `YOURPROJECT.__version__`: 159 | 160 | from ._version import get_versions 161 | __version__ = get_versions()['version'] 162 | del get_versions 163 | 164 | ## Styles 165 | 166 | The setup.cfg `style=` configuration controls how the VCS information is 167 | rendered into a version string. 168 | 169 | The default style, "pep440", produces a PEP440-compliant string, equal to the 170 | un-prefixed tag name for actual releases, and containing an additional "local 171 | version" section with more detail for in-between builds. For Git, this is 172 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags 173 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the 174 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and 175 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released 176 | software (exactly equal to a known tag), the identifier will only contain the 177 | stripped tag, e.g. "0.11". 178 | 179 | Other styles are available. See [details.md](details.md) in the Versioneer 180 | source tree for descriptions. 181 | 182 | ## Debugging 183 | 184 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend 185 | to return a version of "0+unknown". To investigate the problem, run `setup.py 186 | version`, which will run the version-lookup code in a verbose mode, and will 187 | display the full contents of `get_versions()` (including the `error` string, 188 | which may help identify what went wrong). 189 | 190 | ## Known Limitations 191 | 192 | Some situations are known to cause problems for Versioneer. This details the 193 | most significant ones. More can be found on Github 194 | [issues page](https://github.com/python-versioneer/python-versioneer/issues). 195 | 196 | ### Subprojects 197 | 198 | Versioneer has limited support for source trees in which `setup.py` is not in 199 | the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are 200 | two common reasons why `setup.py` might not be in the root: 201 | 202 | * Source trees which contain multiple subprojects, such as 203 | [Buildbot](https://github.com/buildbot/buildbot), which contains both 204 | "master" and "slave" subprojects, each with their own `setup.py`, 205 | `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI 206 | distributions (and upload multiple independently-installable tarballs). 207 | * Source trees whose main purpose is to contain a C library, but which also 208 | provide bindings to Python (and perhaps other languages) in subdirectories. 209 | 210 | Versioneer will look for `.git` in parent directories, and most operations 211 | should get the right version string. However `pip` and `setuptools` have bugs 212 | and implementation details which frequently cause `pip install .` from a 213 | subproject directory to fail to find a correct version string (so it usually 214 | defaults to `0+unknown`). 215 | 216 | `pip install --editable .` should work correctly. `setup.py install` might 217 | work too. 218 | 219 | Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in 220 | some later version. 221 | 222 | [Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking 223 | this issue. 
The discussion in 224 | [PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the 225 | issue from the Versioneer side in more detail. 226 | [pip PR#3176](https://github.com/pypa/pip/pull/3176) and 227 | [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve 228 | pip to let Versioneer work correctly. 229 | 230 | Versioneer-0.16 and earlier only looked for a `.git` directory next to the 231 | `setup.cfg`, so subprojects were completely unsupported with those releases. 232 | 233 | ### Editable installs with setuptools <= 18.5 234 | 235 | `setup.py develop` and `pip install --editable .` allow you to install a 236 | project into a virtualenv once, then continue editing the source code (and 237 | test) without re-installing after every change. 238 | 239 | "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a 240 | convenient way to specify executable scripts that should be installed along 241 | with the python package. 242 | 243 | These both work as expected when using modern setuptools. When using 244 | setuptools-18.5 or earlier, however, certain operations will cause 245 | `pkg_resources.DistributionNotFound` errors when running the entrypoint 246 | script, which must be resolved by re-installing the package. This happens 247 | when the install happens with one version, then the egg_info data is 248 | regenerated while a different version is checked out. Many setup.py commands 249 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into 250 | a different virtualenv), so this can be surprising. 251 | 252 | [Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes 253 | this one, but upgrading to a newer version of setuptools should probably 254 | resolve it. 255 | 256 | 257 | ## Updating Versioneer 258 | 259 | To upgrade your project to a new release of Versioneer, do the following: 260 | 261 | * install the new Versioneer (`pip install -U versioneer` or equivalent) 262 | * edit `setup.cfg` and `pyproject.toml`, if necessary, 263 | to include any new configuration settings indicated by the release notes. 264 | See [UPGRADING](./UPGRADING.md) for details. 265 | * re-run `versioneer install --[no-]vendor` in your source tree, to replace 266 | `SRC/_version.py` 267 | * commit any changed files 268 | 269 | ## Future Directions 270 | 271 | This tool is designed to make it easily extended to other version-control 272 | systems: all VCS-specific components are in separate directories like 273 | src/git/ . The top-level `versioneer.py` script is assembled from these 274 | components by running make-versioneer.py . In the future, make-versioneer.py 275 | will take a VCS name as an argument, and will construct a version of 276 | `versioneer.py` that is specific to the given VCS. It might also take the 277 | configuration arguments that are currently provided manually during 278 | installation by editing setup.py . Alternatively, it might go the other 279 | direction and include code from all supported VCS systems, reducing the 280 | number of intermediate scripts. 
281 | 282 | ## Similar projects 283 | 284 | * [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time 285 | dependency 286 | * [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of 287 | versioneer 288 | * [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools 289 | plugin 290 | 291 | ## License 292 | 293 | To make Versioneer easier to embed, all its code is dedicated to the public 294 | domain. The `_version.py` that it creates is also in the public domain. 295 | Specifically, both are released under the "Unlicense", as described in 296 | https://unlicense.org/. 297 | 298 | [pypi-image]: https://img.shields.io/pypi/v/versioneer.svg 299 | [pypi-url]: https://pypi.python.org/pypi/versioneer/ 300 | [travis-image]: 301 | https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg 302 | [travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer 303 | 304 | """ 305 | # pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring 306 | # pylint:disable=missing-class-docstring,too-many-branches,too-many-statements 307 | # pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error 308 | # pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with 309 | # pylint:disable=attribute-defined-outside-init,too-many-arguments 310 | 311 | import configparser 312 | import errno 313 | import json 314 | import os 315 | import re 316 | import subprocess 317 | import sys 318 | from pathlib import Path 319 | from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Union 320 | from typing import NoReturn 321 | import functools 322 | 323 | have_tomllib = True 324 | if sys.version_info >= (3, 11): 325 | import tomllib 326 | else: 327 | try: 328 | import tomli as tomllib 329 | except ImportError: 330 | have_tomllib = False 331 | 332 | 333 | class VersioneerConfig: 334 | """Container for Versioneer configuration parameters.""" 335 | 336 | VCS: str 337 | style: str 338 | tag_prefix: str 339 | versionfile_source: str 340 | versionfile_build: Optional[str] 341 | parentdir_prefix: Optional[str] 342 | verbose: Optional[bool] 343 | 344 | 345 | def get_root() -> str: 346 | """Get the project root directory. 347 | 348 | We require that all commands are run from the project root, i.e. the 349 | directory that contains setup.py, setup.cfg, and versioneer.py . 350 | """ 351 | root = os.path.realpath(os.path.abspath(os.getcwd())) 352 | setup_py = os.path.join(root, "setup.py") 353 | pyproject_toml = os.path.join(root, "pyproject.toml") 354 | versioneer_py = os.path.join(root, "versioneer.py") 355 | if not ( 356 | os.path.exists(setup_py) 357 | or os.path.exists(pyproject_toml) 358 | or os.path.exists(versioneer_py) 359 | ): 360 | # allow 'python path/to/setup.py COMMAND' 361 | root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) 362 | setup_py = os.path.join(root, "setup.py") 363 | pyproject_toml = os.path.join(root, "pyproject.toml") 364 | versioneer_py = os.path.join(root, "versioneer.py") 365 | if not ( 366 | os.path.exists(setup_py) 367 | or os.path.exists(pyproject_toml) 368 | or os.path.exists(versioneer_py) 369 | ): 370 | err = ("Versioneer was unable to run the project root directory. 
" 371 | "Versioneer requires setup.py to be executed from " 372 | "its immediate directory (like 'python setup.py COMMAND'), " 373 | "or in a way that lets it use sys.argv[0] to find the root " 374 | "(like 'python path/to/setup.py COMMAND').") 375 | raise VersioneerBadRootError(err) 376 | try: 377 | # Certain runtime workflows (setup.py install/develop in a setuptools 378 | # tree) execute all dependencies in a single python process, so 379 | # "versioneer" may be imported multiple times, and python's shared 380 | # module-import table will cache the first one. So we can't use 381 | # os.path.dirname(__file__), as that will find whichever 382 | # versioneer.py was first imported, even in later projects. 383 | my_path = os.path.realpath(os.path.abspath(__file__)) 384 | me_dir = os.path.normcase(os.path.splitext(my_path)[0]) 385 | vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) 386 | if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals(): 387 | print("Warning: build in %s is using versioneer.py from %s" 388 | % (os.path.dirname(my_path), versioneer_py)) 389 | except NameError: 390 | pass 391 | return root 392 | 393 | 394 | def get_config_from_root(root: str) -> VersioneerConfig: 395 | """Read the project setup.cfg file to determine Versioneer config.""" 396 | # This might raise OSError (if setup.cfg is missing), or 397 | # configparser.NoSectionError (if it lacks a [versioneer] section), or 398 | # configparser.NoOptionError (if it lacks "VCS="). See the docstring at 399 | # the top of versioneer.py for instructions on writing your setup.cfg . 400 | root_pth = Path(root) 401 | pyproject_toml = root_pth / "pyproject.toml" 402 | setup_cfg = root_pth / "setup.cfg" 403 | section: Union[Dict[str, Any], configparser.SectionProxy, None] = None 404 | if pyproject_toml.exists() and have_tomllib: 405 | try: 406 | with open(pyproject_toml, 'rb') as fobj: 407 | pp = tomllib.load(fobj) 408 | section = pp['tool']['versioneer'] 409 | except (tomllib.TOMLDecodeError, KeyError) as e: 410 | print(f"Failed to load config from {pyproject_toml}: {e}") 411 | print("Try to load it from setup.cfg") 412 | if not section: 413 | parser = configparser.ConfigParser() 414 | with open(setup_cfg) as cfg_file: 415 | parser.read_file(cfg_file) 416 | parser.get("versioneer", "VCS") # raise error if missing 417 | 418 | section = parser["versioneer"] 419 | 420 | # `cast`` really shouldn't be used, but its simplest for the 421 | # common VersioneerConfig users at the moment. 
We verify against 422 | # `None` values elsewhere where it matters 423 | 424 | cfg = VersioneerConfig() 425 | cfg.VCS = section['VCS'] 426 | cfg.style = section.get("style", "") 427 | cfg.versionfile_source = cast(str, section.get("versionfile_source")) 428 | cfg.versionfile_build = section.get("versionfile_build") 429 | cfg.tag_prefix = cast(str, section.get("tag_prefix")) 430 | if cfg.tag_prefix in ("''", '""', None): 431 | cfg.tag_prefix = "" 432 | cfg.parentdir_prefix = section.get("parentdir_prefix") 433 | if isinstance(section, configparser.SectionProxy): 434 | # Make sure configparser translates to bool 435 | cfg.verbose = section.getboolean("verbose") 436 | else: 437 | cfg.verbose = section.get("verbose") 438 | 439 | return cfg 440 | 441 | 442 | class NotThisMethod(Exception): 443 | """Exception raised if a method is not valid for the current scenario.""" 444 | 445 | 446 | # these dictionaries contain VCS-specific tools 447 | LONG_VERSION_PY: Dict[str, str] = {} 448 | HANDLERS: Dict[str, Dict[str, Callable]] = {} 449 | 450 | 451 | def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator 452 | """Create decorator to mark a method as the handler of a VCS.""" 453 | def decorate(f: Callable) -> Callable: 454 | """Store f in HANDLERS[vcs][method].""" 455 | HANDLERS.setdefault(vcs, {})[method] = f 456 | return f 457 | return decorate 458 | 459 | 460 | def run_command( 461 | commands: List[str], 462 | args: List[str], 463 | cwd: Optional[str] = None, 464 | verbose: bool = False, 465 | hide_stderr: bool = False, 466 | env: Optional[Dict[str, str]] = None, 467 | ) -> Tuple[Optional[str], Optional[int]]: 468 | """Call the given command(s).""" 469 | assert isinstance(commands, list) 470 | process = None 471 | 472 | popen_kwargs: Dict[str, Any] = {} 473 | if sys.platform == "win32": 474 | # This hides the console window if pythonw.exe is used 475 | startupinfo = subprocess.STARTUPINFO() 476 | startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW 477 | popen_kwargs["startupinfo"] = startupinfo 478 | 479 | for command in commands: 480 | try: 481 | dispcmd = str([command] + args) 482 | # remember shell=False, so use git.cmd on windows, not just git 483 | process = subprocess.Popen([command] + args, cwd=cwd, env=env, 484 | stdout=subprocess.PIPE, 485 | stderr=(subprocess.PIPE if hide_stderr 486 | else None), **popen_kwargs) 487 | break 488 | except OSError as e: 489 | if e.errno == errno.ENOENT: 490 | continue 491 | if verbose: 492 | print("unable to run %s" % dispcmd) 493 | print(e) 494 | return None, None 495 | else: 496 | if verbose: 497 | print("unable to find command, tried %s" % (commands,)) 498 | return None, None 499 | stdout = process.communicate()[0].strip().decode() 500 | if process.returncode != 0: 501 | if verbose: 502 | print("unable to run %s (error)" % dispcmd) 503 | print("stdout was %s" % stdout) 504 | return None, process.returncode 505 | return stdout, process.returncode 506 | 507 | 508 | LONG_VERSION_PY['git'] = r''' 509 | # This file helps to compute a version number in source trees obtained from 510 | # git-archive tarball (such as those provided by githubs download-from-tag 511 | # feature). Distribution tarballs (built by setup.py sdist) and build 512 | # directories (produced by setup.py build) will contain a much shorter file 513 | # that just contains the computed version number. 514 | 515 | # This file is released into the public domain. 
516 | # Generated by versioneer-0.29 517 | # https://github.com/python-versioneer/python-versioneer 518 | 519 | """Git implementation of _version.py.""" 520 | 521 | import errno 522 | import os 523 | import re 524 | import subprocess 525 | import sys 526 | from typing import Any, Callable, Dict, List, Optional, Tuple 527 | import functools 528 | 529 | 530 | def get_keywords() -> Dict[str, str]: 531 | """Get the keywords needed to look up the version information.""" 532 | # these strings will be replaced by git during git-archive. 533 | # setup.py/versioneer.py will grep for the variable names, so they must 534 | # each be defined on a line of their own. _version.py will just call 535 | # get_keywords(). 536 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" 537 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" 538 | git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" 539 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 540 | return keywords 541 | 542 | 543 | class VersioneerConfig: 544 | """Container for Versioneer configuration parameters.""" 545 | 546 | VCS: str 547 | style: str 548 | tag_prefix: str 549 | parentdir_prefix: str 550 | versionfile_source: str 551 | verbose: bool 552 | 553 | 554 | def get_config() -> VersioneerConfig: 555 | """Create, populate and return the VersioneerConfig() object.""" 556 | # these strings are filled in when 'setup.py versioneer' creates 557 | # _version.py 558 | cfg = VersioneerConfig() 559 | cfg.VCS = "git" 560 | cfg.style = "%(STYLE)s" 561 | cfg.tag_prefix = "%(TAG_PREFIX)s" 562 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" 563 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" 564 | cfg.verbose = False 565 | return cfg 566 | 567 | 568 | class NotThisMethod(Exception): 569 | """Exception raised if a method is not valid for the current scenario.""" 570 | 571 | 572 | LONG_VERSION_PY: Dict[str, str] = {} 573 | HANDLERS: Dict[str, Dict[str, Callable]] = {} 574 | 575 | 576 | def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator 577 | """Create decorator to mark a method as the handler of a VCS.""" 578 | def decorate(f: Callable) -> Callable: 579 | """Store f in HANDLERS[vcs][method].""" 580 | if vcs not in HANDLERS: 581 | HANDLERS[vcs] = {} 582 | HANDLERS[vcs][method] = f 583 | return f 584 | return decorate 585 | 586 | 587 | def run_command( 588 | commands: List[str], 589 | args: List[str], 590 | cwd: Optional[str] = None, 591 | verbose: bool = False, 592 | hide_stderr: bool = False, 593 | env: Optional[Dict[str, str]] = None, 594 | ) -> Tuple[Optional[str], Optional[int]]: 595 | """Call the given command(s).""" 596 | assert isinstance(commands, list) 597 | process = None 598 | 599 | popen_kwargs: Dict[str, Any] = {} 600 | if sys.platform == "win32": 601 | # This hides the console window if pythonw.exe is used 602 | startupinfo = subprocess.STARTUPINFO() 603 | startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW 604 | popen_kwargs["startupinfo"] = startupinfo 605 | 606 | for command in commands: 607 | try: 608 | dispcmd = str([command] + args) 609 | # remember shell=False, so use git.cmd on windows, not just git 610 | process = subprocess.Popen([command] + args, cwd=cwd, env=env, 611 | stdout=subprocess.PIPE, 612 | stderr=(subprocess.PIPE if hide_stderr 613 | else None), **popen_kwargs) 614 | break 615 | except OSError as e: 616 | if e.errno == errno.ENOENT: 617 | continue 618 | if verbose: 619 | print("unable to run %%s" %% dispcmd) 620 | print(e) 621 | return None, None 622 | else: 623 | if verbose: 624 | 
print("unable to find command, tried %%s" %% (commands,)) 625 | return None, None 626 | stdout = process.communicate()[0].strip().decode() 627 | if process.returncode != 0: 628 | if verbose: 629 | print("unable to run %%s (error)" %% dispcmd) 630 | print("stdout was %%s" %% stdout) 631 | return None, process.returncode 632 | return stdout, process.returncode 633 | 634 | 635 | def versions_from_parentdir( 636 | parentdir_prefix: str, 637 | root: str, 638 | verbose: bool, 639 | ) -> Dict[str, Any]: 640 | """Try to determine the version from the parent directory name. 641 | 642 | Source tarballs conventionally unpack into a directory that includes both 643 | the project name and a version string. We will also support searching up 644 | two directory levels for an appropriately named parent directory 645 | """ 646 | rootdirs = [] 647 | 648 | for _ in range(3): 649 | dirname = os.path.basename(root) 650 | if dirname.startswith(parentdir_prefix): 651 | return {"version": dirname[len(parentdir_prefix):], 652 | "full-revisionid": None, 653 | "dirty": False, "error": None, "date": None} 654 | rootdirs.append(root) 655 | root = os.path.dirname(root) # up a level 656 | 657 | if verbose: 658 | print("Tried directories %%s but none started with prefix %%s" %% 659 | (str(rootdirs), parentdir_prefix)) 660 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 661 | 662 | 663 | @register_vcs_handler("git", "get_keywords") 664 | def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: 665 | """Extract version information from the given file.""" 666 | # the code embedded in _version.py can just fetch the value of these 667 | # keywords. When used from setup.py, we don't want to import _version.py, 668 | # so we do it with a regexp instead. This function is not used from 669 | # _version.py. 670 | keywords: Dict[str, str] = {} 671 | try: 672 | with open(versionfile_abs, "r") as fobj: 673 | for line in fobj: 674 | if line.strip().startswith("git_refnames ="): 675 | mo = re.search(r'=\s*"(.*)"', line) 676 | if mo: 677 | keywords["refnames"] = mo.group(1) 678 | if line.strip().startswith("git_full ="): 679 | mo = re.search(r'=\s*"(.*)"', line) 680 | if mo: 681 | keywords["full"] = mo.group(1) 682 | if line.strip().startswith("git_date ="): 683 | mo = re.search(r'=\s*"(.*)"', line) 684 | if mo: 685 | keywords["date"] = mo.group(1) 686 | except OSError: 687 | pass 688 | return keywords 689 | 690 | 691 | @register_vcs_handler("git", "keywords") 692 | def git_versions_from_keywords( 693 | keywords: Dict[str, str], 694 | tag_prefix: str, 695 | verbose: bool, 696 | ) -> Dict[str, Any]: 697 | """Get version information from git keywords.""" 698 | if "refnames" not in keywords: 699 | raise NotThisMethod("Short version file found") 700 | date = keywords.get("date") 701 | if date is not None: 702 | # Use only the last line. Previous lines may contain GPG signature 703 | # information. 704 | date = date.splitlines()[-1] 705 | 706 | # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant 707 | # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 708 | # -like" string, which we must then edit to make compliant), because 709 | # it's been around since git-1.5.3, and it's too difficult to 710 | # discover which version we're using, or to work around using an 711 | # older one. 
712 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 713 | refnames = keywords["refnames"].strip() 714 | if refnames.startswith("$Format"): 715 | if verbose: 716 | print("keywords are unexpanded, not using") 717 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 718 | refs = {r.strip() for r in refnames.strip("()").split(",")} 719 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 720 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 721 | TAG = "tag: " 722 | tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} 723 | if not tags: 724 | # Either we're using git < 1.8.3, or there really are no tags. We use 725 | # a heuristic: assume all version tags have a digit. The old git %%d 726 | # expansion behaves like git log --decorate=short and strips out the 727 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 728 | # between branches and tags. By ignoring refnames without digits, we 729 | # filter out many common branch names like "release" and 730 | # "stabilization", as well as "HEAD" and "master". 731 | tags = {r for r in refs if re.search(r'\d', r)} 732 | if verbose: 733 | print("discarding '%%s', no digits" %% ",".join(refs - tags)) 734 | if verbose: 735 | print("likely tags: %%s" %% ",".join(sorted(tags))) 736 | for ref in sorted(tags): 737 | # sorting will prefer e.g. "2.0" over "2.0rc1" 738 | if ref.startswith(tag_prefix): 739 | r = ref[len(tag_prefix):] 740 | # Filter out refs that exactly match prefix or that don't start 741 | # with a number once the prefix is stripped (mostly a concern 742 | # when prefix is '') 743 | if not re.match(r'\d', r): 744 | continue 745 | if verbose: 746 | print("picking %%s" %% r) 747 | return {"version": r, 748 | "full-revisionid": keywords["full"].strip(), 749 | "dirty": False, "error": None, 750 | "date": date} 751 | # no suitable tags, so version is "0+unknown", but full hex is still there 752 | if verbose: 753 | print("no suitable tags, using unknown + full revision id") 754 | return {"version": "0+unknown", 755 | "full-revisionid": keywords["full"].strip(), 756 | "dirty": False, "error": "no suitable tags", "date": None} 757 | 758 | 759 | @register_vcs_handler("git", "pieces_from_vcs") 760 | def git_pieces_from_vcs( 761 | tag_prefix: str, 762 | root: str, 763 | verbose: bool, 764 | runner: Callable = run_command 765 | ) -> Dict[str, Any]: 766 | """Get version from 'git describe' in the root of the source tree. 767 | 768 | This only gets called if the git-archive 'subst' keywords were *not* 769 | expanded, and _version.py hasn't already been rewritten with a short 770 | version string, meaning we're inside a checked out source tree. 771 | """ 772 | GITS = ["git"] 773 | if sys.platform == "win32": 774 | GITS = ["git.cmd", "git.exe"] 775 | 776 | # GIT_DIR can interfere with correct operation of Versioneer. 777 | # It may be intended to be passed to the Versioneer-versioned project, 778 | # but that should not change where we get our version from. 
779 | env = os.environ.copy() 780 | env.pop("GIT_DIR", None) 781 | runner = functools.partial(runner, env=env) 782 | 783 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, 784 | hide_stderr=not verbose) 785 | if rc != 0: 786 | if verbose: 787 | print("Directory %%s not under git control" %% root) 788 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 789 | 790 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 791 | # if there isn't one, this yields HEX[-dirty] (no NUM) 792 | describe_out, rc = runner(GITS, [ 793 | "describe", "--tags", "--dirty", "--always", "--long", 794 | "--match", f"{tag_prefix}[[:digit:]]*" 795 | ], cwd=root) 796 | # --long was added in git-1.5.5 797 | if describe_out is None: 798 | raise NotThisMethod("'git describe' failed") 799 | describe_out = describe_out.strip() 800 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) 801 | if full_out is None: 802 | raise NotThisMethod("'git rev-parse' failed") 803 | full_out = full_out.strip() 804 | 805 | pieces: Dict[str, Any] = {} 806 | pieces["long"] = full_out 807 | pieces["short"] = full_out[:7] # maybe improved later 808 | pieces["error"] = None 809 | 810 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], 811 | cwd=root) 812 | # --abbrev-ref was added in git-1.6.3 813 | if rc != 0 or branch_name is None: 814 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") 815 | branch_name = branch_name.strip() 816 | 817 | if branch_name == "HEAD": 818 | # If we aren't exactly on a branch, pick a branch which represents 819 | # the current commit. If all else fails, we are on a branchless 820 | # commit. 821 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) 822 | # --contains was added in git-1.5.4 823 | if rc != 0 or branches is None: 824 | raise NotThisMethod("'git branch --contains' returned error") 825 | branches = branches.split("\n") 826 | 827 | # Remove the first line if we're running detached 828 | if "(" in branches[0]: 829 | branches.pop(0) 830 | 831 | # Strip off the leading "* " from the list of branches. 832 | branches = [branch[2:] for branch in branches] 833 | if "master" in branches: 834 | branch_name = "master" 835 | elif not branches: 836 | branch_name = None 837 | else: 838 | # Pick the first branch that is returned. Good or bad. 839 | branch_name = branches[0] 840 | 841 | pieces["branch"] = branch_name 842 | 843 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 844 | # TAG might have hyphens. 845 | git_describe = describe_out 846 | 847 | # look for -dirty suffix 848 | dirty = git_describe.endswith("-dirty") 849 | pieces["dirty"] = dirty 850 | if dirty: 851 | git_describe = git_describe[:git_describe.rindex("-dirty")] 852 | 853 | # now we have TAG-NUM-gHEX or HEX 854 | 855 | if "-" in git_describe: 856 | # TAG-NUM-gHEX 857 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 858 | if not mo: 859 | # unparsable. Maybe git-describe is misbehaving? 
860 | pieces["error"] = ("unable to parse git-describe output: '%%s'" 861 | %% describe_out) 862 | return pieces 863 | 864 | # tag 865 | full_tag = mo.group(1) 866 | if not full_tag.startswith(tag_prefix): 867 | if verbose: 868 | fmt = "tag '%%s' doesn't start with prefix '%%s'" 869 | print(fmt %% (full_tag, tag_prefix)) 870 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" 871 | %% (full_tag, tag_prefix)) 872 | return pieces 873 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 874 | 875 | # distance: number of commits since tag 876 | pieces["distance"] = int(mo.group(2)) 877 | 878 | # commit: short hex revision ID 879 | pieces["short"] = mo.group(3) 880 | 881 | else: 882 | # HEX: no tags 883 | pieces["closest-tag"] = None 884 | out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) 885 | pieces["distance"] = len(out.split()) # total number of commits 886 | 887 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 888 | date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() 889 | # Use only the last line. Previous lines may contain GPG signature 890 | # information. 891 | date = date.splitlines()[-1] 892 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 893 | 894 | return pieces 895 | 896 | 897 | def plus_or_dot(pieces: Dict[str, Any]) -> str: 898 | """Return a + if we don't already have one, else return a .""" 899 | if "+" in pieces.get("closest-tag", ""): 900 | return "." 901 | return "+" 902 | 903 | 904 | def render_pep440(pieces: Dict[str, Any]) -> str: 905 | """Build up version string, with post-release "local version identifier". 906 | 907 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 908 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 909 | 910 | Exceptions: 911 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 912 | """ 913 | if pieces["closest-tag"]: 914 | rendered = pieces["closest-tag"] 915 | if pieces["distance"] or pieces["dirty"]: 916 | rendered += plus_or_dot(pieces) 917 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 918 | if pieces["dirty"]: 919 | rendered += ".dirty" 920 | else: 921 | # exception #1 922 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], 923 | pieces["short"]) 924 | if pieces["dirty"]: 925 | rendered += ".dirty" 926 | return rendered 927 | 928 | 929 | def render_pep440_branch(pieces: Dict[str, Any]) -> str: 930 | """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . 931 | 932 | The ".dev0" means not master branch. Note that .dev0 sorts backwards 933 | (a feature branch will appear "older" than the master branch). 934 | 935 | Exceptions: 936 | 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] 937 | """ 938 | if pieces["closest-tag"]: 939 | rendered = pieces["closest-tag"] 940 | if pieces["distance"] or pieces["dirty"]: 941 | if pieces["branch"] != "master": 942 | rendered += ".dev0" 943 | rendered += plus_or_dot(pieces) 944 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 945 | if pieces["dirty"]: 946 | rendered += ".dirty" 947 | else: 948 | # exception #1 949 | rendered = "0" 950 | if pieces["branch"] != "master": 951 | rendered += ".dev0" 952 | rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], 953 | pieces["short"]) 954 | if pieces["dirty"]: 955 | rendered += ".dirty" 956 | return rendered 957 | 958 | 959 | def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: 960 | """Split pep440 version string at the post-release segment. 
961 | 962 | Returns the release segments before the post-release and the 963 | post-release version number (or -1 if no post-release segment is present). 964 | """ 965 | vc = str.split(ver, ".post") 966 | return vc[0], int(vc[1] or 0) if len(vc) == 2 else None 967 | 968 | 969 | def render_pep440_pre(pieces: Dict[str, Any]) -> str: 970 | """TAG[.postN.devDISTANCE] -- No -dirty. 971 | 972 | Exceptions: 973 | 1: no tags. 0.post0.devDISTANCE 974 | """ 975 | if pieces["closest-tag"]: 976 | if pieces["distance"]: 977 | # update the post release segment 978 | tag_version, post_version = pep440_split_post(pieces["closest-tag"]) 979 | rendered = tag_version 980 | if post_version is not None: 981 | rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"]) 982 | else: 983 | rendered += ".post0.dev%%d" %% (pieces["distance"]) 984 | else: 985 | # no commits, use the tag as the version 986 | rendered = pieces["closest-tag"] 987 | else: 988 | # exception #1 989 | rendered = "0.post0.dev%%d" %% pieces["distance"] 990 | return rendered 991 | 992 | 993 | def render_pep440_post(pieces: Dict[str, Any]) -> str: 994 | """TAG[.postDISTANCE[.dev0]+gHEX] . 995 | 996 | The ".dev0" means dirty. Note that .dev0 sorts backwards 997 | (a dirty tree will appear "older" than the corresponding clean one), 998 | but you shouldn't be releasing software with -dirty anyways. 999 | 1000 | Exceptions: 1001 | 1: no tags. 0.postDISTANCE[.dev0] 1002 | """ 1003 | if pieces["closest-tag"]: 1004 | rendered = pieces["closest-tag"] 1005 | if pieces["distance"] or pieces["dirty"]: 1006 | rendered += ".post%%d" %% pieces["distance"] 1007 | if pieces["dirty"]: 1008 | rendered += ".dev0" 1009 | rendered += plus_or_dot(pieces) 1010 | rendered += "g%%s" %% pieces["short"] 1011 | else: 1012 | # exception #1 1013 | rendered = "0.post%%d" %% pieces["distance"] 1014 | if pieces["dirty"]: 1015 | rendered += ".dev0" 1016 | rendered += "+g%%s" %% pieces["short"] 1017 | return rendered 1018 | 1019 | 1020 | def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: 1021 | """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . 1022 | 1023 | The ".dev0" means not master branch. 1024 | 1025 | Exceptions: 1026 | 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] 1027 | """ 1028 | if pieces["closest-tag"]: 1029 | rendered = pieces["closest-tag"] 1030 | if pieces["distance"] or pieces["dirty"]: 1031 | rendered += ".post%%d" %% pieces["distance"] 1032 | if pieces["branch"] != "master": 1033 | rendered += ".dev0" 1034 | rendered += plus_or_dot(pieces) 1035 | rendered += "g%%s" %% pieces["short"] 1036 | if pieces["dirty"]: 1037 | rendered += ".dirty" 1038 | else: 1039 | # exception #1 1040 | rendered = "0.post%%d" %% pieces["distance"] 1041 | if pieces["branch"] != "master": 1042 | rendered += ".dev0" 1043 | rendered += "+g%%s" %% pieces["short"] 1044 | if pieces["dirty"]: 1045 | rendered += ".dirty" 1046 | return rendered 1047 | 1048 | 1049 | def render_pep440_old(pieces: Dict[str, Any]) -> str: 1050 | """TAG[.postDISTANCE[.dev0]] . 1051 | 1052 | The ".dev0" means dirty. 1053 | 1054 | Exceptions: 1055 | 1: no tags. 
0.postDISTANCE[.dev0] 1056 | """ 1057 | if pieces["closest-tag"]: 1058 | rendered = pieces["closest-tag"] 1059 | if pieces["distance"] or pieces["dirty"]: 1060 | rendered += ".post%%d" %% pieces["distance"] 1061 | if pieces["dirty"]: 1062 | rendered += ".dev0" 1063 | else: 1064 | # exception #1 1065 | rendered = "0.post%%d" %% pieces["distance"] 1066 | if pieces["dirty"]: 1067 | rendered += ".dev0" 1068 | return rendered 1069 | 1070 | 1071 | def render_git_describe(pieces: Dict[str, Any]) -> str: 1072 | """TAG[-DISTANCE-gHEX][-dirty]. 1073 | 1074 | Like 'git describe --tags --dirty --always'. 1075 | 1076 | Exceptions: 1077 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 1078 | """ 1079 | if pieces["closest-tag"]: 1080 | rendered = pieces["closest-tag"] 1081 | if pieces["distance"]: 1082 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 1083 | else: 1084 | # exception #1 1085 | rendered = pieces["short"] 1086 | if pieces["dirty"]: 1087 | rendered += "-dirty" 1088 | return rendered 1089 | 1090 | 1091 | def render_git_describe_long(pieces: Dict[str, Any]) -> str: 1092 | """TAG-DISTANCE-gHEX[-dirty]. 1093 | 1094 | Like 'git describe --tags --dirty --always -long'. 1095 | The distance/hash is unconditional. 1096 | 1097 | Exceptions: 1098 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 1099 | """ 1100 | if pieces["closest-tag"]: 1101 | rendered = pieces["closest-tag"] 1102 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 1103 | else: 1104 | # exception #1 1105 | rendered = pieces["short"] 1106 | if pieces["dirty"]: 1107 | rendered += "-dirty" 1108 | return rendered 1109 | 1110 | 1111 | def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: 1112 | """Render the given version pieces into the requested style.""" 1113 | if pieces["error"]: 1114 | return {"version": "unknown", 1115 | "full-revisionid": pieces.get("long"), 1116 | "dirty": None, 1117 | "error": pieces["error"], 1118 | "date": None} 1119 | 1120 | if not style or style == "default": 1121 | style = "pep440" # the default 1122 | 1123 | if style == "pep440": 1124 | rendered = render_pep440(pieces) 1125 | elif style == "pep440-branch": 1126 | rendered = render_pep440_branch(pieces) 1127 | elif style == "pep440-pre": 1128 | rendered = render_pep440_pre(pieces) 1129 | elif style == "pep440-post": 1130 | rendered = render_pep440_post(pieces) 1131 | elif style == "pep440-post-branch": 1132 | rendered = render_pep440_post_branch(pieces) 1133 | elif style == "pep440-old": 1134 | rendered = render_pep440_old(pieces) 1135 | elif style == "git-describe": 1136 | rendered = render_git_describe(pieces) 1137 | elif style == "git-describe-long": 1138 | rendered = render_git_describe_long(pieces) 1139 | else: 1140 | raise ValueError("unknown style '%%s'" %% style) 1141 | 1142 | return {"version": rendered, "full-revisionid": pieces["long"], 1143 | "dirty": pieces["dirty"], "error": None, 1144 | "date": pieces.get("date")} 1145 | 1146 | 1147 | def get_versions() -> Dict[str, Any]: 1148 | """Get version information or return default if unable to do so.""" 1149 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 1150 | # __file__, we can work backwards from there to the root. Some 1151 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 1152 | # case we can only use expanded keywords. 
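    # Fallback order (illustrative summary): expanded git-archive keywords
    # first, then "git describe" on a real checkout, then the parent-directory
    # name, and finally the "0+unknown" placeholder.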
1153 | 1154 | cfg = get_config() 1155 | verbose = cfg.verbose 1156 | 1157 | try: 1158 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 1159 | verbose) 1160 | except NotThisMethod: 1161 | pass 1162 | 1163 | try: 1164 | root = os.path.realpath(__file__) 1165 | # versionfile_source is the relative path from the top of the source 1166 | # tree (where the .git directory might live) to this file. Invert 1167 | # this to find the root from __file__. 1168 | for _ in cfg.versionfile_source.split('/'): 1169 | root = os.path.dirname(root) 1170 | except NameError: 1171 | return {"version": "0+unknown", "full-revisionid": None, 1172 | "dirty": None, 1173 | "error": "unable to find root of source tree", 1174 | "date": None} 1175 | 1176 | try: 1177 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 1178 | return render(pieces, cfg.style) 1179 | except NotThisMethod: 1180 | pass 1181 | 1182 | try: 1183 | if cfg.parentdir_prefix: 1184 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 1185 | except NotThisMethod: 1186 | pass 1187 | 1188 | return {"version": "0+unknown", "full-revisionid": None, 1189 | "dirty": None, 1190 | "error": "unable to compute version", "date": None} 1191 | ''' 1192 | 1193 | 1194 | @register_vcs_handler("git", "get_keywords") 1195 | def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: 1196 | """Extract version information from the given file.""" 1197 | # the code embedded in _version.py can just fetch the value of these 1198 | # keywords. When used from setup.py, we don't want to import _version.py, 1199 | # so we do it with a regexp instead. This function is not used from 1200 | # _version.py. 1201 | keywords: Dict[str, str] = {} 1202 | try: 1203 | with open(versionfile_abs, "r") as fobj: 1204 | for line in fobj: 1205 | if line.strip().startswith("git_refnames ="): 1206 | mo = re.search(r'=\s*"(.*)"', line) 1207 | if mo: 1208 | keywords["refnames"] = mo.group(1) 1209 | if line.strip().startswith("git_full ="): 1210 | mo = re.search(r'=\s*"(.*)"', line) 1211 | if mo: 1212 | keywords["full"] = mo.group(1) 1213 | if line.strip().startswith("git_date ="): 1214 | mo = re.search(r'=\s*"(.*)"', line) 1215 | if mo: 1216 | keywords["date"] = mo.group(1) 1217 | except OSError: 1218 | pass 1219 | return keywords 1220 | 1221 | 1222 | @register_vcs_handler("git", "keywords") 1223 | def git_versions_from_keywords( 1224 | keywords: Dict[str, str], 1225 | tag_prefix: str, 1226 | verbose: bool, 1227 | ) -> Dict[str, Any]: 1228 | """Get version information from git keywords.""" 1229 | if "refnames" not in keywords: 1230 | raise NotThisMethod("Short version file found") 1231 | date = keywords.get("date") 1232 | if date is not None: 1233 | # Use only the last line. Previous lines may contain GPG signature 1234 | # information. 1235 | date = date.splitlines()[-1] 1236 | 1237 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 1238 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 1239 | # -like" string, which we must then edit to make compliant), because 1240 | # it's been around since git-1.5.3, and it's too difficult to 1241 | # discover which version we're using, or to work around using an 1242 | # older one. 
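        # Illustrative example: "2023-01-02 03:04:05 +0100" becomes
        # "2023-01-02T03:04:05+0100" after the two replace() calls below.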
1243 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 1244 | refnames = keywords["refnames"].strip() 1245 | if refnames.startswith("$Format"): 1246 | if verbose: 1247 | print("keywords are unexpanded, not using") 1248 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 1249 | refs = {r.strip() for r in refnames.strip("()").split(",")} 1250 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 1251 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 1252 | TAG = "tag: " 1253 | tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} 1254 | if not tags: 1255 | # Either we're using git < 1.8.3, or there really are no tags. We use 1256 | # a heuristic: assume all version tags have a digit. The old git %d 1257 | # expansion behaves like git log --decorate=short and strips out the 1258 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 1259 | # between branches and tags. By ignoring refnames without digits, we 1260 | # filter out many common branch names like "release" and 1261 | # "stabilization", as well as "HEAD" and "master". 1262 | tags = {r for r in refs if re.search(r'\d', r)} 1263 | if verbose: 1264 | print("discarding '%s', no digits" % ",".join(refs - tags)) 1265 | if verbose: 1266 | print("likely tags: %s" % ",".join(sorted(tags))) 1267 | for ref in sorted(tags): 1268 | # sorting will prefer e.g. "2.0" over "2.0rc1" 1269 | if ref.startswith(tag_prefix): 1270 | r = ref[len(tag_prefix):] 1271 | # Filter out refs that exactly match prefix or that don't start 1272 | # with a number once the prefix is stripped (mostly a concern 1273 | # when prefix is '') 1274 | if not re.match(r'\d', r): 1275 | continue 1276 | if verbose: 1277 | print("picking %s" % r) 1278 | return {"version": r, 1279 | "full-revisionid": keywords["full"].strip(), 1280 | "dirty": False, "error": None, 1281 | "date": date} 1282 | # no suitable tags, so version is "0+unknown", but full hex is still there 1283 | if verbose: 1284 | print("no suitable tags, using unknown + full revision id") 1285 | return {"version": "0+unknown", 1286 | "full-revisionid": keywords["full"].strip(), 1287 | "dirty": False, "error": "no suitable tags", "date": None} 1288 | 1289 | 1290 | @register_vcs_handler("git", "pieces_from_vcs") 1291 | def git_pieces_from_vcs( 1292 | tag_prefix: str, 1293 | root: str, 1294 | verbose: bool, 1295 | runner: Callable = run_command 1296 | ) -> Dict[str, Any]: 1297 | """Get version from 'git describe' in the root of the source tree. 1298 | 1299 | This only gets called if the git-archive 'subst' keywords were *not* 1300 | expanded, and _version.py hasn't already been rewritten with a short 1301 | version string, meaning we're inside a checked out source tree. 1302 | """ 1303 | GITS = ["git"] 1304 | if sys.platform == "win32": 1305 | GITS = ["git.cmd", "git.exe"] 1306 | 1307 | # GIT_DIR can interfere with correct operation of Versioneer. 1308 | # It may be intended to be passed to the Versioneer-versioned project, 1309 | # but that should not change where we get our version from. 
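    # Illustrative example: a caller invoking the build with GIT_DIR pointing
    # at some other repository would otherwise have git describe the wrong
    # tree, so the variable is dropped from the child environment below.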
1310 | env = os.environ.copy() 1311 | env.pop("GIT_DIR", None) 1312 | runner = functools.partial(runner, env=env) 1313 | 1314 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, 1315 | hide_stderr=not verbose) 1316 | if rc != 0: 1317 | if verbose: 1318 | print("Directory %s not under git control" % root) 1319 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 1320 | 1321 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 1322 | # if there isn't one, this yields HEX[-dirty] (no NUM) 1323 | describe_out, rc = runner(GITS, [ 1324 | "describe", "--tags", "--dirty", "--always", "--long", 1325 | "--match", f"{tag_prefix}[[:digit:]]*" 1326 | ], cwd=root) 1327 | # --long was added in git-1.5.5 1328 | if describe_out is None: 1329 | raise NotThisMethod("'git describe' failed") 1330 | describe_out = describe_out.strip() 1331 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) 1332 | if full_out is None: 1333 | raise NotThisMethod("'git rev-parse' failed") 1334 | full_out = full_out.strip() 1335 | 1336 | pieces: Dict[str, Any] = {} 1337 | pieces["long"] = full_out 1338 | pieces["short"] = full_out[:7] # maybe improved later 1339 | pieces["error"] = None 1340 | 1341 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], 1342 | cwd=root) 1343 | # --abbrev-ref was added in git-1.6.3 1344 | if rc != 0 or branch_name is None: 1345 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") 1346 | branch_name = branch_name.strip() 1347 | 1348 | if branch_name == "HEAD": 1349 | # If we aren't exactly on a branch, pick a branch which represents 1350 | # the current commit. If all else fails, we are on a branchless 1351 | # commit. 1352 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) 1353 | # --contains was added in git-1.5.4 1354 | if rc != 0 or branches is None: 1355 | raise NotThisMethod("'git branch --contains' returned error") 1356 | branches = branches.split("\n") 1357 | 1358 | # Remove the first line if we're running detached 1359 | if "(" in branches[0]: 1360 | branches.pop(0) 1361 | 1362 | # Strip off the leading "* " from the list of branches. 1363 | branches = [branch[2:] for branch in branches] 1364 | if "master" in branches: 1365 | branch_name = "master" 1366 | elif not branches: 1367 | branch_name = None 1368 | else: 1369 | # Pick the first branch that is returned. Good or bad. 1370 | branch_name = branches[0] 1371 | 1372 | pieces["branch"] = branch_name 1373 | 1374 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 1375 | # TAG might have hyphens. 1376 | git_describe = describe_out 1377 | 1378 | # look for -dirty suffix 1379 | dirty = git_describe.endswith("-dirty") 1380 | pieces["dirty"] = dirty 1381 | if dirty: 1382 | git_describe = git_describe[:git_describe.rindex("-dirty")] 1383 | 1384 | # now we have TAG-NUM-gHEX or HEX 1385 | 1386 | if "-" in git_describe: 1387 | # TAG-NUM-gHEX 1388 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 1389 | if not mo: 1390 | # unparsable. Maybe git-describe is misbehaving? 
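            # Illustrative examples (hypothetical values): "v1.2.3-4-g0123abc"
            # matches the regex above, while describe output missing the
            # "-g<hex>" suffix would not and falls through to this error path.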
1391 | pieces["error"] = ("unable to parse git-describe output: '%s'" 1392 | % describe_out) 1393 | return pieces 1394 | 1395 | # tag 1396 | full_tag = mo.group(1) 1397 | if not full_tag.startswith(tag_prefix): 1398 | if verbose: 1399 | fmt = "tag '%s' doesn't start with prefix '%s'" 1400 | print(fmt % (full_tag, tag_prefix)) 1401 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 1402 | % (full_tag, tag_prefix)) 1403 | return pieces 1404 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 1405 | 1406 | # distance: number of commits since tag 1407 | pieces["distance"] = int(mo.group(2)) 1408 | 1409 | # commit: short hex revision ID 1410 | pieces["short"] = mo.group(3) 1411 | 1412 | else: 1413 | # HEX: no tags 1414 | pieces["closest-tag"] = None 1415 | out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) 1416 | pieces["distance"] = len(out.split()) # total number of commits 1417 | 1418 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 1419 | date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() 1420 | # Use only the last line. Previous lines may contain GPG signature 1421 | # information. 1422 | date = date.splitlines()[-1] 1423 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 1424 | 1425 | return pieces 1426 | 1427 | 1428 | def do_vcs_install(versionfile_source: str, ipy: Optional[str]) -> None: 1429 | """Git-specific installation logic for Versioneer. 1430 | 1431 | For Git, this means creating/changing .gitattributes to mark _version.py 1432 | for export-subst keyword substitution. 1433 | """ 1434 | GITS = ["git"] 1435 | if sys.platform == "win32": 1436 | GITS = ["git.cmd", "git.exe"] 1437 | files = [versionfile_source] 1438 | if ipy: 1439 | files.append(ipy) 1440 | if "VERSIONEER_PEP518" not in globals(): 1441 | try: 1442 | my_path = __file__ 1443 | if my_path.endswith((".pyc", ".pyo")): 1444 | my_path = os.path.splitext(my_path)[0] + ".py" 1445 | versioneer_file = os.path.relpath(my_path) 1446 | except NameError: 1447 | versioneer_file = "versioneer.py" 1448 | files.append(versioneer_file) 1449 | present = False 1450 | try: 1451 | with open(".gitattributes", "r") as fobj: 1452 | for line in fobj: 1453 | if line.strip().startswith(versionfile_source): 1454 | if "export-subst" in line.strip().split()[1:]: 1455 | present = True 1456 | break 1457 | except OSError: 1458 | pass 1459 | if not present: 1460 | with open(".gitattributes", "a+") as fobj: 1461 | fobj.write(f"{versionfile_source} export-subst\n") 1462 | files.append(".gitattributes") 1463 | run_command(GITS, ["add", "--"] + files) 1464 | 1465 | 1466 | def versions_from_parentdir( 1467 | parentdir_prefix: str, 1468 | root: str, 1469 | verbose: bool, 1470 | ) -> Dict[str, Any]: 1471 | """Try to determine the version from the parent directory name. 1472 | 1473 | Source tarballs conventionally unpack into a directory that includes both 1474 | the project name and a version string. 
We will also support searching up 1475 | two directory levels for an appropriately named parent directory 1476 | """ 1477 | rootdirs = [] 1478 | 1479 | for _ in range(3): 1480 | dirname = os.path.basename(root) 1481 | if dirname.startswith(parentdir_prefix): 1482 | return {"version": dirname[len(parentdir_prefix):], 1483 | "full-revisionid": None, 1484 | "dirty": False, "error": None, "date": None} 1485 | rootdirs.append(root) 1486 | root = os.path.dirname(root) # up a level 1487 | 1488 | if verbose: 1489 | print("Tried directories %s but none started with prefix %s" % 1490 | (str(rootdirs), parentdir_prefix)) 1491 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 1492 | 1493 | 1494 | SHORT_VERSION_PY = """ 1495 | # This file was generated by 'versioneer.py' (0.29) from 1496 | # revision-control system data, or from the parent directory name of an 1497 | # unpacked source archive. Distribution tarballs contain a pre-generated copy 1498 | # of this file. 1499 | 1500 | import json 1501 | 1502 | version_json = ''' 1503 | %s 1504 | ''' # END VERSION_JSON 1505 | 1506 | 1507 | def get_versions(): 1508 | return json.loads(version_json) 1509 | """ 1510 | 1511 | 1512 | def versions_from_file(filename: str) -> Dict[str, Any]: 1513 | """Try to determine the version from _version.py if present.""" 1514 | try: 1515 | with open(filename) as f: 1516 | contents = f.read() 1517 | except OSError: 1518 | raise NotThisMethod("unable to read _version.py") 1519 | mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", 1520 | contents, re.M | re.S) 1521 | if not mo: 1522 | mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", 1523 | contents, re.M | re.S) 1524 | if not mo: 1525 | raise NotThisMethod("no version_json in _version.py") 1526 | return json.loads(mo.group(1)) 1527 | 1528 | 1529 | def write_to_version_file(filename: str, versions: Dict[str, Any]) -> None: 1530 | """Write the given version number to the given _version.py file.""" 1531 | contents = json.dumps(versions, sort_keys=True, 1532 | indent=1, separators=(",", ": ")) 1533 | with open(filename, "w") as f: 1534 | f.write(SHORT_VERSION_PY % contents) 1535 | 1536 | print("set %s to '%s'" % (filename, versions["version"])) 1537 | 1538 | 1539 | def plus_or_dot(pieces: Dict[str, Any]) -> str: 1540 | """Return a + if we don't already have one, else return a .""" 1541 | if "+" in pieces.get("closest-tag", ""): 1542 | return "." 1543 | return "+" 1544 | 1545 | 1546 | def render_pep440(pieces: Dict[str, Any]) -> str: 1547 | """Build up version string, with post-release "local version identifier". 1548 | 1549 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 1550 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 1551 | 1552 | Exceptions: 1553 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 1554 | """ 1555 | if pieces["closest-tag"]: 1556 | rendered = pieces["closest-tag"] 1557 | if pieces["distance"] or pieces["dirty"]: 1558 | rendered += plus_or_dot(pieces) 1559 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 1560 | if pieces["dirty"]: 1561 | rendered += ".dirty" 1562 | else: 1563 | # exception #1 1564 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 1565 | pieces["short"]) 1566 | if pieces["dirty"]: 1567 | rendered += ".dirty" 1568 | return rendered 1569 | 1570 | 1571 | def render_pep440_branch(pieces: Dict[str, Any]) -> str: 1572 | """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . 1573 | 1574 | The ".dev0" means not master branch. 
Note that .dev0 sorts backwards 1575 | (a feature branch will appear "older" than the master branch). 1576 | 1577 | Exceptions: 1578 | 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] 1579 | """ 1580 | if pieces["closest-tag"]: 1581 | rendered = pieces["closest-tag"] 1582 | if pieces["distance"] or pieces["dirty"]: 1583 | if pieces["branch"] != "master": 1584 | rendered += ".dev0" 1585 | rendered += plus_or_dot(pieces) 1586 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 1587 | if pieces["dirty"]: 1588 | rendered += ".dirty" 1589 | else: 1590 | # exception #1 1591 | rendered = "0" 1592 | if pieces["branch"] != "master": 1593 | rendered += ".dev0" 1594 | rendered += "+untagged.%d.g%s" % (pieces["distance"], 1595 | pieces["short"]) 1596 | if pieces["dirty"]: 1597 | rendered += ".dirty" 1598 | return rendered 1599 | 1600 | 1601 | def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: 1602 | """Split pep440 version string at the post-release segment. 1603 | 1604 | Returns the release segments before the post-release and the 1605 | post-release version number (or -1 if no post-release segment is present). 1606 | """ 1607 | vc = str.split(ver, ".post") 1608 | return vc[0], int(vc[1] or 0) if len(vc) == 2 else None 1609 | 1610 | 1611 | def render_pep440_pre(pieces: Dict[str, Any]) -> str: 1612 | """TAG[.postN.devDISTANCE] -- No -dirty. 1613 | 1614 | Exceptions: 1615 | 1: no tags. 0.post0.devDISTANCE 1616 | """ 1617 | if pieces["closest-tag"]: 1618 | if pieces["distance"]: 1619 | # update the post release segment 1620 | tag_version, post_version = pep440_split_post(pieces["closest-tag"]) 1621 | rendered = tag_version 1622 | if post_version is not None: 1623 | rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) 1624 | else: 1625 | rendered += ".post0.dev%d" % (pieces["distance"]) 1626 | else: 1627 | # no commits, use the tag as the version 1628 | rendered = pieces["closest-tag"] 1629 | else: 1630 | # exception #1 1631 | rendered = "0.post0.dev%d" % pieces["distance"] 1632 | return rendered 1633 | 1634 | 1635 | def render_pep440_post(pieces: Dict[str, Any]) -> str: 1636 | """TAG[.postDISTANCE[.dev0]+gHEX] . 1637 | 1638 | The ".dev0" means dirty. Note that .dev0 sorts backwards 1639 | (a dirty tree will appear "older" than the corresponding clean one), 1640 | but you shouldn't be releasing software with -dirty anyways. 1641 | 1642 | Exceptions: 1643 | 1: no tags. 0.postDISTANCE[.dev0] 1644 | """ 1645 | if pieces["closest-tag"]: 1646 | rendered = pieces["closest-tag"] 1647 | if pieces["distance"] or pieces["dirty"]: 1648 | rendered += ".post%d" % pieces["distance"] 1649 | if pieces["dirty"]: 1650 | rendered += ".dev0" 1651 | rendered += plus_or_dot(pieces) 1652 | rendered += "g%s" % pieces["short"] 1653 | else: 1654 | # exception #1 1655 | rendered = "0.post%d" % pieces["distance"] 1656 | if pieces["dirty"]: 1657 | rendered += ".dev0" 1658 | rendered += "+g%s" % pieces["short"] 1659 | return rendered 1660 | 1661 | 1662 | def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: 1663 | """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . 1664 | 1665 | The ".dev0" means not master branch. 1666 | 1667 | Exceptions: 1668 | 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] 1669 | """ 1670 | if pieces["closest-tag"]: 1671 | rendered = pieces["closest-tag"] 1672 | if pieces["distance"] or pieces["dirty"]: 1673 | rendered += ".post%d" % pieces["distance"] 1674 | if pieces["branch"] != "master": 1675 | rendered += ".dev0" 1676 | rendered += plus_or_dot(pieces) 1677 | rendered += "g%s" % pieces["short"] 1678 | if pieces["dirty"]: 1679 | rendered += ".dirty" 1680 | else: 1681 | # exception #1 1682 | rendered = "0.post%d" % pieces["distance"] 1683 | if pieces["branch"] != "master": 1684 | rendered += ".dev0" 1685 | rendered += "+g%s" % pieces["short"] 1686 | if pieces["dirty"]: 1687 | rendered += ".dirty" 1688 | return rendered 1689 | 1690 | 1691 | def render_pep440_old(pieces: Dict[str, Any]) -> str: 1692 | """TAG[.postDISTANCE[.dev0]] . 1693 | 1694 | The ".dev0" means dirty. 1695 | 1696 | Exceptions: 1697 | 1: no tags. 0.postDISTANCE[.dev0] 1698 | """ 1699 | if pieces["closest-tag"]: 1700 | rendered = pieces["closest-tag"] 1701 | if pieces["distance"] or pieces["dirty"]: 1702 | rendered += ".post%d" % pieces["distance"] 1703 | if pieces["dirty"]: 1704 | rendered += ".dev0" 1705 | else: 1706 | # exception #1 1707 | rendered = "0.post%d" % pieces["distance"] 1708 | if pieces["dirty"]: 1709 | rendered += ".dev0" 1710 | return rendered 1711 | 1712 | 1713 | def render_git_describe(pieces: Dict[str, Any]) -> str: 1714 | """TAG[-DISTANCE-gHEX][-dirty]. 1715 | 1716 | Like 'git describe --tags --dirty --always'. 1717 | 1718 | Exceptions: 1719 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 1720 | """ 1721 | if pieces["closest-tag"]: 1722 | rendered = pieces["closest-tag"] 1723 | if pieces["distance"]: 1724 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 1725 | else: 1726 | # exception #1 1727 | rendered = pieces["short"] 1728 | if pieces["dirty"]: 1729 | rendered += "-dirty" 1730 | return rendered 1731 | 1732 | 1733 | def render_git_describe_long(pieces: Dict[str, Any]) -> str: 1734 | """TAG-DISTANCE-gHEX[-dirty]. 1735 | 1736 | Like 'git describe --tags --dirty --always -long'. 1737 | The distance/hash is unconditional. 1738 | 1739 | Exceptions: 1740 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 1741 | """ 1742 | if pieces["closest-tag"]: 1743 | rendered = pieces["closest-tag"] 1744 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 1745 | else: 1746 | # exception #1 1747 | rendered = pieces["short"] 1748 | if pieces["dirty"]: 1749 | rendered += "-dirty" 1750 | return rendered 1751 | 1752 | 1753 | def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: 1754 | """Render the given version pieces into the requested style.""" 1755 | if pieces["error"]: 1756 | return {"version": "unknown", 1757 | "full-revisionid": pieces.get("long"), 1758 | "dirty": None, 1759 | "error": pieces["error"], 1760 | "date": None} 1761 | 1762 | if not style or style == "default": 1763 | style = "pep440" # the default 1764 | 1765 | if style == "pep440": 1766 | rendered = render_pep440(pieces) 1767 | elif style == "pep440-branch": 1768 | rendered = render_pep440_branch(pieces) 1769 | elif style == "pep440-pre": 1770 | rendered = render_pep440_pre(pieces) 1771 | elif style == "pep440-post": 1772 | rendered = render_pep440_post(pieces) 1773 | elif style == "pep440-post-branch": 1774 | rendered = render_pep440_post_branch(pieces) 1775 | elif style == "pep440-old": 1776 | rendered = render_pep440_old(pieces) 1777 | elif style == "git-describe": 1778 | rendered = render_git_describe(pieces) 1779 | elif style == "git-describe-long": 1780 | rendered = render_git_describe_long(pieces) 1781 | else: 1782 | raise ValueError("unknown style '%s'" % style) 1783 | 1784 | return {"version": rendered, "full-revisionid": pieces["long"], 1785 | "dirty": pieces["dirty"], "error": None, 1786 | "date": pieces.get("date")} 1787 | 1788 | 1789 | class VersioneerBadRootError(Exception): 1790 | """The project root directory is unknown or missing key files.""" 1791 | 1792 | 1793 | def get_versions(verbose: bool = False) -> Dict[str, Any]: 1794 | """Get the project version from whatever source is available. 1795 | 1796 | Returns dict with two keys: 'version' and 'full'. 1797 | """ 1798 | if "versioneer" in sys.modules: 1799 | # see the discussion in cmdclass.py:get_cmdclass() 1800 | del sys.modules["versioneer"] 1801 | 1802 | root = get_root() 1803 | cfg = get_config_from_root(root) 1804 | 1805 | assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" 1806 | handlers = HANDLERS.get(cfg.VCS) 1807 | assert handlers, "unrecognized VCS '%s'" % cfg.VCS 1808 | verbose = verbose or bool(cfg.verbose) # `bool()` used to avoid `None` 1809 | assert cfg.versionfile_source is not None, \ 1810 | "please set versioneer.versionfile_source" 1811 | assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" 1812 | 1813 | versionfile_abs = os.path.join(root, cfg.versionfile_source) 1814 | 1815 | # extract version from first of: _version.py, VCS command (e.g. 'git 1816 | # describe'), parentdir. This is meant to work for developers using a 1817 | # source checkout, for users of a tarball created by 'setup.py sdist', 1818 | # and for users of a tarball/zipball created by 'git archive' or github's 1819 | # download-from-tag feature or the equivalent in other VCSes. 
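    # Illustrative shape of a successful lookup (hypothetical values):
    # {"version": "1.2.3+4.g0123abc.dirty", "full-revisionid": "<40-char sha>",
    #  "dirty": True, "error": None, "date": "2015-11-10T14:24:58-0500"}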
1820 | 1821 | get_keywords_f = handlers.get("get_keywords") 1822 | from_keywords_f = handlers.get("keywords") 1823 | if get_keywords_f and from_keywords_f: 1824 | try: 1825 | keywords = get_keywords_f(versionfile_abs) 1826 | ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) 1827 | if verbose: 1828 | print("got version from expanded keyword %s" % ver) 1829 | return ver 1830 | except NotThisMethod: 1831 | pass 1832 | 1833 | try: 1834 | ver = versions_from_file(versionfile_abs) 1835 | if verbose: 1836 | print("got version from file %s %s" % (versionfile_abs, ver)) 1837 | return ver 1838 | except NotThisMethod: 1839 | pass 1840 | 1841 | from_vcs_f = handlers.get("pieces_from_vcs") 1842 | if from_vcs_f: 1843 | try: 1844 | pieces = from_vcs_f(cfg.tag_prefix, root, verbose) 1845 | ver = render(pieces, cfg.style) 1846 | if verbose: 1847 | print("got version from VCS %s" % ver) 1848 | return ver 1849 | except NotThisMethod: 1850 | pass 1851 | 1852 | try: 1853 | if cfg.parentdir_prefix: 1854 | ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 1855 | if verbose: 1856 | print("got version from parentdir %s" % ver) 1857 | return ver 1858 | except NotThisMethod: 1859 | pass 1860 | 1861 | if verbose: 1862 | print("unable to compute version") 1863 | 1864 | return {"version": "0+unknown", "full-revisionid": None, 1865 | "dirty": None, "error": "unable to compute version", 1866 | "date": None} 1867 | 1868 | 1869 | def get_version() -> str: 1870 | """Get the short version string for this project.""" 1871 | return get_versions()["version"] 1872 | 1873 | 1874 | def get_cmdclass(cmdclass: Optional[Dict[str, Any]] = None): 1875 | """Get the custom setuptools subclasses used by Versioneer. 1876 | 1877 | If the package uses a different cmdclass (e.g. one from numpy), it 1878 | should be provide as an argument. 1879 | """ 1880 | if "versioneer" in sys.modules: 1881 | del sys.modules["versioneer"] 1882 | # this fixes the "python setup.py develop" case (also 'install' and 1883 | # 'easy_install .'), in which subdependencies of the main project are 1884 | # built (using setup.py bdist_egg) in the same python process. Assume 1885 | # a main project A and a dependency B, which use different versions 1886 | # of Versioneer. A's setup.py imports A's Versioneer, leaving it in 1887 | # sys.modules by the time B's setup.py is executed, causing B to run 1888 | # with the wrong versioneer. Setuptools wraps the sub-dep builds in a 1889 | # sandbox that restores sys.modules to it's pre-build state, so the 1890 | # parent is protected against the child's "import versioneer". By 1891 | # removing ourselves from sys.modules here, before the child build 1892 | # happens, we protect the child from the parent's versioneer too. 
1893 | # Also see https://github.com/python-versioneer/python-versioneer/issues/52 1894 | 1895 | cmds = {} if cmdclass is None else cmdclass.copy() 1896 | 1897 | # we add "version" to setuptools 1898 | from setuptools import Command 1899 | 1900 | class cmd_version(Command): 1901 | description = "report generated version string" 1902 | user_options: List[Tuple[str, str, str]] = [] 1903 | boolean_options: List[str] = [] 1904 | 1905 | def initialize_options(self) -> None: 1906 | pass 1907 | 1908 | def finalize_options(self) -> None: 1909 | pass 1910 | 1911 | def run(self) -> None: 1912 | vers = get_versions(verbose=True) 1913 | print("Version: %s" % vers["version"]) 1914 | print(" full-revisionid: %s" % vers.get("full-revisionid")) 1915 | print(" dirty: %s" % vers.get("dirty")) 1916 | print(" date: %s" % vers.get("date")) 1917 | if vers["error"]: 1918 | print(" error: %s" % vers["error"]) 1919 | cmds["version"] = cmd_version 1920 | 1921 | # we override "build_py" in setuptools 1922 | # 1923 | # most invocation pathways end up running build_py: 1924 | # distutils/build -> build_py 1925 | # distutils/install -> distutils/build ->.. 1926 | # setuptools/bdist_wheel -> distutils/install ->.. 1927 | # setuptools/bdist_egg -> distutils/install_lib -> build_py 1928 | # setuptools/install -> bdist_egg ->.. 1929 | # setuptools/develop -> ? 1930 | # pip install: 1931 | # copies source tree to a tempdir before running egg_info/etc 1932 | # if .git isn't copied too, 'git describe' will fail 1933 | # then does setup.py bdist_wheel, or sometimes setup.py install 1934 | # setup.py egg_info -> ? 1935 | 1936 | # pip install -e . and setuptool/editable_wheel will invoke build_py 1937 | # but the build_py command is not expected to copy any files. 1938 | 1939 | # we override different "build_py" commands for both environments 1940 | if 'build_py' in cmds: 1941 | _build_py: Any = cmds['build_py'] 1942 | else: 1943 | from setuptools.command.build_py import build_py as _build_py 1944 | 1945 | class cmd_build_py(_build_py): 1946 | def run(self) -> None: 1947 | root = get_root() 1948 | cfg = get_config_from_root(root) 1949 | versions = get_versions() 1950 | _build_py.run(self) 1951 | if getattr(self, "editable_mode", False): 1952 | # During editable installs `.py` and data files are 1953 | # not copied to build_lib 1954 | return 1955 | # now locate _version.py in the new build/ directory and replace 1956 | # it with an updated value 1957 | if cfg.versionfile_build: 1958 | target_versionfile = os.path.join(self.build_lib, 1959 | cfg.versionfile_build) 1960 | print("UPDATING %s" % target_versionfile) 1961 | write_to_version_file(target_versionfile, versions) 1962 | cmds["build_py"] = cmd_build_py 1963 | 1964 | if 'build_ext' in cmds: 1965 | _build_ext: Any = cmds['build_ext'] 1966 | else: 1967 | from setuptools.command.build_ext import build_ext as _build_ext 1968 | 1969 | class cmd_build_ext(_build_ext): 1970 | def run(self) -> None: 1971 | root = get_root() 1972 | cfg = get_config_from_root(root) 1973 | versions = get_versions() 1974 | _build_ext.run(self) 1975 | if self.inplace: 1976 | # build_ext --inplace will only build extensions in 1977 | # build/lib<..> dir with no _version.py to write to. 1978 | # As in place builds will already have a _version.py 1979 | # in the module dir, we do not need to write one. 
1980 | return 1981 | # now locate _version.py in the new build/ directory and replace 1982 | # it with an updated value 1983 | if not cfg.versionfile_build: 1984 | return 1985 | target_versionfile = os.path.join(self.build_lib, 1986 | cfg.versionfile_build) 1987 | if not os.path.exists(target_versionfile): 1988 | print(f"Warning: {target_versionfile} does not exist, skipping " 1989 | "version update. This can happen if you are running build_ext " 1990 | "without first running build_py.") 1991 | return 1992 | print("UPDATING %s" % target_versionfile) 1993 | write_to_version_file(target_versionfile, versions) 1994 | cmds["build_ext"] = cmd_build_ext 1995 | 1996 | if "cx_Freeze" in sys.modules: # cx_freeze enabled? 1997 | from cx_Freeze.dist import build_exe as _build_exe # type: ignore 1998 | # nczeczulin reports that py2exe won't like the pep440-style string 1999 | # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. 2000 | # setup(console=[{ 2001 | # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION 2002 | # "product_version": versioneer.get_version(), 2003 | # ... 2004 | 2005 | class cmd_build_exe(_build_exe): 2006 | def run(self) -> None: 2007 | root = get_root() 2008 | cfg = get_config_from_root(root) 2009 | versions = get_versions() 2010 | target_versionfile = cfg.versionfile_source 2011 | print("UPDATING %s" % target_versionfile) 2012 | write_to_version_file(target_versionfile, versions) 2013 | 2014 | _build_exe.run(self) 2015 | os.unlink(target_versionfile) 2016 | with open(cfg.versionfile_source, "w") as f: 2017 | LONG = LONG_VERSION_PY[cfg.VCS] 2018 | f.write(LONG % 2019 | {"DOLLAR": "$", 2020 | "STYLE": cfg.style, 2021 | "TAG_PREFIX": cfg.tag_prefix, 2022 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 2023 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 2024 | }) 2025 | cmds["build_exe"] = cmd_build_exe 2026 | del cmds["build_py"] 2027 | 2028 | if 'py2exe' in sys.modules: # py2exe enabled? 
2029 | try: 2030 | from py2exe.setuptools_buildexe import py2exe as _py2exe # type: ignore 2031 | except ImportError: 2032 | from py2exe.distutils_buildexe import py2exe as _py2exe # type: ignore 2033 | 2034 | class cmd_py2exe(_py2exe): 2035 | def run(self) -> None: 2036 | root = get_root() 2037 | cfg = get_config_from_root(root) 2038 | versions = get_versions() 2039 | target_versionfile = cfg.versionfile_source 2040 | print("UPDATING %s" % target_versionfile) 2041 | write_to_version_file(target_versionfile, versions) 2042 | 2043 | _py2exe.run(self) 2044 | os.unlink(target_versionfile) 2045 | with open(cfg.versionfile_source, "w") as f: 2046 | LONG = LONG_VERSION_PY[cfg.VCS] 2047 | f.write(LONG % 2048 | {"DOLLAR": "$", 2049 | "STYLE": cfg.style, 2050 | "TAG_PREFIX": cfg.tag_prefix, 2051 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 2052 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 2053 | }) 2054 | cmds["py2exe"] = cmd_py2exe 2055 | 2056 | # sdist farms its file list building out to egg_info 2057 | if 'egg_info' in cmds: 2058 | _egg_info: Any = cmds['egg_info'] 2059 | else: 2060 | from setuptools.command.egg_info import egg_info as _egg_info 2061 | 2062 | class cmd_egg_info(_egg_info): 2063 | def find_sources(self) -> None: 2064 | # egg_info.find_sources builds the manifest list and writes it 2065 | # in one shot 2066 | super().find_sources() 2067 | 2068 | # Modify the filelist and normalize it 2069 | root = get_root() 2070 | cfg = get_config_from_root(root) 2071 | self.filelist.append('versioneer.py') 2072 | if cfg.versionfile_source: 2073 | # There are rare cases where versionfile_source might not be 2074 | # included by default, so we must be explicit 2075 | self.filelist.append(cfg.versionfile_source) 2076 | self.filelist.sort() 2077 | self.filelist.remove_duplicates() 2078 | 2079 | # The write method is hidden in the manifest_maker instance that 2080 | # generated the filelist and was thrown away 2081 | # We will instead replicate their final normalization (to unicode, 2082 | # and POSIX-style paths) 2083 | from setuptools import unicode_utils 2084 | normalized = [unicode_utils.filesys_decode(f).replace(os.sep, '/') 2085 | for f in self.filelist.files] 2086 | 2087 | manifest_filename = os.path.join(self.egg_info, 'SOURCES.txt') 2088 | with open(manifest_filename, 'w') as fobj: 2089 | fobj.write('\n'.join(normalized)) 2090 | 2091 | cmds['egg_info'] = cmd_egg_info 2092 | 2093 | # we override different "sdist" commands for both environments 2094 | if 'sdist' in cmds: 2095 | _sdist: Any = cmds['sdist'] 2096 | else: 2097 | from setuptools.command.sdist import sdist as _sdist 2098 | 2099 | class cmd_sdist(_sdist): 2100 | def run(self) -> None: 2101 | versions = get_versions() 2102 | self._versioneer_generated_versions = versions 2103 | # unless we update this, the command will keep using the old 2104 | # version 2105 | self.distribution.metadata.version = versions["version"] 2106 | return _sdist.run(self) 2107 | 2108 | def make_release_tree(self, base_dir: str, files: List[str]) -> None: 2109 | root = get_root() 2110 | cfg = get_config_from_root(root) 2111 | _sdist.make_release_tree(self, base_dir, files) 2112 | # now locate _version.py in the new base_dir directory 2113 | # (remembering that it may be a hardlink) and replace it with an 2114 | # updated value 2115 | target_versionfile = os.path.join(base_dir, cfg.versionfile_source) 2116 | print("UPDATING %s" % target_versionfile) 2117 | write_to_version_file(target_versionfile, 2118 | self._versioneer_generated_versions) 2119 | 
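    # Net effect (a sketch inferred from write_to_version_file above, not
    # stated explicitly in the source): the release tree shipped in the sdist
    # carries a _version.py rewritten to the frozen JSON form, so resolving
    # the version from the tarball does not require git metadata.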
cmds["sdist"] = cmd_sdist 2120 | 2121 | return cmds 2122 | 2123 | 2124 | CONFIG_ERROR = """ 2125 | setup.cfg is missing the necessary Versioneer configuration. You need 2126 | a section like: 2127 | 2128 | [versioneer] 2129 | VCS = git 2130 | style = pep440 2131 | versionfile_source = src/myproject/_version.py 2132 | versionfile_build = myproject/_version.py 2133 | tag_prefix = 2134 | parentdir_prefix = myproject- 2135 | 2136 | You will also need to edit your setup.py to use the results: 2137 | 2138 | import versioneer 2139 | setup(version=versioneer.get_version(), 2140 | cmdclass=versioneer.get_cmdclass(), ...) 2141 | 2142 | Please read the docstring in ./versioneer.py for configuration instructions, 2143 | edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. 2144 | """ 2145 | 2146 | SAMPLE_CONFIG = """ 2147 | # See the docstring in versioneer.py for instructions. Note that you must 2148 | # re-run 'versioneer.py setup' after changing this section, and commit the 2149 | # resulting files. 2150 | 2151 | [versioneer] 2152 | #VCS = git 2153 | #style = pep440 2154 | #versionfile_source = 2155 | #versionfile_build = 2156 | #tag_prefix = 2157 | #parentdir_prefix = 2158 | 2159 | """ 2160 | 2161 | OLD_SNIPPET = """ 2162 | from ._version import get_versions 2163 | __version__ = get_versions()['version'] 2164 | del get_versions 2165 | """ 2166 | 2167 | INIT_PY_SNIPPET = """ 2168 | from . import {0} 2169 | __version__ = {0}.get_versions()['version'] 2170 | """ 2171 | 2172 | 2173 | def do_setup() -> int: 2174 | """Do main VCS-independent setup function for installing Versioneer.""" 2175 | root = get_root() 2176 | try: 2177 | cfg = get_config_from_root(root) 2178 | except (OSError, configparser.NoSectionError, 2179 | configparser.NoOptionError) as e: 2180 | if isinstance(e, (OSError, configparser.NoSectionError)): 2181 | print("Adding sample versioneer config to setup.cfg", 2182 | file=sys.stderr) 2183 | with open(os.path.join(root, "setup.cfg"), "a") as f: 2184 | f.write(SAMPLE_CONFIG) 2185 | print(CONFIG_ERROR, file=sys.stderr) 2186 | return 1 2187 | 2188 | print(" creating %s" % cfg.versionfile_source) 2189 | with open(cfg.versionfile_source, "w") as f: 2190 | LONG = LONG_VERSION_PY[cfg.VCS] 2191 | f.write(LONG % {"DOLLAR": "$", 2192 | "STYLE": cfg.style, 2193 | "TAG_PREFIX": cfg.tag_prefix, 2194 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 2195 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 2196 | }) 2197 | 2198 | ipy = os.path.join(os.path.dirname(cfg.versionfile_source), 2199 | "__init__.py") 2200 | maybe_ipy: Optional[str] = ipy 2201 | if os.path.exists(ipy): 2202 | try: 2203 | with open(ipy, "r") as f: 2204 | old = f.read() 2205 | except OSError: 2206 | old = "" 2207 | module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] 2208 | snippet = INIT_PY_SNIPPET.format(module) 2209 | if OLD_SNIPPET in old: 2210 | print(" replacing boilerplate in %s" % ipy) 2211 | with open(ipy, "w") as f: 2212 | f.write(old.replace(OLD_SNIPPET, snippet)) 2213 | elif snippet not in old: 2214 | print(" appending to %s" % ipy) 2215 | with open(ipy, "a") as f: 2216 | f.write(snippet) 2217 | else: 2218 | print(" %s unmodified" % ipy) 2219 | else: 2220 | print(" %s doesn't exist, ok" % ipy) 2221 | maybe_ipy = None 2222 | 2223 | # Make VCS-specific changes. For git, this means creating/changing 2224 | # .gitattributes to mark _version.py for export-subst keyword 2225 | # substitution. 
2226 | do_vcs_install(cfg.versionfile_source, maybe_ipy) 2227 | return 0 2228 | 2229 | 2230 | def scan_setup_py() -> int: 2231 | """Validate the contents of setup.py against Versioneer's expectations.""" 2232 | found = set() 2233 | setters = False 2234 | errors = 0 2235 | with open("setup.py", "r") as f: 2236 | for line in f.readlines(): 2237 | if "import versioneer" in line: 2238 | found.add("import") 2239 | if "versioneer.get_cmdclass()" in line: 2240 | found.add("cmdclass") 2241 | if "versioneer.get_version()" in line: 2242 | found.add("get_version") 2243 | if "versioneer.VCS" in line: 2244 | setters = True 2245 | if "versioneer.versionfile_source" in line: 2246 | setters = True 2247 | if len(found) != 3: 2248 | print("") 2249 | print("Your setup.py appears to be missing some important items") 2250 | print("(but I might be wrong). Please make sure it has something") 2251 | print("roughly like the following:") 2252 | print("") 2253 | print(" import versioneer") 2254 | print(" setup( version=versioneer.get_version(),") 2255 | print(" cmdclass=versioneer.get_cmdclass(), ...)") 2256 | print("") 2257 | errors += 1 2258 | if setters: 2259 | print("You should remove lines like 'versioneer.VCS = ' and") 2260 | print("'versioneer.versionfile_source = ' . This configuration") 2261 | print("now lives in setup.cfg, and should be removed from setup.py") 2262 | print("") 2263 | errors += 1 2264 | return errors 2265 | 2266 | 2267 | def setup_command() -> NoReturn: 2268 | """Set up Versioneer and exit with appropriate error code.""" 2269 | errors = do_setup() 2270 | errors += scan_setup_py() 2271 | sys.exit(1 if errors else 0) 2272 | 2273 | 2274 | if __name__ == "__main__": 2275 | cmd = sys.argv[1] 2276 | if cmd == "setup": 2277 | setup_command() 2278 | --------------------------------------------------------------------------------