├── .bumpversion.cfg ├── .flake8 ├── .github └── workflows │ ├── release.yaml │ └── unittest.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── CHANGELOG.md ├── Dockerfile ├── Makefile ├── dev_datadir ├── .gitkeep ├── ssh_host_rsa_key └── ssh_host_rsa_key.pub ├── docs └── images │ ├── do-preview.png │ ├── lobbyboy.graffle │ └── lobbyboy.png ├── lobbyboy ├── __init__.py ├── conf │ └── lobbyboy_config.toml ├── config.py ├── contrib │ ├── __init__.py │ └── provider │ │ ├── __init__.py │ │ ├── digitalocean.py │ │ ├── footloose.py │ │ ├── ignite.py │ │ ├── linode.py │ │ ├── multipass.py │ │ ├── vagrant.py │ │ └── vultr.py ├── exceptions.py ├── main.py ├── provider.py ├── scripts.py ├── server.py ├── server_killer.py ├── socket_handle.py └── utils.py ├── poetry.lock ├── pyproject.toml ├── readme.md └── tests ├── __init__.py ├── conftest.py ├── test_config.py ├── test_providers ├── __init__.py ├── conftest.py └── test_footloose.py └── test_utils.py /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 0.4.0 3 | commit = True 4 | tag = True 5 | 6 | [bumpversion:file:pyproject.toml] 7 | 8 | [bumpversion:file:lobbyboy/__init__.py] 9 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = D203,W503,W605,C901,E203 3 | exclude = .git,__pycache__,build,dist,venv 4 | max-complexity = 14 5 | max-line-length = 120 6 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - v* 7 | 8 | jobs: 9 | release-pypi: 10 | name: release-pypi 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - uses: actions/checkout@v2 15 | - uses: actions/setup-python@v1 16 | with: 17 | 
python-version: 3.7 18 | architecture: "x64" 19 | - name: Cache venv 20 | uses: actions/cache@v1 21 | with: 22 | path: venv 23 | # Look to see if there is a cache hit for the corresponding requirements file 24 | key: ubuntu-latest-poetryenv-${{ hashFiles('poetry.lock') }} 25 | - name: Install system dependencies 26 | run: sudo apt install -y libkrb5-dev 27 | - name: Install Dependencies 28 | run: | 29 | python3 -m venv venv 30 | . venv/bin/activate 31 | pip install -U pip poetry 32 | poetry install 33 | python -c "import sys; print(sys.version)" 34 | pip list 35 | - name: Poetry Build 36 | run: | 37 | . venv/bin/activate 38 | poetry build 39 | - name: Test Build 40 | run: | 41 | python3 -m venv fresh_env 42 | . fresh_env/bin/activate 43 | pip install dist/*.whl 44 | 45 | lobbyboy-server -h 46 | 47 | - name: Upload to Pypi 48 | env: 49 | PASSWORD: ${{ secrets.PYPI_TOKEN }} 50 | run: | 51 | . venv/bin/activate 52 | poetry publish --username __token__ --password ${PASSWORD} 53 | 54 | - name: Create Release 55 | id: create_release 56 | uses: actions/create-release@v1 57 | env: 58 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 59 | with: 60 | tag_name: ${{ github.ref }} 61 | release_name: ${{ github.ref }} 62 | draft: false 63 | prerelease: false 64 | 65 | push-iamge: 66 | runs-on: ubuntu-latest 67 | env: 68 | IMAGE_NAME: lobbyboy 69 | permissions: 70 | packages: write 71 | contents: read 72 | 73 | steps: 74 | - uses: actions/checkout@v2 75 | 76 | - name: Build image 77 | run: docker build . 
--file Dockerfile --tag $IMAGE_NAME 78 | 79 | - name: Log in to registry 80 | # This is where you will update the PAT to GITHUB_TOKEN 81 | run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin 82 | 83 | - name: Push image 84 | run: | 85 | IMAGE_ID=ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME 86 | VERSION=latest 87 | docker tag $IMAGE_NAME $IMAGE_ID:$VERSION 88 | docker push $IMAGE_ID:$VERSION 89 | -------------------------------------------------------------------------------- /.github/workflows/unittest.yaml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | - main 8 | 9 | jobs: 10 | test: 11 | name: Pytest 12 | strategy: 13 | matrix: 14 | os: [ubuntu-latest] 15 | python: ["3.7", "3.8", "3.9", "3.10"] 16 | runs-on: ${{ matrix.os }} 17 | 18 | steps: 19 | - uses: actions/checkout@v2 20 | - uses: actions/setup-python@v2 21 | with: 22 | python-version: ${{ matrix.python }} 23 | architecture: "x64" 24 | - name: Install system dependencies 25 | run: sudo apt install -y libkrb5-dev 26 | - name: Cache venv 27 | uses: actions/cache@v2 28 | with: 29 | path: venv 30 | # Look to see if there is a cache hit for the corresponding requirements file 31 | key: poetryenv-${{ matrix.os }}-${{ matrix.python }}-${{ hashFiles('poetry.lock') }} 32 | - name: Install Dependencies 33 | run: | 34 | python3 -m venv venv 35 | . venv/bin/activate 36 | pip install -U pip setuptools poetry 37 | poetry install 38 | python -c "import sys; print(sys.version)" 39 | pip list 40 | - name: Pytest 41 | run: | 42 | . 
venv/bin/activate 43 | pytest --cov --cov-report=term --cov-report=xml 44 | - name: Upload to codecov.io 45 | uses: codecov/codecov-action@v1 46 | with: 47 | file: ./coverage.xml 48 | flags: unittests 49 | name: codecov-umbrella 50 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | 
.python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | Pipfile.lock 96 | 97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 98 | __pypackages__/ 99 | 100 | # Celery stuff 101 | celerybeat-schedule 102 | celerybeat.pid 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | ENV/ 113 | env.bak/ 114 | venv.bak/ 115 | 116 | # Spyder project settings 117 | .spyderproject 118 | .spyproject 119 | 120 | # Rope project settings 121 | .ropeproject 122 | 123 | # mkdocs documentation 124 | /site 125 | 126 | # mypy 127 | .mypy_cache/ 128 | .dmypy.json 129 | dmypy.json 130 | 131 | # Pyre type checker 132 | .pyre/ 133 | 134 | # pytype static type analyzer 135 | .pytype/ 136 | 137 | # Cython debug symbols 138 | cython_debug/ 139 | dev_datadir/* 140 | .idea 141 | 142 | # local config files 143 | config.toml 144 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | ci: 2 | autoupdate_schedule: monthly 3 | 4 | repos: 5 | - repo: https://github.com/PyCQA/bandit 6 | rev: 1.7.10 7 | hooks: 8 | - id: bandit 9 | additional_dependencies: ['toml'] 10 | # not support auto read config from `pyproject.toml` for now 11 | args: ['-c', "pyproject.toml"] 12 | 13 | - repo: https://github.com/pre-commit/pre-commit-hooks 14 | rev: v5.0.0 15 | hooks: 16 | - id: check-toml 17 | - id: check-yaml 18 | - id: end-of-file-fixer 19 | exclude: dev_datadir/ 20 | - id: trailing-whitespace 21 | - id: name-tests-test 22 | args: [--django] 23 | - id: check-added-large-files 24 | 
args: [ --enforce-all ] 25 | - id: check-merge-conflict 26 | 27 | - repo: https://github.com/codespell-project/codespell 28 | rev: v2.3.0 29 | hooks: 30 | - id: codespell 31 | 32 | - repo: https://github.com/pycqa/isort 33 | rev: 5.13.2 34 | hooks: 35 | - id: isort 36 | 37 | - repo: https://github.com/PyCQA/flake8 38 | rev: 7.1.1 39 | hooks: 40 | - id: flake8 41 | additional_dependencies: 42 | - flake8-bugbear 43 | 44 | - repo: https://github.com/psf/black 45 | rev: 24.10.0 46 | hooks: 47 | - id: black 48 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## 0.4.0 2 | 3 | **changelog won't be updated since 0.4.0, please check the release page:** 4 | 5 | https://github.com/lobbyboy-ssh/lobbyboy/releases 6 | 7 | - feature: support docker containers now! This is based on 8 | [footloose](https://github.com/weaveworks/footloose), it can create docker 9 | containers (or [ignite](https://github.com/weaveworks/ignite)) acting like a 10 | virtual machine, which can let you ssh to it. 11 | 12 | ## 0.3.0 13 | 14 | - feature: while waiting for providers doing the work for you (like creating a 15 | new VPS), Lobbyboy will show you the time pass by displaying `.` in terminal, 16 | then display the total time cost when the operation is done. (by @luxiaba) 17 | - new provider: linode is supported now! (by @luxiaba) 18 | - api change: the provider api is changed, including function name, args, 19 | returns. Please see the code for details. We will have the official docs soon. 20 | - refactor: the code of lobbyboy is now more readable and elegant now. (also by 21 | @luxiaba) 22 | 23 | ## 0.2.3 24 | 25 | - chore: Add unit test 26 | - feature: Provider DigitalOcean support choosing region/size/image when 27 | creating a droplet 28 | - feature: Provider DigitalOcean Dynamic detect new created server's ssh port is 29 | connectable. 
30 | 31 | ## 0.2.2 32 | 33 | Hello world! 34 | 35 | This is the initial version of Lobbyboy. 36 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9-slim as builder 2 | 3 | RUN apt update && apt install -y gcc libkrb5-dev 4 | RUN pip3 install poetry && poetry config virtualenvs.in-project true 5 | WORKDIR /app 6 | COPY poetry.lock . 7 | COPY pyproject.toml . 8 | RUN poetry install --no-dev --no-root 9 | COPY lobbyboy/ /app/lobbyboy/ 10 | RUN .venv/bin/pip install --no-deps . 11 | 12 | 13 | FROM python:3.9-slim 14 | 15 | WORKDIR /app 16 | COPY --from=builder /app/.venv/ .venv/ 17 | ENV PATH ".venv/bin:$PATH" 18 | EXPOSE 12200 19 | CMD [ "/app/.venv/bin/lobbyboy-server", "-c", "config.toml" ] 20 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | lint: 2 | poetry run flake8 . 3 | poetry run black . 
4 | -------------------------------------------------------------------------------- /dev_datadir/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lobbyboy-ssh/lobbyboy/dafbb5905bd1c43facda7001a36e11b3a6ede837/dev_datadir/.gitkeep -------------------------------------------------------------------------------- /dev_datadir/ssh_host_rsa_key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIG5AIBAAKCAYEApKlMbaTp9O/8hRVaip9PdCfjpXyGl6hsAD/s2pmquonbrj8o 3 | mPgxj8CwOgmqas44Cf7Zq38LJzp7op3DqHNnd3XpYXWxV4mpnNFrL5Un4TH0Bazb 4 | lneGwA333Y1r3iY3j/5FBR55Jeh7F/5RHPH2+fH5TJ0sv1GeAOtVvi3QWzda2/7D 5 | /Kkrcjfnqk5RsrzAY2sFc5z2M2+OfBMHpsPmTTTJnJ7YmFVnl4XIDww8z8Jpyt51 6 | t8kyi3NCTtHz0jFjnc4QKesAKcxa46kdT0uh3+t34TvNMBGsrSjHuTr0SmMG32US 7 | DTW47qzB79vkF/MCOFFJzqsj3NKW6x7JsErPTRLY+ShbHAmj0BVeL/jv9jFduY8D 8 | 2W3hHiLjXxIKCkYhkvZ6BL/UffM9/k2bjQ+SWNJ6DZ9sN3T7g5m0ytxWpskCv/T+ 9 | raYbEwf6Ufe3Bzg/c4uwJj1WqmyloNpE891Gn3k8WokQO+8SF/MU7jW6Bpcjdd1v 10 | 01Cj2qgAYuC7DKYlAgMBAAECggGBAInXJ6HJoghnNc76lzbgMhUcp1cLLWiblv2G 11 | hfJhHSjW6s2tFYEok+Sw+1U6bizGwOatLu5/hYirToBM7VOAeCbq0BnD+lc7aH9c 12 | WAwXIrFDrfFqSNBm8qiniM0w7BGlkHG7+29TmTsAqLrnLGO8snF9/KIEGUd3PXur 13 | hbWUiI0zg54SYWKLA1G0Nvy8X2vu0mbnWkL0LK3WP0Lic09JQutyuyufVdX/+J5B 14 | Yb4nEmkhaIppO8ZEaUmPjPyKiT92E4PDcVn404Yg5hPiUMYzsFoODvHi8iFokEHc 15 | nrpRoL3Nuktjx8GNTlavedsS9RNehSQ18+xfA3lFZF+Jxj2qaITlP0vVqGuXhHCG 16 | rBprtCKAu/F3ZhBIIOgMiDQNm8baizQJOic+3WSX70ISJsOTPp81xyt1C7iUavqz 17 | GtOb5NNb3WU9niSqsoyMxN+votoDPGpX8ZAFet8PAwQyUMB5nRTNDQbF5qfN6DQH 18 | I3Ff4g3KvYaAkso46Sc6Qa/Mst3+wQKBwQDap00xwQTlodscQ/tJcLH2fWBcSZBR 19 | 2e+iSAqbffzAsJQRRk8PuEwPmHZU9yFoKfqPp5A0UfO3MTdC7JBJP7XEFZ/O4LVK 20 | u9KZhfTk3navxh6RidWonnidAWJK6+txutnhIsbFoLO565FJqw0vRIgF7WDi8RYy 21 | OEtz3KmMdalMZusd67oC3EqLHVPccnZGFLse+50BwVneAeshwpC4afxgwkU+MVbt 22 | 9LqsnGWa8qBKwdbeztiIRU7wphSVR3AcAQ0CgcEAwMkrhjHQV3RPGMz4/qhe/RBQ 
23 | beB1khk7ZxQ2zsPLG8RsIvPKGsxjXJMJnyU6j6on1XterL9qxcDkJXJzmkaaWt52 24 | ZAQBKIkDYAm6wtS/e4kXCPIw46XrcEwzAnIuJETUJWrZPKv2PXLz1oPGC57QCklA 25 | xKNrusgbrcGNwWqyi6CpyyQjr0rBkw2v6ShAg3DtlZSoUD+5LDJs1cRGvVRC8e0d 26 | AGMyN8q6HGEcm0P6/nRKfAUv52cJK7rSYZBbwQN5AoHACqcmCquFyo5VN45UWuKj 27 | uQeovzZSUrqUg0qRQbwkqVqxDBpp1bE7+3KJKWCU/xc0fvVCAFqRzjYWpCsUaI0z 28 | FOeczp6CFq8J9rLhGJmCHNRBPwQBJeL6q2Oa312ZLgRVgzOHja9jskY44Ob1kHz9 29 | R5SmKMhoNXAqtO1mUVKWOgO1ptKKUINdGmQGZvIoswdlin/lyWUbNMbRJubs2B9J 30 | zYkJiJssyhEnWyuPg7oYdyv0hQx83j7KNs1QqWk+nA6tAoHAOw8l20V1auVBufpJ 31 | KVKRTvtpeE+0FN7P+OMTa0JF/aqt6gM/XfhRVDHjV7LC/vIEo/zYYxl87zoJaoNm 32 | cNPCQ6Y1JKypYi6fLxOr+DXOt1ynBuGTIppYxx1A3zX1drzzNMdH57NSiz6k/ol9 33 | 6V9sokoQomzh+uWEvTU3ZRJ3whilC4t1tySyqocd5dKj+OBmmFVjPrpW0vhbqR0p 34 | AqVGoPCf2teIsdjULQ9/AP3RbZRYoSfBpiVFhPUFsEfWJAoZAoHBALTDsrcozVNb 35 | 16s4Y1b8NmPohPUZOP6AsDH+qFNRsxGzoiWuIfWy8hBLbNotP229CiFIV1RxgNnr 36 | ZQnJTBXszRENMOLIOMJCF8+IC9SJ52MtGfRVYoDcjqUbdwWxCQRuC7Cpn6kxD0OE 37 | UQXS6JFtPRWDJo0sSypaozoqC3Rjz1xCa//Sabcxny4cXM6AlGEPX1prZz2wDaR6 38 | QfPzgDCJEMh45eqeOitUE8awIjQUXuPf3Sl+7RPKUKfKJiYXEyFxPA== 39 | -----END RSA PRIVATE KEY----- 40 | -------------------------------------------------------------------------------- /dev_datadir/ssh_host_rsa_key.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCkqUxtpOn07/yFFVqKn090J+OlfIaXqGwAP+zamaq6iduuPyiY+DGPwLA6CapqzjgJ/tmrfwsnOnuincOoc2d3delhdbFXiamc0WsvlSfhMfQFrNuWd4bADffdjWveJjeP/kUFHnkl6HsX/lEc8fb58flMnSy/UZ4A61W+LdBbN1rb/sP8qStyN+eqTlGyvMBjawVznPYzb458Ewemw+ZNNMmcntiYVWeXhcgPDDzPwmnK3nW3yTKLc0JO0fPSMWOdzhAp6wApzFrjqR1PS6Hf63fhO80wEaytKMe5OvRKYwbfZRINNbjurMHv2+QX8wI4UUnOqyPc0pbrHsmwSs9NEtj5KFscCaPQFV4v+O/2MV25jwPZbeEeIuNfEgoKRiGS9noEv9R98z3+TZuND5JY0noNn2w3dPuDmbTK3FamyQK/9P6tphsTB/pR97cHOD9zi7AmPVaqbKWg2kTz3UafeTxaiRA77xIX8xTuNboGlyN13W/TUKPaqABi4LsMpiU= -------------------------------------------------------------------------------- 
/docs/images/do-preview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lobbyboy-ssh/lobbyboy/dafbb5905bd1c43facda7001a36e11b3a6ede837/docs/images/do-preview.png -------------------------------------------------------------------------------- /docs/images/lobbyboy.graffle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lobbyboy-ssh/lobbyboy/dafbb5905bd1c43facda7001a36e11b3a6ede837/docs/images/lobbyboy.graffle -------------------------------------------------------------------------------- /docs/images/lobbyboy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lobbyboy-ssh/lobbyboy/dafbb5905bd1c43facda7001a36e11b3a6ede837/docs/images/lobbyboy.png -------------------------------------------------------------------------------- /lobbyboy/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.4.0" 2 | -------------------------------------------------------------------------------- /lobbyboy/conf/lobbyboy_config.toml: -------------------------------------------------------------------------------- 1 | data_dir = "./dev_datadir" 2 | listen_port = 12200 3 | listen_ip = "0.0.0.0" 4 | min_destroy_interval = "1m" 5 | servers_file = "available_servers_db.json" 6 | 7 | # CRITICAL 8 | # ERROR 9 | # WARNING 10 | # INFO 11 | # DEBUG 12 | log_level = "DEBUG" 13 | 14 | [user.Gustave] 15 | # client pub keys for ssh to lobbyboy server. 
16 | # change this config will take effect immediately, no need to restart lobby 17 | # one key per line 18 | authorized_keys = """ 19 | """ 20 | password = "Fiennes" 21 | 22 | [provider.digitalocean] 23 | load_module = "lobbyboy.contrib.provider.digitalocean::DigitalOceanProvider" 24 | 25 | # set to false to disable this provider 26 | enable = true 27 | 28 | # lobbyboy will destroy the server for you. 29 | # when: 30 | # 1. there is no active session(if you ssh to that server bot throughing 31 | # lobbyboy, then it doesn't count) 32 | # 2. server exist at least ``min_life_to_live`` time 33 | # 3. current time + ``destroy_interval`` + 5m(in case of network latency) will 34 | # enter next bill unit 35 | # For e.g. 36 | # You pay DigitalOcean's VPS(they call droplet) based on hourly usage, say if 37 | # you use a VPS for 30 minutes, and logout, lobbyboy will destroy this server 38 | # after 24mins, in case you want to use it again, and you don't pay extra money 39 | # for that 40 | # 41 | # if set ``min_life_to_live`` to "0s", the server will be destroy immediately 42 | # after everyone logout. 43 | # 44 | # supprtted format: 45 | # - 50s for 50 seconds 46 | # - 55m for 55 minutes 47 | # - 1h for one hour 48 | # - 2d for 2 days 49 | min_life_to_live = "0s" 50 | 51 | # some cloud service providers support hourly/dayily/monthly/... charging 52 | # you can set this as you bill cycle so that lobbyboy knows when to check or destroy the server. 53 | bill_time_unit = "0s" 54 | 55 | # before the end of the current billing cycle, the safe time left for the destroy action can 56 | # prevent the delay of destroy, eg: network problems, etc. 
57 | destroy_safe_time = '5m' 58 | 59 | # set server name prefix, if not set, will use server name directly, if set, connect prefix and server name with '-' 60 | # eg: 61 | # server name: myServer 62 | # server_name_prefix: lobbyboy -> lobbyboy-myServer 63 | # not set server_name_prefix -> myServer 64 | server_name_prefix = 'lobbyboy' 65 | 66 | # Will try to read from environment variables first, ``DIGITALOCEAN_TOKEN`` 67 | # if empty, then use this config 68 | # api_token = "" 69 | 70 | # ssh keys, added when create a new server 71 | # it is a list, every item is a key, each one can be: 72 | # * public_key string 73 | # * digitalocean ssh key id string 74 | # * public_key fingerprint string (you need to setup from digitalocean first) 75 | # extra_ssh_keys = [ 76 | # ] 77 | 78 | # quick choose a favorite droplet template to create 79 | # format: regions-slug:size-slug:image-slug 80 | favorite_instance_types = [ 81 | "sgp1:s-1vcpu-1gb:ubuntu-21-04-x64", 82 | "sgp1:s-1vcpu-1gb:freebsd-12-x64-zfs", 83 | "sfo1:s-1vcpu-1gb:fedora-33-x64", 84 | ] 85 | 86 | [provider.vagrant] 87 | load_module = "lobbyboy.contrib.provider.vagrant::VagrantProvider" 88 | enable = true 89 | min_life_to_live = "1h" 90 | bill_time_unit = "1h" 91 | 92 | server_name_prefix = 'lobbyboy' 93 | vagrantfile=""" 94 | Vagrant.configure("2") do |config| 95 | config.vm.define "{boxname}" 96 | config.vm.box = "bento/ubuntu-20.04" 97 | config.vm.provider "virtualbox" do |v| 98 | v.name = "{boxname}" 99 | v.memory = 1024 100 | v.cpus = 12 101 | end 102 | end 103 | """ 104 | 105 | [provider.linode] 106 | load_module = "lobbyboy.contrib.provider.linode::LinodeProvider" 107 | enable = true 108 | min_life_to_live = "1h" 109 | bill_time_unit = "1h" 110 | 111 | # Will try to read from environment variables first, ``LINODE_TOKEN`` 112 | # if empty, then use this config 113 | # api_token = "" 114 | 115 | # format: regions.id:type.id:image.id 116 | # see [Linode 
Region](https://www.linode.com/docs/api/regions/#regions-list) 117 | # [Linode Type](https://www.linode.com/docs/api/linode-types/#types-list) 118 | # [Linode Image](https://www.linode.com/docs/api/images/#images-list) 119 | favorite_instance_types = [ 120 | "ap-south:g6-nanode-1:linode/centos7", 121 | ] 122 | 123 | [provider.footloose] 124 | load_module = "lobbyboy.contrib.provider.footloose::FootlooseProvider" 125 | enable = true 126 | min_life_to_live = "0" 127 | bill_time_unit = "0" 128 | footloose_config = """ 129 | cluster: 130 | name: footloose-{server_name} 131 | privateKey: cluster-key 132 | machines: 133 | - count: 1 134 | spec: 135 | backend: docker 136 | image: quay.io/footloose/centos7:latest 137 | name: {server_name}%d 138 | portMappings: 139 | - containerPort: 22 140 | """ 141 | 142 | [provider.ignite] 143 | load_module = "lobbyboy.contrib.provider.ignite::IgniteProvider" 144 | enable = true 145 | min_life_to_live = "0" 146 | bill_time_unit = "0" 147 | 148 | [provider.multipass] 149 | load_module = "lobbyboy.contrib.provider.multipass::MultipassProvider" 150 | enable = true 151 | min_life_to_live = "0" 152 | bill_time_unit = "0" 153 | 154 | [provider.vultr] 155 | load_module = "lobbyboy.contrib.provider.vultr::VultrProvider" 156 | min_life_to_live = "1h" 157 | bill_time_unit = "1h" 158 | 159 | # Will try to read from environment variables first, ``VULTR_TOKEN`` 160 | # if empty, then use this config 161 | # api_token = "" 162 | 163 | # format: regions.id:plan.id:image.id 164 | favorite_instance_types = [ 165 | "sgp:vc2-1c-1gb:362", 166 | ] 167 | -------------------------------------------------------------------------------- /lobbyboy/config.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import time 4 | from collections import OrderedDict 5 | from dataclasses import asdict, dataclass, field, fields 6 | from json import JSONDecodeError 7 | from pathlib import Path 8 | from 
typing import Any, Dict, List, Optional 9 | from typing import OrderedDict as typeOrderedDict 10 | from typing import Tuple, Type 11 | 12 | import toml 13 | 14 | from lobbyboy.exceptions import InvalidConfigException 15 | from lobbyboy.utils import confirm_dc_type, encoder_factory, import_class 16 | 17 | logger = logging.getLogger(__name__) 18 | 19 | 20 | @dataclass 21 | class LBConfigUser: 22 | authorized_keys: str = None 23 | password: Optional[str] = None 24 | 25 | def auth_key_pairs(self) -> List[Tuple]: 26 | """ 27 | 28 | Returns: 29 | tuple: (key_type, key_data) 30 | """ 31 | if not self.authorized_keys: 32 | return [] 33 | return [tuple(ssh_key.split(maxsplit=1)) for ssh_key in self.authorized_keys.split("\n") if ssh_key] 34 | 35 | 36 | @dataclass 37 | class LBConfigProvider: 38 | """ 39 | The basic configuration set of provider. 40 | 41 | If a `provider` needs to configure custom fields, it should inherit this. 42 | """ 43 | 44 | load_module: str = None 45 | enable: bool = True 46 | min_life_to_live: str = None 47 | bill_time_unit: str = None 48 | destroy_safe_time: str = None 49 | server_name_prefix: str = None 50 | api_token: str = None 51 | extra_ssh_keys: List[str] = field(default_factory=list) 52 | 53 | 54 | @dataclass 55 | class LBServerMeta: 56 | provider_name: str 57 | workspace: Path 58 | server_name: str 59 | server_host: str = "127.0.0.1" 60 | server_user: str = "root" 61 | server_port: int = 22 62 | created_timestamp: int = field(default_factory=lambda: int(time.time())) 63 | # TODO support below features when create server 64 | # extra ssh args when connect to this server by ssh, eg: ["-o", "ProxyCommand=$jumpServer"] 65 | ssh_extra_args: List[str] = field(default_factory=list) 66 | # indicate whether this server is managed by us or not. 
67 | manage: bool = True 68 | 69 | def __post_init__(self): 70 | self.confirm_data_type() 71 | 72 | def confirm_data_type(self): 73 | if self.workspace: 74 | self.workspace = Path(self.workspace) 75 | 76 | @property 77 | def live_sec(self) -> int: 78 | return int(time.time()) - self.created_timestamp 79 | 80 | 81 | @dataclass 82 | class LBConfig: 83 | _file: Path 84 | _raw: Dict = field(default_factory=dict) 85 | data_dir: Path = None 86 | listen_port: int = None 87 | listen_ip: str = None 88 | min_destroy_interval: str = None 89 | servers_file: str = None 90 | log_level: str = None 91 | user: Dict[str, Type[LBConfigUser]] = field(default_factory=dict) 92 | provider: Dict[str, LBConfigProvider] = field(default_factory=dict) 93 | # Hold all providers class. 94 | _provider_cls: Dict[str, Any] = field(default_factory=dict) 95 | 96 | @classmethod 97 | def load(cls, config_file: Path) -> "LBConfig": 98 | """Load config from file.""" 99 | logger.debug(f"Loading LB configs from {str(config_file)}.") 100 | 101 | raw_config: Dict = toml.load(config_file) # noqa 102 | 103 | # update it from config file 104 | config_file_file = { 105 | f.name: raw_config[f.name] for f in fields(cls) if f.name in raw_config and not f.name.startswith("_") 106 | } 107 | config = cls(**config_file_file, _file=config_file, _raw=raw_config) 108 | 109 | config.confirm_data_type() 110 | 111 | # validation 112 | is_valid, reason = config.validate() 113 | if not is_valid: 114 | raise InvalidConfigException(reason) 115 | return config 116 | 117 | def reload(self) -> "LBConfig": 118 | return self.load(self._file) 119 | 120 | def validate(self) -> Tuple[bool, Optional[str]]: 121 | """ 122 | 123 | Returns: 124 | tuple(bool, str): (config_is_valid, reason) 125 | """ 126 | if self.data_dir is None: 127 | return False, "missing required config: please check 'data_dir' in your config file." 
128 | # TODO, config validator 129 | return True, None 130 | 131 | def confirm_data_type(self): 132 | if self._file: 133 | self._file = Path(self._file) 134 | if self.data_dir: 135 | self.data_dir = Path(self.data_dir) 136 | self.user = {u: confirm_dc_type(config, LBConfigUser) for u, config in self.user.items()} 137 | 138 | # Initialize the configuration with each provider's own config class. 139 | config: Dict 140 | for name, config in self.provider.items(): 141 | if not config.get("enable", True): 142 | continue 143 | 144 | load_module = config.get("load_module", "") 145 | provider_cls = import_class(load_module) 146 | if not provider_cls: 147 | raise InvalidConfigException( 148 | f'Invalid `load_module` config for {name}, it must be in format "module_path::class_name", ' 149 | f"please check your config and whether file exists." 150 | ) 151 | self.provider[name] = confirm_dc_type(config, provider_cls.config) 152 | self._provider_cls[name] = provider_cls 153 | 154 | @property 155 | def provider_cls(self): 156 | return self._provider_cls 157 | 158 | @property 159 | def servers_db_path(self) -> Path: 160 | return self.data_dir.joinpath(self.servers_file) 161 | 162 | 163 | def load_local_servers(servers_db_path: Path) -> typeOrderedDict[str, LBServerMeta]: 164 | """ 165 | load from `servers_file` config, return result 166 | """ 167 | servers_json = [] 168 | try: 169 | with open(servers_db_path, "r+") as f: 170 | content = f.read() 171 | if content: 172 | servers_json = json.loads(content) 173 | except (FileNotFoundError, JSONDecodeError) as e: 174 | logger.error(f"Error when reading local db {str(servers_db_path)}, {str(e)}") 175 | return OrderedDict() 176 | logger.debug(f"open server_json, find {len(servers_json)} available_servers: {servers_json}") 177 | d = OrderedDict() 178 | for i in servers_json: 179 | server = LBServerMeta(**i) 180 | d[server.server_name] = server 181 | return d 182 | 183 | 184 | def update_local_servers( 185 | servers_db_path: Path, 186 | 
new: List[LBServerMeta] = None, 187 | deleted: List[LBServerMeta] = None, 188 | ) -> Dict[str, LBServerMeta]: 189 | local_servers = load_local_servers(servers_db_path) 190 | 191 | _add_servers = new or [] 192 | local_servers.update({server.server_name: server for server in _add_servers}) 193 | 194 | _remove_servers = deleted or [] 195 | for server in _remove_servers: 196 | local_servers.pop(server.server_name, None) 197 | 198 | with open(servers_db_path, "w+") as f: 199 | c = [asdict(i) for i in local_servers.values()] # type: ignore 200 | f.write(json.dumps(c, default=encoder_factory())) 201 | return local_servers 202 | -------------------------------------------------------------------------------- /lobbyboy/contrib/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lobbyboy-ssh/lobbyboy/dafbb5905bd1c43facda7001a36e11b3a6ede837/lobbyboy/contrib/__init__.py -------------------------------------------------------------------------------- /lobbyboy/contrib/provider/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lobbyboy-ssh/lobbyboy/dafbb5905bd1c43facda7001a36e11b3a6ede837/lobbyboy/contrib/provider/__init__.py -------------------------------------------------------------------------------- /lobbyboy/contrib/provider/digitalocean.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | from dataclasses import dataclass, field 4 | from pathlib import Path 5 | from typing import List, Tuple 6 | 7 | from digitalocean import Droplet, Image, Manager, Region, Size 8 | from paramiko.channel import Channel 9 | 10 | from lobbyboy.config import LBConfigProvider, LBServerMeta 11 | from lobbyboy.provider import BaseProvider 12 | from lobbyboy.utils import choose_option, dict_factory, port_is_open, send_to_channel 13 | 14 | logger = logging.getLogger(__name__) 
ENV_TOKEN_NAME = "DIGITALOCEAN_TOKEN"  # nosec: false B105(hardcoded_password_string) by bandit


@dataclass
class DigitaloceanConfig(LBConfigProvider):
    # Entries are "region:size:image" strings offered as one-keystroke choices
    # by _ask_user_customize_server (which splits them on ":").
    favorite_instance_types: List[str] = field(default_factory=list)


class DigitalOceanProvider(BaseProvider):
    """Create and destroy DigitalOcean droplets for lobbyboy sessions."""

    config = DigitaloceanConfig

    def __init__(self, name: str, config: LBConfigProvider, workspace: Path):
        super().__init__(name, config, workspace)
        # The DIGITALOCEAN_TOKEN environment variable overrides the config-file token.
        self.__token = os.getenv(ENV_TOKEN_NAME) or config.api_token

    @staticmethod
    def droplet_is_up(uninitialized_droplet: Droplet) -> bool:
        """Return True once any creation action on the droplet reports "completed"."""
        actions = uninitialized_droplet.get_actions()
        logger.info(f"create server actions: {actions}")
        for action in actions:
            action.load()
            # Once it shows "completed", droplet is up and running
            if action.status == "completed":
                logger.debug(f"create droplet(name: {uninitialized_droplet.name}) result: {action.status}.")
                return True
        return False

    def create_server(self, channel: Channel) -> LBServerMeta:
        """Interactively create a droplet and return its metadata.

        Asks the user for region/size/image over the ssh channel, prepares a
        per-server workspace and ssh key pair, creates the droplet, then waits
        for creation to finish before the post-create preparation step.
        """
        region, size, image = self._ask_user_customize_server(channel)
        server_name = self.generate_default_server_name()
        server_workspace = self.get_server_workspace(server_name)
        server_workspace.mkdir(exist_ok=True, parents=True)

        logger.info(f"create {self.name} server {server_name} workspace: {server_workspace}.")
        send_to_channel(channel, f"Generate server {server_name} workspace {server_workspace} done.")

        # confirm ssh key pairs
        ssh_keys = self.collection_ssh_keys(save_path=server_workspace)
        logger.info(f"prepare ssh key pairs for server {server_name} done.")

        logger.info(
            f"going to create a new droplet in digitalocean... "
            f"server name={server_name}, region={region}, image={image}, size_slug={size}"
        )
        droplet: Droplet = Droplet(
            token=self.__token,
            name=server_name,
            region=region,
            image=image,
            size_slug=size,
            ssh_keys=ssh_keys,
        )
        send_to_channel(channel, "Waiting for server to created...")
        droplet.create()
        self.time_process_action(channel, self.droplet_is_up, uninitialized_droplet=droplet)
        return self.prepare_after_server_created(channel, droplet, server_workspace, server_name)

    def prepare_after_server_created(
        self, channel: Channel, droplet: Droplet, workspace: Path, server_name: str
    ) -> LBServerMeta:
        """Persist the raw droplet info, wait for ssh to come up, and build the meta."""
        # todo: support use startup script after server created
        # load full information from digitalocean before use this droplet to other operations
        droplet.load()
        send_to_channel(channel, f"New server {server_name} (IP: {droplet.ip_address}) created!")

        # save server info to local first
        droplet_meta = dict_factory(droplet.__dict__, ignore_fields=["tokens"], ignore_rule=lambda x: x.startswith("_"))
        self.save_raw_server(droplet_meta, workspace)

        # wait for server to startup(check port is alive or not)
        send_to_channel(channel, "Waiting for server to boot...")
        self.time_process_action(channel, port_is_open, ip=droplet.ip_address)
        send_to_channel(channel, f"Server {server_name} has boot successfully!")

        return LBServerMeta(
            provider_name=self.name,
            server_name=server_name,
            workspace=workspace,
            server_host=droplet.ip_address,
        )

    def _ask_user_customize_server(self, channel: Channel) -> Tuple[str, str, str]:
        """Return a (region, size, image) triple picked by the user.

        Offers the configured favorites first; index 0 falls through to the
        fully manual menu-driven flow.
        """
        manually_create_choice = "Manually choose a new droplet to create.."
        options = [manually_create_choice, *self.provider_config.favorite_instance_types]
        user_selected_idx = choose_option(channel, options, ask_prompt="Please choose new droplet to create: ")
        user_selected = options[user_selected_idx]
        logger.info(f"choose droplet, user selected: {user_selected_idx}: {user_selected}")
        if user_selected_idx == 0:
            return self._manually_create_new_droplet(channel)
        region, size, image = user_selected.split(":")
        return region, size, image

    def _manually_create_new_droplet(self, channel) -> Tuple[str, str, str]:
        """Walk the user through region/size/image menus built from the DO API."""
        do_manager = Manager(token=self.__token)

        send_to_channel(channel, "Fetching metadata from digitalocean...")
        regions: List[Region] = do_manager.get_all_regions()
        sizes: List[Size] = do_manager.get_all_sizes()
        _images: List[Image] = do_manager.get_all_images()
        # backup image is not usable
        images = [i for i in _images if i.slug is not None]

        region_slugs = [f"{r.name} ({r.slug})" for r in regions]
        selected_region_idx = choose_option(channel, region_slugs, ask_prompt="Please choose region: ")
        selected_region: Region = regions[selected_region_idx]

        size_slugs = [s.slug for s in sizes]
        selected_size_idx = choose_option(channel, size_slugs, ask_prompt="Please choose droplet size: ")
        selected_size: Size = sizes[selected_size_idx]

        size_images = [f"{i.distribution}: {i.name} ({i.slug})" for i in images]
        selected_image_idx = choose_option(channel, size_images, ask_prompt="Please choose droplet image: ")
        selected_size_images: Image = images[selected_image_idx]

        return selected_region.slug, selected_size.slug, selected_size_images.slug

    def destroy_server(self, meta: LBServerMeta, channel: Channel = None) -> bool:
        """Destroy the droplet whose raw info was saved in the server workspace.

        Returns the API's destroy result; progress messages are only sent when
        a channel is provided.
        """
        logger.info(f"try to destroy {meta.server_name} under workspace {meta.workspace}...")
        if channel:
            send_to_channel(channel, f"Destroy server {meta.server_name}...")
        data = self.load_raw_server(meta.workspace)

        droplet = Droplet.get_object(api_token=self.__token, droplet_id=data["id"])
        logger.info(f"get object from digitalocean: {droplet}")
        result = droplet.destroy()
        logger.info(f"destroy droplet, result: {result}")
        if channel:
            send_to_channel(channel, f"Destroy server {meta.server_name} done.")
        return result
class FootlooseProvider(BaseProvider):
    """Provider that backs lobbyboy servers with local footloose machines."""

    config = FootlooseConfig

    def is_available(self) -> bool:
        """Return True when the ``footloose`` executable can be run."""
        if self.check_command(["footloose", "-h"]):
            return True
        print(
            "footloose executable is not exist! "
            "Please install footloose via "
            "`GO111MODULE=on go get github.com/weaveworks/footloose`",
            file=sys.stderr,
        )
        return False

    def create_server(self, channel: Channel) -> LBServerMeta:
        """Render footloose.yaml into a fresh workspace and run ``footloose create``."""
        name = self.generate_default_server_name()
        logger.info("footloose generated server_name: %s", name)
        workspace = self.get_server_workspace(name)
        workspace.mkdir(exist_ok=True, parents=True)
        logger.info(f"create {self.name} server {name} workspace: {workspace}.")
        send_to_channel(channel, f"Generate server {name} workspace {workspace} done.")

        # Write the rendered footloose config into the server's workspace.
        rendered = self.provider_config.footloose_config.format(server_name=name)
        workspace.joinpath("footloose.yaml").write_text(rendered)

        proc = subprocess.Popen(["footloose", "create"], cwd=str(workspace))
        logger.debug("footloose create process: %s", proc.pid)

        def creation_finished():
            # Popen.poll() stays None while the process is still running.
            return proc.poll() is not None

        self.time_process_action(channel, creation_finished)
        if proc.returncode != 0:
            raise FootlooseException("footloose create failed!")
        return LBServerMeta(provider_name=self.name, server_name=name, workspace=workspace, server_host="127.0.0.1")

    def ssh_server_command(self, meta: LBServerMeta, pri_key_path: Path = None) -> List[str]:
        """Build the shell command used to enter the footloose machine."""
        command = [f"cd {meta.workspace} && footloose ssh root@{meta.server_name}0"]
        logger.debug("get ssh to server command for footloose: %s", command)
        return command

    def destroy_server(self, meta: LBServerMeta, channel: Channel = None) -> bool:
        """Delete the machines described by the workspace's footloose.yaml."""
        result = subprocess.run(
            ["footloose", "delete", "-c", meta.workspace.joinpath("footloose.yaml")], capture_output=True
        )
        if result.returncode == 0:
            return True
        logger.error(
            "fail to delete footloose server %s, returncode: %s, stdout: %s, stderr: %s",
            meta,
            result.returncode,
            result.stdout,
            result.stderr,
        )
        return False
class IgniteProvider(BaseProvider):
    """Provider backed by weaveworks ignite (Firecracker) micro-VMs."""

    config = IgniteConfig

    def is_available(self) -> bool:
        """Return True when the ``ignite`` executable can be run."""
        if self.check_command(["ignite", "-h"]):
            return True
        print(
            "ignite executable is not exist! "
            "Please install ignite via the instrution on "
            "https://ignite.readthedocs.io/en/stable/installation/",
            file=sys.stderr,
        )
        return False

    def create_server(self, channel: Channel) -> LBServerMeta:
        """Launch a new ignite VM sized from the provider configuration."""
        name = self.generate_default_server_name()
        logger.info("ignite generated server_name: %s", name)
        workspace = self.get_server_workspace(name)
        workspace.mkdir(exist_ok=True, parents=True)
        logger.info(f"create {self.name} server {name} workspace: {workspace}.")
        send_to_channel(channel, f"Generate server {name} workspace {workspace} done.")

        cfg = self.provider_config
        run_command = [
            "ignite",
            "run",
            cfg.image,
            "--name",
            name,
            "--cpus",
            str(cfg.cpu),
            "--memory",
            cfg.mem,
            "--size",
            cfg.disk,
            "--ssh",
        ]
        proc = subprocess.Popen(run_command, cwd=str(workspace))
        logger.debug("ignite create process: %s", proc.pid)

        def creation_finished():
            # Popen.poll() stays None while the ignite process is running.
            return proc.poll() is not None

        self.time_process_action(channel, creation_finished)
        if proc.returncode != 0:
            raise IgniteException("ignite create failed!")
        return LBServerMeta(provider_name=self.name, server_name=name, workspace=workspace, server_host="127.0.0.1")

    def ssh_server_command(self, meta: LBServerMeta, pri_key_path: Path = None) -> List[str]:
        """Build the shell command used to enter the ignite VM."""
        command = [f"cd {meta.workspace} && ignite ssh {meta.server_name}"]
        logger.debug("get ssh to server command for ignite: %s", command)
        return command

    def destroy_server(self, meta: LBServerMeta, channel: Channel = None) -> bool:
        """Force-remove the ignite VM; True on success, False (with log) otherwise."""
        result = subprocess.run(["ignite", "rm", "-f", meta.server_name], capture_output=True)
        if result.returncode == 0:
            return True
        logger.error(
            "fail to delete ignite server %s, returncode: %s, stdout: %s, stderr: %s",
            meta,
            result.returncode,
            result.stdout,
            result.stderr,
        )
        return False
import logging
import os
from dataclasses import dataclass, field
from pathlib import Path
from typing import List, Tuple

from linode_api4 import Image, Instance, LinodeClient, Region, Type
from paramiko.channel import Channel

from lobbyboy.config import LBConfigProvider, LBServerMeta
from lobbyboy.provider import BaseProvider
from lobbyboy.utils import choose_option, port_is_open, send_to_channel

logger = logging.getLogger(__name__)
ENV_TOKEN_NAME = "LINODE_TOKEN"  # nosec: false B105(hardcoded_password_string) by bandit


@dataclass
class LinodeConfig(LBConfigProvider):
    # Entries are "region:type:image" strings offered as quick choices
    # by _ask_user_customize_server (which splits them on ":").
    favorite_instance_types: List[str] = field(default_factory=list)


class LinodeProvider(BaseProvider):
    """Create and destroy Linode instances for lobbyboy sessions."""

    config = LinodeConfig

    def __init__(self, name: str, config: LBConfigProvider, workspace: Path):
        super().__init__(name, config, workspace)
        # The LINODE_TOKEN environment variable overrides the config-file token.
        self.__token = os.getenv(ENV_TOKEN_NAME) or config.api_token

    @staticmethod
    def linode_is_up(node: Instance) -> bool:
        """Return True once the instance's lifecycle state is "running".

        Bug fix: the original compared ``node.stats`` — linode_api4's CPU/IO
        statistics endpoint — against "running", which can never be equal, so
        the readiness poll never succeeded. The lifecycle state is ``status``.
        """
        try:
            is_success = node.status == "running"
            logger.debug(f"linode(name: {node.label}) status: {node.status}.")
            return is_success
        except Exception as e:
            logger.error(f"check linode {node.label} status failed: {str(e)}")
            return False

    def create_server(self, channel: Channel) -> LBServerMeta:
        """Interactively create a Linode instance and return its metadata.

        Asks the user for region/type/image over the ssh channel, prepares a
        per-server workspace and ssh keys, creates the instance, then waits
        for it to reach the running state before post-create preparation.
        """
        region_id, type_id, image_id = self._ask_user_customize_server(channel)
        server_name = self.generate_default_server_name()
        server_workspace = self.get_server_workspace(server_name)
        server_workspace.mkdir(exist_ok=True, parents=True)

        logger.info(f"create {self.name} server {server_name} workspace: {server_workspace}.")
        send_to_channel(channel, f"Generate server {server_name} workspace {server_workspace} done.")

        # confirm ssh key pairs
        ssh_keys = self.collection_ssh_keys(save_path=server_workspace)
        logger.info(f"prepare ssh key pairs for server {server_name} done.")

        logger.info(
            f"going to create a new node in linode... "
            f"server name={server_name}, region={region_id}, image={image_id}, type={type_id}"
        )

        send_to_channel(channel, "Waiting for server to created...")
        li = LinodeClient(token=self.__token)
        # instance_create may return the Instance alone or an (instance, password) tuple.
        res = li.linode.instance_create(type_id, region_id, image_id, label=server_name, authorized_keys=ssh_keys)
        linode_instance: Instance = res if isinstance(res, Instance) else res[0]
        self.time_process_action(channel, self.linode_is_up, interval=5, node=linode_instance)
        return self.prepare_after_server_created(channel, linode_instance, server_workspace, server_name)

    def prepare_after_server_created(
        self, channel: Channel, instance: Instance, workspace: Path, server_name: str
    ) -> LBServerMeta:
        """Persist the raw instance info, wait for ssh to come up, and build the meta."""
        # todo: support use startup script after server created
        send_to_channel(channel, f"New server {server_name} (IP: {instance.ipv4}) created!")

        # save server info to local first
        # TODO `_serialize` information le less
        instance_info = instance._serialize()  # noqa
        instance_info["id"] = instance.id
        self.save_raw_server(instance_info, workspace)  # noqa

        # wait for server to startup(check port is alive or not)
        send_to_channel(channel, "Waiting for server to boot...")
        self.time_process_action(channel, port_is_open, ip=instance.ipv4[0])
        send_to_channel(channel, f"Server {server_name} has boot successfully!")

        return LBServerMeta(
            provider_name=self.name,
            server_name=server_name,
            workspace=workspace,
            server_host=instance.ipv4[0],
        )

    def _ask_user_customize_server(self, channel: Channel) -> Tuple[str, str, str]:
        """Return a (region, type, image) triple picked by the user.

        Offers the configured favorites first; index 0 falls through to the
        fully manual menu-driven flow.
        """
        manually_create_choice = "Manually choose a new linode to create.."
        options = [manually_create_choice, *self.provider_config.favorite_instance_types]
        user_selected_idx = choose_option(channel, options, ask_prompt="Please choose new linode to create: ")
        user_selected = options[user_selected_idx]
        logger.info(f"choose linode, user selected: {user_selected_idx}: {user_selected}")
        if user_selected_idx == 0:
            return self._manually_create_new_node(channel)
        region_id, type_id, image_id = user_selected.split(":")
        return region_id, type_id, image_id

    def _manually_create_new_node(self, channel) -> Tuple[str, str, str]:
        """Walk the user through region/type/image menus built from the Linode API."""
        linode = LinodeClient(token=self.__token)

        send_to_channel(channel, "Fetching metadata from linode...")
        regions: List[Region] = [i for i in linode.regions()]
        types: List[Type] = [i for i in linode.linode.types()]
        images: List[Image] = [i for i in linode.images()]

        region_slugs = [f"{r.id:15} - {r.country:3} | status: {r.status:5}" for r in regions]
        selected_region_idx = choose_option(channel, region_slugs, ask_prompt="Please choose region: ")
        selected_region: Region = regions[selected_region_idx]

        size_slugs = [
            (
                f"{t.id:18} | Disk: {t.disk:10} | Mem: {t.memory:10} | Label: {t.label:35} | "
                f"Price($): hourly: {t.price.hourly:8}, monthly: {t.price.monthly:8}"
            )
            for t in types
        ]
        selected_type_idx = choose_option(channel, size_slugs, ask_prompt="Please choose linode size: ")
        selected_type: Type = types[selected_type_idx]

        size_images = [
            (
                f"{i.id:30} | size: {round(i.size/1024, 2):5}G | created: {str(i.created)} | "
                f"Deprecated: {i.deprecated:3} | Provider: {i.created_by:8}"
            )
            for i in images
        ]
        selected_image_idx = choose_option(channel, size_images, ask_prompt="Please choose linode image: ")
        selected_size_images: Image = images[selected_image_idx]

        return selected_region.id, selected_type.id, selected_size_images.id

    def destroy_server(self, meta: LBServerMeta, channel: Channel = None) -> bool:
        """Delete the instance recorded in the workspace; returns the API result."""
        logger.info(f"try to destroy {meta.server_name} under workspace {meta.workspace}...")
        if channel:
            send_to_channel(channel, f"Destroy server {meta.server_name}...")
        data = self.load_raw_server(meta.workspace)

        instance: Instance = LinodeClient(token=self.__token).linode.instances(Instance.id == data["id"]).first()
        logger.info(f"get object from linode: {instance}")
        result = instance.delete()
        logger.info(f"destroy linode, result: {result}")
        if channel:
            send_to_channel(channel, f"Destroy server {meta.server_name} done.")
        return result
    config = MultipassConfig

    def is_available(self) -> bool:
        """Return True when the ``multipass`` executable can be run."""
        if not self.check_command(["multipass", "-h"]):
            print(
                "multipass executable is not exist! " "Please install it via `snap install multipass`", file=sys.stderr
            )
            return False
        return True

    def create_server(self, channel: Channel) -> LBServerMeta:
        """Launch a multipass VM sized from the provider config and wait for it.

        The launch process is polled until it exits, then ``multipass info`` is
        consulted to confirm the VM actually reached the "Running" state.
        """
        server_name = self.generate_default_server_name()
        logger.info("multipass generated server_name: %s", server_name)
        server_workspace = self.get_server_workspace(server_name)
        server_workspace.mkdir(exist_ok=True, parents=True)
        logger.info(f"create {self.name} server {server_name} workspace: {server_workspace}.")
        send_to_channel(channel, f"Generate server {server_name} workspace {server_workspace} done.")

        multipass_create = subprocess.Popen(
            [
                "multipass",
                "launch",
                self.provider_config.image,
                "--name",
                server_name,
                "--cpus",
                str(self.provider_config.cpu),
                "--mem",
                self.provider_config.mem,
                "--disk",
                self.provider_config.disk,
            ],
            cwd=str(server_workspace),
        )
        logger.debug("multipass create process: %s", multipass_create.pid)

        def multipass_create_done():
            # Only consult `multipass info` after the launch process has exited;
            # poll() is None while it is still running.
            if multipass_create.poll() is not None:
                output = subprocess.check_output(
                    [
                        "multipass",
                        "info",
                        "--format",
                        "json",
                        server_name,
                    ]
                )
                info = json.loads(output)["info"]
                if info[server_name]["state"].lower() == "running":
                    return True
            return False

        self.time_process_action(channel, multipass_create_done)
        if multipass_create.returncode != 0:
            raise MultipassException("multipass create failed!")
        return LBServerMeta(
            provider_name=self.name, server_name=server_name, workspace=server_workspace, server_host="127.0.0.1"
        )

    def ssh_server_command(self, meta: LBServerMeta, pri_key_path: Path = None) -> List[str]:
        """Build the shell command used to enter the multipass VM."""
        command = ["cd {} && multipass shell {}".format(meta.workspace, meta.server_name)]

        logger.debug("get ssh to server command for multipass: %s", command)
        return command

    def destroy_server(self, meta: LBServerMeta, channel: Channel = None) -> bool:
        """Delete and purge the multipass VM; True on success, False (with log) otherwise."""
        process = subprocess.run(["multipass", "delete", "--purge", meta.server_name], capture_output=True)
        if process.returncode != 0:
            logger.error(
                "fail to delete multipass server %s, returncode: %s, stdout: %s, stderr: %s",
                meta,
                process.returncode,
                process.stdout,
                process.stderr,
            )
            return False
        return True
class VagrantProvider(BaseProvider):
    """Provider that backs lobbyboy servers with local Vagrant boxes."""

    config = VagrantConfig

    def __init__(self, name: str, config: LBConfigProvider, workspace: Path):
        super().__init__(name, config, workspace)
        # Path of the exported `vagrant ssh-config`; set by create_server.
        self._tmp_ssh_config_file: Optional[Path] = None

    def generate_server_name(self):
        """Return the first numbered workspace name (1..98, optionally prefixed) not in use."""
        vm_name = None
        for idx in range(1, 99):
            vm_name = str(idx)
            if self.provider_config.server_name_prefix:
                vm_name = f"{self.provider_config.server_name_prefix}-{vm_name}"
            server_workspace = self.workspace.joinpath(vm_name)
            if not server_workspace.exists():
                return vm_name
        raise NoAvailableNameException(f"{self.name}'s server {vm_name}[a-z] already exist!")

    def create_server(self, channel: Channel) -> LBServerMeta:
        """Render a Vagrantfile, run ``vagrant up``, and export its ssh_config."""
        server_name = self.generate_server_name()
        logger.info(f"got server name for vagrant: {server_name}.")
        server_workspace = self.get_server_workspace(server_name)
        server_workspace.mkdir(exist_ok=True, parents=True)

        logger.info(f"create {self.name} server {server_name} workspace: {server_workspace}.")
        send_to_channel(channel, f"Generate server {server_name} workspace {server_workspace} done.")

        with open(server_workspace.joinpath("Vagrantfile"), "w+") as f:
            f.write(self.provider_config.vagrantfile.format(boxname=server_name))

        vagrant_up_process = VagrantProvider._popen_vagrant(["vagrant", "up"], cwd=str(server_workspace))
        logger.debug("vagrant up process %s", vagrant_up_process.pid)

        def vagrant_up():
            # Fix: poll() is None while the process runs and returns the exit
            # code once it finishes. The original compared poll() == 0, so a
            # failed `vagrant up` (non-zero exit) never read as done and the
            # wait spun forever.
            return vagrant_up_process.poll() is not None

        self.time_process_action(channel, vagrant_up)
        if vagrant_up_process.returncode != 0:
            raise VagrantProviderException(f"vagrant up failed for {server_name}!")
        send_to_channel(channel, f"New server {server_name} created!")

        # export the ssh_config to file (fix: close the handle instead of leaking it)
        self._tmp_ssh_config_file = server_workspace.joinpath("ssh_config")
        with open(self._tmp_ssh_config_file, "wb+") as ssh_config_file:
            VagrantProvider._run_vagrant(
                command_exec=["vagrant", "ssh-config", server_name],
                cwd=str(server_workspace),
                stdout=ssh_config_file,
            )

        return LBServerMeta(
            provider_name=self.name,
            server_name=server_name,
            workspace=server_workspace,
            server_host="127.0.0.1",
        )

    def destroy_server(self, meta: LBServerMeta, channel: Channel = None) -> bool:
        """Destroy the vagrant machine registered for ``meta.server_name``."""
        vid = self._get_vagrant_machine_id(meta.server_name)
        return_code, _, _ = self._run_vagrant(["vagrant", "destroy", "-f", vid])
        # _run_vagrant raises on non-zero exit, so return_code is 0 here; the
        # check is kept as a defensive guard.
        success = return_code == 0
        if not success:
            raise VagrantProviderException("Error when destroy {}".format(vid))
        return success

    def ssh_server_command(self, meta: LBServerMeta, pri_key_path: Path = None) -> List[str]:
        """Return the ssh invocation that connects through the exported ssh_config."""
        command = [
            "ssh",
            "-F",
            self._tmp_ssh_config_file,
            *meta.ssh_extra_args,
            meta.server_name,
        ]
        logger.info(f"returning ssh command: {command}")
        return command

    @staticmethod
    def _run_vagrant(command_exec: list, cwd=None, stdout=None):
        """Run a vagrant command to completion.

        Returns (returncode, stdout, stderr); stdout/stderr are decoded
        strings, or None when the stream was redirected. Raises
        VagrantProviderException when vagrant exits non-zero.
        """
        p = VagrantProvider._popen_vagrant(command_exec, cwd, stdout)
        p.wait()
        returncode = p.returncode
        stdout = p.stdout
        if stdout is not None:
            stdout = stdout.read().decode()
        stderr = p.stderr
        if stderr is not None:
            stderr = stderr.read().decode()
        if returncode == 0:
            logger.info(f"vagrant_command SUCCESS, command={' '.join(command_exec)} stdout={stdout}, stderr={stderr}")
            return returncode, stdout, stderr
        # Fix: the failure branch previously logged "SUCCESS" and raised a bare
        # Exception; log it as a failure and raise the provider's error type.
        logger.error(f"vagrant_command FAILED, command={' '.join(command_exec)} stdout={stdout}, stderr={stderr}")
        raise VagrantProviderException(f"vagrant command failed: {' '.join(command_exec)}")

    @staticmethod
    def _popen_vagrant(command_exec: list, cwd=None, stdout=None):
        """Start a vagrant subprocess; stdout defaults to a pipe, stderr is piped."""
        cmd = " ".join(command_exec)
        logger.info(f"start to run command: {cmd}")
        if stdout is None:
            stdout = subprocess.PIPE
        vagrant_process = subprocess.Popen(command_exec, cwd=cwd, stdout=stdout, stderr=subprocess.PIPE, close_fds=True)
        return vagrant_process

    def _get_vagrant_machine_id(self, server_name):
        """Look up vagrant's machine id for ``server_name`` via `vagrant global-status`."""
        _, stdout, _ = self._run_vagrant(["vagrant", "global-status"])
        # Fix: _run_vagrant already returns decoded text; the original called
        # .decode() on a str, which raised AttributeError here.
        for line in stdout.split("\n"):
            if server_name in line:
                v_server_id = line.split(" ")[0]
                logger.debug(f"Find server_id={v_server_id} by server name {server_name}")
                return v_server_id
        raise VagrantProviderException(f"{server_name} not found in Vagrant!")
import os
from dataclasses import dataclass, field
from pathlib import Path
from typing import List, Tuple

from paramiko.channel import Channel
from pyvultr import VultrV2
from pyvultr.v2 import OS, Instance, Plan, Region, ReqInstance, SSHKey
from pyvultr.v2.enums import InstanceStatus

from lobbyboy.config import LBConfigProvider, LBServerMeta
from lobbyboy.provider import BaseProvider
from lobbyboy.utils import choose_option, port_is_open, send_to_channel

logger = logging.getLogger(__name__)
ENV_TOKEN_NAME = "VULTR_TOKEN"  # nosec: false B105(hardcoded_password_string) by bandit


@dataclass
class VultrConfig(LBConfigProvider):
    # Entries are "region:plan:os_id" strings offered as quick choices
    # by _ask_user_customize_server (which splits them on ":").
    favorite_instance_types: List[str] = field(default_factory=list)


class VultrProvider(BaseProvider):
    """Create and destroy Vultr instances through the pyvultr V2 client."""

    config = VultrConfig

    def __init__(self, name: str, config: LBConfigProvider, workspace: Path):
        super().__init__(name, config, workspace)
        # The VULTR_TOKEN environment variable overrides the config-file token.
        self.__token = os.getenv(ENV_TOKEN_NAME) or config.api_token
        self.client = VultrV2(self.__token)

    def instance_is_up(self, node_id: str) -> bool:
        """Return True once the instance reports ACTIVE; False on any API error."""
        try:
            node: Instance = self.client.instance.get(node_id)
            is_success = node.status == InstanceStatus.ACTIVE.value
            logger.debug(f"Vultr(name: {node.label}) status: {node.status}.")
            return is_success
        except Exception as e:
            logger.error(f"check vlutr {node_id} status failed: {str(e)}")
            return False

    def create_server(self, channel: Channel) -> LBServerMeta:
        """Interactively create a Vultr instance and return its metadata.

        Asks the user for region/plan/os over the ssh channel, uploads the ssh
        keys, creates the instance, then waits for it to become ACTIVE before
        post-create preparation.
        """
        region_id, plan_id, image_id = self._ask_user_customize_server(channel)
        server_name = self.generate_default_server_name()
        server_workspace = self.get_server_workspace(server_name)
        server_workspace.mkdir(exist_ok=True, parents=True)

        logger.info(f"create {self.name} server {server_name} workspace: {server_workspace}.")
        send_to_channel(channel, f"Generate server {server_name} workspace {server_workspace} done.")

        # confirm ssh key pairs
        ssh_keys = self.collection_ssh_keys(save_path=server_workspace)
        logger.info(f"prepare ssh key pairs for server {server_name} done.")

        logger.info(
            f"going to create a new node in vultr... "
            f"server name={server_name}, region={region_id}, image={image_id}, type={plan_id}"
        )

        send_to_channel(channel, "Waiting for server to created...")
        # Register each collected public key with Vultr so the new instance
        # accepts them at first boot.
        ssh_key_ids = []
        for idx, ssh_key in enumerate(ssh_keys):
            res: SSHKey = self.client.ssh_key.create(f"{server_name}-{idx}", ssh_key)
            ssh_key_ids.append(res.id)
        instance: Instance = self.client.instance.create(
            ReqInstance(
                region=region_id,
                plan=plan_id,
                os_id=int(image_id),
                label=server_name,
                tag=server_name,
                sshkey_id=ssh_key_ids or None,
            )
        )
        self.time_process_action(channel, self.instance_is_up, interval=5, node_id=instance.id)
        return self.prepare_after_server_created(channel, instance, server_workspace, server_name)

    def prepare_after_server_created(
        self, channel: Channel, instance: Instance, workspace: Path, server_name: str
    ) -> LBServerMeta:
        """Persist the raw instance info, wait for ssh to come up, and build the meta."""
        # refresh instance info, in case of some data of instance is missing.
        instance: Instance = self.client.instance.get(instance.id)

        send_to_channel(channel, f"New server {server_name} (IP: {instance.main_ip}) created!")

        # save server info to local first
        self.save_raw_server(instance.to_dict(), workspace)

        # wait for server to startup(check port is alive or not)
        send_to_channel(channel, "Waiting for server to boot...")
        self.time_process_action(channel, port_is_open, ip=instance.main_ip)
        send_to_channel(channel, f"Server {server_name} has boot successfully!")

        return LBServerMeta(
            provider_name=self.name,
            server_name=server_name,
            workspace=workspace,
            server_host=instance.main_ip,
        )

    def _ask_user_customize_server(self, channel: Channel) -> Tuple[str, str, str]:
        """Return a (region, plan, os_id) triple picked by the user.

        Offers the configured favorites first; index 0 falls through to the
        fully manual menu-driven flow.
        """
        manually_create_choice = "Manually choose a new vultr to create.."
        options = [manually_create_choice, *self.provider_config.favorite_instance_types]
        user_selected_idx = choose_option(channel, options, ask_prompt="Please choose new vultr to create: ")
        user_selected = options[user_selected_idx]
        logger.info(f"choose vultr, user selected: {user_selected_idx}: {user_selected}")
        if user_selected_idx == 0:
            return self._manually_create_new_node(channel)
        region_id, plan_id, image_id = user_selected.split(":")
        return region_id, plan_id, image_id

    def _manually_create_new_node(self, channel) -> Tuple[str, str, str]:
        """Walk the user through region/plan/os menus built from the Vultr API."""
        send_to_channel(channel, "Fetching metadata from vultr...")
        regions: List[Region] = [i for i in self.client.region.list()]
        plans: List[Plan] = [i for i in self.client.plan.list()]
        oses: List[OS] = [i for i in self.client.operating_system.list()]

        region_slugs = [f"{r.id:3} - {r.city:15} | country: {r.country:3} | Position: {r.continent:5}" for r in regions]
        selected_region_idx = choose_option(channel, region_slugs, ask_prompt="Please choose region: ")
        selected_region: Region = regions[selected_region_idx]

        size_slugs = [
            (
                f"{t.id:15} | Disk: {t.disk:5} GB, Disk count: {t.disk_count} | Mem: {t.ram:6} MB | "
                f"Month Price($): {t.monthly_cost:5}"
            )
            for t in plans
        ]
        selected_plan_idx = choose_option(channel, size_slugs, ask_prompt="Please choose vultr plan: ")
        selected_plan: Plan = plans[selected_plan_idx]

        os_options = [f"{i.id:4} | arch: {i.arch:5} | family: {i.family:15} | name: {i.name}" for i in oses]
        selected_image_idx = choose_option(channel, os_options, ask_prompt="Please choose vultr image: ")
        selected_images: OS = oses[selected_image_idx]

        return selected_region.id, selected_plan.id, str(selected_images.id)

    def destroy_server(self, meta: LBServerMeta, channel: Channel = None) -> bool:
        """Delete the instance recorded in the workspace; True when the API accepts."""
        logger.info(f"try to destroy {meta.server_name} under workspace {meta.workspace}...")
        if channel:
            send_to_channel(channel, f"Destroy server {meta.server_name}...")
        data = self.load_raw_server(meta.workspace)

        failed = self.client.instance.delete(instance_id=data["id"])
        success = not failed
        logger.info(f"destroy vultr, result: {success}")
        if channel:
            send_to_channel(channel, f"Destroy server {meta.server_name} done.")
        return success
NoAvailableNameException(ProviderException): 26 | pass 27 | 28 | 29 | class NoProviderException(ProviderException): 30 | pass 31 | 32 | 33 | class NoTTYException(ProviderException): 34 | pass 35 | 36 | 37 | class VagrantProviderException(ProviderException): 38 | pass 39 | 40 | 41 | class CantEnsureBytesException(ProviderException): ... 42 | -------------------------------------------------------------------------------- /lobbyboy/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import argparse 4 | import logging 5 | import socket 6 | import sys 7 | import threading 8 | import traceback 9 | from pathlib import Path 10 | from typing import Dict 11 | 12 | from lobbyboy.config import LBConfig 13 | from lobbyboy.provider import BaseProvider 14 | from lobbyboy.server_killer import ServerKiller 15 | from lobbyboy.socket_handle import SocketHandlerThread 16 | from lobbyboy.utils import confirm_ssh_key_pair, to_seconds 17 | 18 | # TODO generate all keys when start, if key not exist. 19 | # TODO fix server threading problems (no sleep!) 
def prepare_socket(listen_ip: str, listen_port: int) -> socket.socket:
    """Create a TCP socket, bind it and start listening.

    Args:
        listen_ip: address to bind.
        listen_port: port to bind (0 lets the OS pick a free port).

    Returns:
        socket.socket: a socket already in the listening state.

    On bind/listen failure the process exits with status 1: lobbyboy
    cannot do anything useful without its listen socket.
    """
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # allow fast restarts: reuse the address even while in TIME_WAIT
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        logger.info("start listen on %s:%s...", listen_ip, listen_port)
        sock.bind((listen_ip, listen_port))
    except Exception as e:
        logger.error(f"*** Bind failed: {e}")
        traceback.print_exc()
        sys.exit(1)

    try:
        sock.listen(100)
    except Exception as e:
        logger.error(f"*** Listen failed: {e}")
        traceback.print_exc()
        # fix: don't leak the bound socket on listen failure
        sock.close()
        sys.exit(1)
    logger.info(f"Listening for connection on {listen_ip}:{listen_port} ...")
    return sock
def generate_default_server_name(self):
    """Generate a unique server name and create its workspace directory.

    The name is the current timestamp, optionally prefixed with the
    configured server_name_prefix; on collision a single-letter suffix
    a-z is appended.

    Returns:
        str: the chosen server name; its workspace directory now exists.

    Raises:
        NoAvailableNameException: all 27 candidate names are taken.
    """
    base_name = datetime.now().strftime("%Y-%m-%d-%H%M")
    if self.provider_config.server_name_prefix:
        base_name = f"{self.provider_config.server_name_prefix}-{base_name}"

    for suffix in ["", *string.ascii_lowercase]:
        candidate = f"{base_name}{suffix}"
        try:
            # EAFP: mkdir() itself is the uniqueness check, which closes
            # the exists()/mkdir() race of the previous implementation.
            self.workspace.joinpath(candidate).mkdir(parents=True)
        except FileExistsError:
            continue
        return candidate
    raise NoAvailableNameException(f"{self.name}'s server {base_name}[a-z] already exist!")
@staticmethod
def load_raw_server(server_workspace: Path):
    """Load the raw provider server object saved in a server's workspace.

    Args:
        server_workspace: directory containing the SERVER_FILE json dump
            (written by save_raw_server).

    Returns:
        the deserialized server object.
    """
    _path = server_workspace.joinpath(SERVER_FILE)
    # fix: open read-only ("r") -- the previous "r+" asked for write
    # access we never use and fails on read-only files/filesystems.
    with open(_path, "r") as f:
        logger.debug(f"load server data from {_path}")
        return json.load(f)
119 | 120 | @abstractmethod 121 | def destroy_server(self, meta: LBServerMeta, channel: Channel = None) -> bool: 122 | """ 123 | Args: 124 | meta: LBServerMeta, we use this to locate one server then destroy it. 125 | channel: Note that the channel can be None. 126 | If called from server_killer, channel will be None. 127 | if called when user logout from server, channel is active. 128 | 129 | Returns: 130 | bool: True if destroy successfully, False if not. 131 | """ 132 | ... 133 | 134 | def collection_ssh_keys(self, generate: bool = True, save_path: Path = None) -> List[str]: 135 | ssh_keys = self.provider_config.extra_ssh_keys[::] or [] 136 | if generate: 137 | ssh_key_path = (save_path or self.workspace).joinpath(".ssh") 138 | _, pub_key = confirm_ssh_key_pair(save_path=ssh_key_path) 139 | ssh_keys.append(pub_key) 140 | return ssh_keys 141 | 142 | def default_private_key_path(self, workspace: Path = None, key_type: KeyTypeSupport = KeyTypeSupport.RSA) -> Path: 143 | workspace = workspace or self.workspace 144 | return workspace.joinpath(f".ssh/id_{key_type.key.lower()}") 145 | 146 | def ssh_server_command(self, meta: LBServerMeta, pri_key_path: Path = None) -> List[str]: 147 | """ 148 | Args: 149 | meta: LBServerMeta 150 | pri_key_path: path to private key 151 | 152 | Returns: 153 | str: ssh command to connect to provider's server. 154 | """ 155 | _pri_key_path = pri_key_path or self.default_private_key_path(meta.workspace) 156 | command = [ 157 | "ssh", 158 | "-i", 159 | str(_pri_key_path), 160 | "-o", 161 | "StrictHostKeyChecking=no", 162 | "-p", 163 | str(meta.server_port), 164 | "-l", 165 | meta.server_user, 166 | *meta.ssh_extra_args, 167 | meta.server_host, 168 | ] 169 | logger.info(f"returning ssh command: {command}") 170 | return command 171 | 172 | def get_bill(self): ... 
def check_command(self, command: List[str]) -> bool:
    """Run *command* and report whether it exited successfully.

    Args:
        command: argument vector to execute.

    Returns:
        bool: True when the command ran and returned exit code 0; False
        when the executable is missing/not runnable or exited non-zero.
    """
    try:
        process = subprocess.run(command, capture_output=True)
    except OSError:
        # broader than the previous FileNotFoundError: also covers
        # PermissionError / NotADirectoryError for broken executables
        return False
    return process.returncode == 0
@staticmethod
def _get_key_class(key_type):
    """Map an SSH key-type name (as returned by PKey.get_name()) to the
    paramiko key class that parses it.

    Args:
        key_type: lower-cased key type string, e.g. "ssh-rsa",
            "ssh-ed25519" or "ecdsa-sha2-nistp256".

    Raises:
        UnsupportedPrivateKeyTypeException: unrecognized key type.
    """
    key_cls = {
        "ssh-rsa": paramiko.RSAKey,
        "ssh-dss": paramiko.DSSKey,
        "ssh-ecdsa": paramiko.ECDSAKey,
        "ssh-ed25519": paramiko.Ed25519Key,
    }
    if key_type in key_cls:
        return key_cls[key_type]
    # Bug fix: real ECDSA keys identify themselves as
    # "ecdsa-sha2-nistp256/384/521", never "ssh-ecdsa", so without this
    # branch ECDSA public keys could never authenticate.
    if key_type.startswith("ecdsa-sha2-"):
        return paramiko.ECDSAKey
    raise UnsupportedPrivateKeyTypeException(f"Unknown key type {key_type}")
def check_channel_shell_request(self, channel):
    """Accept the client's shell request once a PTY has been allocated.

    Waits up to 10s for the pty request to arrive first; a shell without
    a tty is not supported by lobbyboy, so fail hard in that case.

    Raises:
        NoTTYException: the client never requested a pty.
    """
    logger.info("client request shell...")
    got_pty = self.pty_event.wait(timeout=10)
    if not got_pty:
        logger.error("Client never ask a tty, can not allocate shell...")
        raise NoTTYException("No TTY")
    self.shell_event.set()
    return True
def check_channel_window_change_request(self, channel, width, height, pixelwidth, pixelheight):
    """Propagate a terminal resize from the ssh client.

    Caches the new window size, applies it to our pty master, and -- when
    a proxy ssh subprocess is already running -- forwards SIGWINCH so the
    remote side can redraw at the new size.
    """
    logger.debug(
        f"client send window size change request... "
        f"width={width}, height={height}, pixelwidth={pixelwidth}, pixelheight={pixelheight}, "
        f"my proxy_subprocess_pid={self.proxy_subprocess_pid}, master_fd={self.master_fd}"
    )
    self.window_width = width
    self.window_height = height
    set_window_size(self.master_fd, self.window_height, self.window_width, pixelwidth, pixelheight)

    pid = self.proxy_subprocess_pid
    if pid is not None:
        logger.debug(f"send signal to {pid}")
        os.kill(pid, signal.SIGWINCH)
    return True
def check_all_live_servers(self):
    """Run one destroy-check pass over every server recorded locally.

    Loads the servers db and, for each entry, asks need_destroy();
    servers that should go are destroyed immediately.

    Raises:
        Exception: a recorded server references an unknown provider.
    """
    metas: OrderedDict[str, LBServerMeta] = load_local_servers(self.servers_db_path)

    for server_name, meta in metas.items():
        provider: BaseProvider = self.watched_providers.get(meta.provider_name)
        if not provider:
            # Fail loudly: without its provider this server can never be
            # destroyed, which would leak paid instances.
            # Fix: the previous code raised a bare `Exception` with no message.
            message = f"can't find provider of server {meta.server_name}, destroy check failed."
            logger.error(message)
            raise Exception(message)

        need_to_be_destroy, reason = self.need_destroy(provider, meta)
        logger.info(f"{server_name} need to be destroyed? {need_to_be_destroy}, reason: {reason}.")
        if need_to_be_destroy:
            self.destroy(provider, meta)
def destroy(self, provider: BaseProvider, meta: LBServerMeta, channel: Channel = None):
    """Destroy one server through its provider, then drop it from the
    local servers db.

    Args:
        provider: provider instance that owns the server.
        meta: LBServerMeta locating the server to destroy.
        channel: optional paramiko Channel; when not None, destroy
            progress may be echoed to the user (None when called from the
            background killer thread).

    Raises:
        Exception: when the server is flagged as not managed by lobbyboy.
    """
    if not meta.manage:
        raise Exception(f"destroy failed, provider {provider.name} server {meta.server_name} not manage by me!")
    provider.destroy_server(meta, channel)
    # hold the db lock so concurrent session threads don't race this write
    with available_server_db_lock:
        update_local_servers(self.servers_db_path, deleted=[meta])
def choose_providers(self) -> BaseProvider:
    """Ask the connected user which provider should create the new server.

    When exactly one provider is configured it becomes the default, so
    the user can simply hit enter.

    Raises:
        NoProviderException: no providers are configured/available.
    """
    if not self.providers:
        send_to_channel(self.channel, "There is no available providers.")
        raise NoProviderException("Do not have available providers to provision a new server!")

    names = list(self.providers.keys())
    if len(names) == 1:
        default = 0
        ask_prompt = f"Please choose a provider to create a new server [default: {names[0]}]: "
    else:
        default = None
        ask_prompt = "Please choose a provider to create a new server: "
    chosen = choose_option(
        self.channel,
        names,
        option_prompt="Available VPS providers:",
        ask_prompt=ask_prompt,
        default=default,
    )
    return list(self.providers.values())[chosen]
def _ask_user_to_create_server(self) -> LBServerMeta:
    """Provision a brand-new server: pick a provider, create the server,
    and register it in the local servers db.

    Returns:
        LBServerMeta: metadata of the freshly created server.
    """
    provider: BaseProvider = self.choose_providers()
    meta: LBServerMeta = provider.create_server(self.channel)

    # register the new server so other sessions and the killer thread can
    # see it; the lock serializes db writes across threads
    with available_server_db_lock:
        update_local_servers(self.config.servers_db_path, new=[meta])
    return meta
def prepare_server(self, t: Transport, key_type: KeyTypeSupport = KeyTypeSupport.RSA) -> Optional[Server]:
    """Run the server side of the SSH handshake on transport ``t``.

    Loads moduli (for group-exchange kex), installs the host key, starts
    paramiko's server machinery, and waits up to 20s for the client to
    open a session channel (stored on ``self.channel``).

    Returns:
        Optional[Server]: the Server on success; None when SSH
        negotiation fails or the client never opens a channel.
    """
    try:
        t.load_server_moduli()
    except:  # noqa
        logger.error("(Failed to load moduli -- gex will be unsupported.)")
        raise

    pri, _ = confirm_ssh_key_pair(key_type=key_type, save_path=self.config.data_dir, key_name="ssh_host_rsa_key")
    # NOTE(review): the key material is always parsed as RSA (and saved
    # under the name "ssh_host_rsa_key") even though key_type is a
    # parameter -- confirm whether non-RSA host keys are really supported.
    host_key = paramiko.RSAKey.from_private_key(StringIO(pri))

    logger.info("Read host key: " + hexlify(host_key.get_fingerprint()).decode())
    t.add_server_key(host_key)

    server = Server(self.config)
    try:
        t.start_server(server=server)
    except paramiko.SSHException:
        logger.error("*** SSH negotiation failed.")
        logger.error(f"close the transport now... {t}")
        return

    # the client must open a session channel for us to serve a shell
    self.channel = t.accept(timeout=20)
    if self.channel is None:
        logger.error("Client never open a new channel, close transport now...")
        return
    return server
def user_using(self, server: Server, proxy_subprocess: Popen):
    """Pump bytes between the user's ssh channel and the proxy pty until
    the proxy ssh subprocess exits.

    Args:
        server: our paramiko Server, which owns the pty master fd.
        proxy_subprocess: the `ssh` child process attached to the pty slave.
    """
    channel_fd = self.channel.fileno()
    master_fd = server.master_fd
    while proxy_subprocess.poll() is None:
        # 0.1s select timeout so the poll() above re-checks child liveness
        r, *_ = select.select([master_fd, channel_fd], [], [], 0.1)
        if master_fd in r:
            # output from the proxied ssh -> forward raw to the user
            send_to_channel(self.channel, os.read(master_fd, 10240), suffix=b"")
        elif channel_fd in r:
            # user keystrokes -> feed the proxy's pty
            os.write(master_fd, self.channel.recv(10240))
217 | need_destroy, reason = self.killer.need_destroy(provider, server) 218 | send_to_channel(self.channel, f"LobbyBoy: This server {reason}.") 219 | if not need_destroy: 220 | return 221 | 222 | send_to_channel(self.channel, f"LobbyBoy: I will destroy {server.server_name}({server.server_host}) now!") 223 | self.killer.destroy(provider, server, self.channel) 224 | send_to_channel( 225 | self.channel, 226 | f"LobbyBoy: Server {server.server_name}({server.server_host}) has been destroyed.", 227 | ) 228 | 229 | @staticmethod 230 | def remove_server_session(transport: Transport, server_name: str): 231 | peer_name = transport.getpeername() 232 | with active_session_lock: 233 | sessions = active_session.get(server_name) 234 | if sessions: 235 | active_session[server_name] = list(filter(lambda x: x.getpeername() != peer_name, sessions)) 236 | 237 | def run(self): 238 | logger.info( 239 | f"start new thread " 240 | f"handle {self.socket_client}, " 241 | f"address: {self.client_address}, " 242 | f"my thread id={threading.get_ident()}" 243 | ) 244 | t = Transport(self.socket_client, gss_kex=DoGSSAPIKeyExchange) 245 | try: 246 | t.set_gss_host(socket.getfqdn()) 247 | server = self.prepare_server(t) 248 | if not (server and self.channel): 249 | self.cleanup(t) 250 | return 251 | 252 | send_to_channel(self.channel, f"Welcome to LobbyBoy {__version__}!") 253 | lb_server, proxy_subprocess = self.prepare_shell_env(server, t) 254 | if not (proxy_subprocess and lb_server): 255 | logger.error("failed to create proxy subprocess or lb_server") 256 | self.cleanup(t, meta=lb_server) 257 | return 258 | 259 | self.user_using(server, proxy_subprocess) 260 | send_to_channel( 261 | self.channel, 262 | f"LobbyBoy: SSH to remote server {lb_server.server_name} closed.", 263 | ) 264 | self.cleanup(t, meta=lb_server, check_destroy=True) 265 | except Exception: # noqa 266 | logger.critical("*** Socket thread error.", exc_info=True) 267 | self.cleanup(t) 268 | 
def encoder_factory(
    date_fmt: str = "%Y-%m-%d",
    dt_fmt: str = "%Y-%m-%d %H:%M:%S",
    decimal_factory: Callable = str,
    path_factory: Callable = str,
    dataclass_factory: Callable = asdict,
    *,
    raise_error: bool = True,
):
    """Build a json-style ``default`` encoder for additional types.

    Handles datetime/date (strftime-formatted), Decimal, Path and
    dataclasses; anything else raises TypeError unless ``raise_error`` is
    False, in which case the object is returned unchanged.
    """

    def encoder(obj):
        # datetime must be tested before date: datetime is a date subclass
        if isinstance(obj, datetime):
            return obj.strftime(dt_fmt)
        if isinstance(obj, date):
            return obj.strftime(date_fmt)
        if isinstance(obj, Decimal):
            return decimal_factory(obj)
        if isinstance(obj, Path):
            return path_factory(obj)
        if is_dataclass(obj):
            return dataclass_factory(obj)
        if not raise_error:
            return obj
        raise TypeError("%r is not JSON serializable" % obj)

    return encoder
encoder 68 | 69 | 70 | def dict_factory(d: Dict, ignore_fields: List[str] = None, ignore_rule: Callable = None, encoder: Type = None) -> Dict: 71 | _ignore_fields = ignore_fields or [] 72 | _ignore_rule = ignore_rule or (lambda x: False) 73 | filtered_dict = {} 74 | for k, v in d.items(): 75 | if any( 76 | ( 77 | k is None, 78 | k in _ignore_fields, 79 | _ignore_rule(k), 80 | ) 81 | ): 82 | continue 83 | filtered_dict[k] = encoder(v) if encoder else v 84 | return filtered_dict 85 | 86 | 87 | def port_is_open(ip: str, port: int = 22) -> bool: 88 | a_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 89 | a_socket.settimeout(2) 90 | result = a_socket.connect_ex((ip, port)) 91 | a_socket.close() 92 | return result == 0 93 | 94 | 95 | def to_seconds(time_str: str) -> int: 96 | """ 97 | Args: 98 | time_str: str, 10s, 10m, 10h, 10d 99 | 100 | Returns: 101 | int: seconds 102 | """ 103 | if time_str == "0": 104 | return 0 105 | for unit, sec in UNIT_SEC_PAIRS.items(): 106 | re_time_str = r"^(\d+){unit}$".format(unit=unit) 107 | matched = re.match(re_time_str, time_str) 108 | if matched: 109 | return int(matched.group(1)) * sec 110 | raise TimeStrParseTypeException(f"Can not parse {time_str}") 111 | 112 | 113 | def humanize_seconds(seconds: int): 114 | """human-readable, eg: 364121 -> '4 days, 5:08:41'""" 115 | return str(timedelta(seconds=seconds)) 116 | 117 | 118 | def ensure_bytes(s: Union[str, bytes]) -> bytes: 119 | if isinstance(s, str): 120 | return s.encode() 121 | if isinstance(s, bytes): 122 | return s 123 | raise CantEnsureBytesException() 124 | 125 | 126 | def send_to_channel( 127 | channel: Channel, msg: Union[str, bytes] = b"", prefix: Union[str, bytes] = b"", suffix: Union[str, bytes] = b"\r\n" 128 | ): 129 | msg = ensure_bytes(msg) 130 | prefix = ensure_bytes(prefix) 131 | suffix = ensure_bytes(suffix) 132 | 133 | buf = bytearray(prefix) 134 | buf.extend(msg) 135 | buf.extend(suffix) 136 | channel.sendall(buf) 137 | 138 | 139 | def 
confirm_dc_type(value: Any, should_be: Type): 140 | """ 141 | confirm that the value is of the correct type during dataclass conversion, try converting it if it is not 142 | consider replacing it with [dacite](https://github.com/konradhalas/dacite/) 143 | 144 | Args: 145 | value: the value that we want to check 146 | should_be: the value type we expect or convert to 147 | 148 | Returns: 149 | value: the value that was passed in, converted if needed 150 | """ 151 | if isinstance(value, should_be): 152 | return value 153 | elif isinstance(value, dict): 154 | return should_be(**value) 155 | logger.error(f"{should_be.__name__} unable to confirm type of {value}, type: {type(value)}") 156 | return value 157 | 158 | 159 | def choose_option( 160 | channel: Channel, 161 | options: List[str], 162 | option_prompt: str = None, 163 | ask_prompt: str = None, 164 | default: Optional[int] = None, 165 | ) -> int: 166 | """ 167 | ask user to choose one option from channel 168 | """ 169 | logger.info(f"need user to choose from {options}\nprompt: {option_prompt}") 170 | send_to_channel(channel, option_prompt or "Available options:") 171 | for index, option in enumerate(options): 172 | send_to_channel(channel, f"{index:>3} - {option}") 173 | _ask_prompt = ask_prompt or f"Please enter the number of choice[{0}-{len(options) - 1}]: " 174 | send_to_channel(channel, _ask_prompt, suffix=b"") 175 | 176 | result = read_user_input_line(channel) 177 | try: 178 | if result == "" and default is not None: 179 | # User hits enter directly 180 | num_selected = default 181 | else: 182 | num_selected = int(result) 183 | if 0 <= num_selected < len(options): 184 | logger.info(f"user choose {result} for option {option_prompt}") 185 | send_to_channel(channel, f"You selected: {options[num_selected]}") 186 | return num_selected 187 | raise Exception(f"user choose {result} for option {option_prompt}, but it's out of range") 188 | except Exception: # noqa 189 | logger.error(f"user choose {result} for option 
{option_prompt} invalid, re-choose...") 190 | send_to_channel(channel, f"unknown choice, please choose again [{0}-{len(options) - 1}]") 191 | return choose_option(channel, options, option_prompt=option_prompt, ask_prompt=ask_prompt) 192 | 193 | 194 | def read_user_input_line(channel) -> str: 195 | # TODO do not support del 196 | # receive user input correctly: see [ANSI escape code](https://en.wikipedia.org/wiki/ANSI_escape_code) 197 | chars = [] 198 | while 1: 199 | content = channel.recv(1) 200 | logger.debug(f"channel recv: {content}") 201 | if content in [b"\x04", b"\x03"]: 202 | raise UserCancelException() 203 | elif content == b"\r": 204 | send_to_channel(channel) 205 | break 206 | elif content in [b"\x7f"]: 207 | send_to_channel(channel, "\x08\x1b[K", suffix=b"") 208 | continue 209 | else: 210 | send_to_channel(channel, content, suffix=b"") 211 | chars.append(content) 212 | return b"".join(chars).decode() 213 | 214 | 215 | @unique 216 | class KeyTypeSupport(Enum): 217 | # openssh ssh-keygen: The default length is 3072 bits (RSA) or 256 bits (ECDSA). 
218 | # todo default length of DSS/ED25519 219 | RSA = "RSA", 3072 220 | DSS = "DSS", 1024 221 | ED25519 = "ED25519", 256 222 | ECDSA = "ECDSA", 256 223 | 224 | def __init__(self, key, default_key_length=None): 225 | super().__init__() 226 | self._key = key 227 | self.default_key_length = default_key_length 228 | 229 | @property 230 | def key(self) -> str: 231 | return self._key 232 | 233 | 234 | def confirm_ssh_key_pair( 235 | save_path: Path, key_type: KeyTypeSupport = KeyTypeSupport.RSA, key_len: int = None, key_name: str = None 236 | ): 237 | pri_key, pub_key = try_load_key_from_file(from_path=save_path, key_type=key_type, key_name=key_name) 238 | if pri_key and pub_key: 239 | return pri_key, pub_key 240 | 241 | pri_key, pub_key = generate_ssh_key_pair(key_type=key_type, key_len=key_len) 242 | if not save_path: 243 | return pri_key, pub_key 244 | return write_key_to_file(pri_key, pub_key, key_type=key_type, save_path=save_path, key_name=key_name) 245 | 246 | 247 | def generate_ssh_key_pair(key_type: KeyTypeSupport = KeyTypeSupport.RSA, key_len: int = None) -> Tuple[str, str]: 248 | _key_length = key_len or key_type.default_key_length 249 | if not _key_length: 250 | raise UnsupportedPrivateKeyTypeException() 251 | 252 | if key_type == KeyTypeSupport.RSA: 253 | key = paramiko.RSAKey.generate(bits=_key_length) 254 | elif key_type == KeyTypeSupport.DSS: 255 | key = paramiko.DSSKey.generate(bits=_key_length) 256 | elif key_type == KeyTypeSupport.ECDSA: 257 | key = paramiko.ECDSAKey.generate(bits=_key_length) 258 | elif key_type == KeyTypeSupport.Ed25519: 259 | # Todo 260 | raise UnsupportedPrivateKeyTypeException() 261 | else: 262 | raise UnsupportedPrivateKeyTypeException() 263 | 264 | out = StringIO() 265 | key.write_private_key(out) 266 | return out.getvalue(), f"{key.get_name()} {key.get_base64()}" 267 | 268 | 269 | def write_key_to_file( 270 | pri_key: str, pub_key: str, key_type: KeyTypeSupport, save_path: Path, key_name: str = None 271 | ) -> Tuple[str, str]: 
272 | save_path.mkdir(parents=True, exist_ok=True, mode=0o700) 273 | if not key_name: 274 | key_name = f"id_{key_type.key.lower()}" 275 | private_key_file = save_path.joinpath(key_name) 276 | public_key_file = save_path.joinpath(f"{key_name}.pub") 277 | 278 | if private_key_file.exists() and public_key_file.exists(): 279 | logger.info(f"ssh key pair exists: {private_key_file}/{public_key_file}, skip generation.") 280 | return pri_key, pub_key 281 | 282 | with open(private_key_file, "w+") as f: 283 | f.write(pri_key) 284 | os.chmod(private_key_file, 0o600) 285 | with open(public_key_file, "w+") as f: 286 | f.write(pub_key) 287 | os.chmod(public_key_file, 0o600) 288 | return pri_key, pub_key 289 | 290 | 291 | def try_load_key_from_file( 292 | from_path: Path, key_type: KeyTypeSupport, raise_error: bool = False, key_name: str = None 293 | ) -> Tuple[str, str]: 294 | if not key_name: 295 | key_name = f"id_{key_type.key.lower()}" 296 | private_key_file = from_path.joinpath(key_name) 297 | public_key_file = from_path.joinpath(f"{key_name}.pub") 298 | 299 | pri_key = pub_key = "" 300 | if private_key_file.exists(): 301 | with open(private_key_file, "r+") as f: 302 | pri_key = f.read() 303 | if public_key_file.exists(): 304 | with open(public_key_file, "r+") as f: 305 | pub_key = f.read() 306 | 307 | if not (pri_key and pub_key) and raise_error: 308 | raise FileNotFoundError(f"ssh key pair not found: {private_key_file}/{public_key_file}") 309 | return pri_key, pub_key 310 | 311 | 312 | def import_class(cls_path: str = ""): 313 | source = cls_path.split("::") 314 | if len(source) != 2: 315 | return 316 | module_path, cls_name = source 317 | module = importlib.import_module(module_path) 318 | return getattr(module, cls_name, None) 319 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "atomicwrites" 3 | version = "1.4.0" 4 
| description = "Atomic file writes." 5 | category = "dev" 6 | optional = false 7 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 8 | 9 | [[package]] 10 | name = "attrs" 11 | version = "21.2.0" 12 | description = "Classes Without Boilerplate" 13 | category = "dev" 14 | optional = false 15 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 16 | 17 | [package.extras] 18 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] 19 | docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] 20 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] 21 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] 22 | 23 | [[package]] 24 | name = "backports.entry-points-selectable" 25 | version = "1.1.1" 26 | description = "Compatibility shim providing selectable entry points for older implementations" 27 | category = "main" 28 | optional = false 29 | python-versions = ">=2.7" 30 | 31 | [package.dependencies] 32 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 33 | 34 | [package.extras] 35 | docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] 36 | testing = ["pytest", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-checkdocs (>=2.4)", "pytest-enabler (>=1.0.1)"] 37 | 38 | [[package]] 39 | name = "bcrypt" 40 | version = "3.2.0" 41 | description = "Modern password hashing for your software and your servers" 42 | category = "main" 43 | optional = false 44 | python-versions = ">=3.6" 45 | 46 | [package.dependencies] 47 | cffi = ">=1.1" 48 | six = ">=1.4.1" 49 | 50 | [package.extras] 51 | tests = ["pytest (>=3.2.1,!=3.3.0)"] 52 | typecheck = ["mypy"] 53 | 54 | 
[[package]] 55 | name = "black" 56 | version = "21.9b0" 57 | description = "The uncompromising code formatter." 58 | category = "dev" 59 | optional = false 60 | python-versions = ">=3.6.2" 61 | 62 | [package.dependencies] 63 | click = ">=7.1.2" 64 | mypy-extensions = ">=0.4.3" 65 | pathspec = ">=0.9.0,<1" 66 | platformdirs = ">=2" 67 | regex = ">=2020.1.8" 68 | tomli = ">=0.2.6,<2.0.0" 69 | typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\""} 70 | typing-extensions = [ 71 | {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, 72 | {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, 73 | ] 74 | 75 | [package.extras] 76 | colorama = ["colorama (>=0.4.3)"] 77 | d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"] 78 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 79 | python2 = ["typed-ast (>=1.4.2)"] 80 | uvloop = ["uvloop (>=0.15.2)"] 81 | 82 | [[package]] 83 | name = "bump2version" 84 | version = "1.0.1" 85 | description = "Version-bump your software with a single command!" 86 | category = "dev" 87 | optional = false 88 | python-versions = ">=3.5" 89 | 90 | [[package]] 91 | name = "bumpversion" 92 | version = "0.6.0" 93 | description = "Version-bump your software with a single command!" 94 | category = "dev" 95 | optional = false 96 | python-versions = "*" 97 | 98 | [package.dependencies] 99 | bump2version = "*" 100 | 101 | [[package]] 102 | name = "certifi" 103 | version = "2021.10.8" 104 | description = "Python package for providing Mozilla's CA Bundle." 105 | category = "main" 106 | optional = false 107 | python-versions = "*" 108 | 109 | [[package]] 110 | name = "cffi" 111 | version = "1.15.0" 112 | description = "Foreign Function Interface for Python calling C code." 
113 | category = "main" 114 | optional = false 115 | python-versions = "*" 116 | 117 | [package.dependencies] 118 | pycparser = "*" 119 | 120 | [[package]] 121 | name = "cfgv" 122 | version = "3.3.1" 123 | description = "Validate configuration and produce human readable error messages." 124 | category = "main" 125 | optional = false 126 | python-versions = ">=3.6.1" 127 | 128 | [[package]] 129 | name = "charset-normalizer" 130 | version = "2.0.7" 131 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 132 | category = "main" 133 | optional = false 134 | python-versions = ">=3.5.0" 135 | 136 | [package.extras] 137 | unicode_backport = ["unicodedata2"] 138 | 139 | [[package]] 140 | name = "click" 141 | version = "8.0.3" 142 | description = "Composable command line interface toolkit" 143 | category = "dev" 144 | optional = false 145 | python-versions = ">=3.6" 146 | 147 | [package.dependencies] 148 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 149 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 150 | 151 | [[package]] 152 | name = "colorama" 153 | version = "0.4.4" 154 | description = "Cross-platform colored terminal text." 
155 | category = "dev" 156 | optional = false 157 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 158 | 159 | [[package]] 160 | name = "coverage" 161 | version = "6.4.1" 162 | description = "Code coverage measurement for Python" 163 | category = "dev" 164 | optional = false 165 | python-versions = ">=3.7" 166 | 167 | [package.dependencies] 168 | tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} 169 | 170 | [package.extras] 171 | toml = ["tomli"] 172 | 173 | [[package]] 174 | name = "cryptography" 175 | version = "35.0.0" 176 | description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 177 | category = "main" 178 | optional = false 179 | python-versions = ">=3.6" 180 | 181 | [package.dependencies] 182 | cffi = ">=1.12" 183 | 184 | [package.extras] 185 | docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] 186 | docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] 187 | pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] 188 | sdist = ["setuptools_rust (>=0.11.4)"] 189 | ssh = ["bcrypt (>=3.1.5)"] 190 | test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] 191 | 192 | [[package]] 193 | name = "dacite" 194 | version = "1.6.0" 195 | description = "Simple creation of data classes from dictionaries." 
196 | category = "main" 197 | optional = false 198 | python-versions = ">=3.6" 199 | 200 | [package.extras] 201 | dev = ["pytest (>=5)", "pytest-cov", "coveralls", "black", "mypy", "pylint"] 202 | 203 | [[package]] 204 | name = "decorator" 205 | version = "5.1.0" 206 | description = "Decorators for Humans" 207 | category = "main" 208 | optional = false 209 | python-versions = ">=3.5" 210 | 211 | [[package]] 212 | name = "distlib" 213 | version = "0.3.4" 214 | description = "Distribution utilities" 215 | category = "main" 216 | optional = false 217 | python-versions = "*" 218 | 219 | [[package]] 220 | name = "filelock" 221 | version = "3.4.0" 222 | description = "A platform independent file lock." 223 | category = "main" 224 | optional = false 225 | python-versions = ">=3.6" 226 | 227 | [package.extras] 228 | docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] 229 | testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] 230 | 231 | [[package]] 232 | name = "fire" 233 | version = "0.4.0" 234 | description = "A library for automatically generating command line interfaces." 
235 | category = "main" 236 | optional = false 237 | python-versions = "*" 238 | 239 | [package.dependencies] 240 | six = "*" 241 | termcolor = "*" 242 | 243 | [[package]] 244 | name = "flake8" 245 | version = "4.0.1" 246 | description = "the modular source code checker: pep8 pyflakes and co" 247 | category = "dev" 248 | optional = false 249 | python-versions = ">=3.6" 250 | 251 | [package.dependencies] 252 | importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""} 253 | mccabe = ">=0.6.0,<0.7.0" 254 | pycodestyle = ">=2.8.0,<2.9.0" 255 | pyflakes = ">=2.4.0,<2.5.0" 256 | 257 | [[package]] 258 | name = "freezegun" 259 | version = "1.1.0" 260 | description = "Let your Python tests travel through time" 261 | category = "dev" 262 | optional = false 263 | python-versions = ">=3.5" 264 | 265 | [package.dependencies] 266 | python-dateutil = ">=2.7" 267 | 268 | [[package]] 269 | name = "gssapi" 270 | version = "1.7.2" 271 | description = "Python GSSAPI Wrapper" 272 | category = "main" 273 | optional = false 274 | python-versions = ">=3.6" 275 | 276 | [package.dependencies] 277 | decorator = "*" 278 | 279 | [[package]] 280 | name = "identify" 281 | version = "2.4.0" 282 | description = "File identification library for Python" 283 | category = "main" 284 | optional = false 285 | python-versions = ">=3.6.1" 286 | 287 | [package.extras] 288 | license = ["ukkonen"] 289 | 290 | [[package]] 291 | name = "idna" 292 | version = "3.3" 293 | description = "Internationalized Domain Names in Applications (IDNA)" 294 | category = "main" 295 | optional = false 296 | python-versions = ">=3.5" 297 | 298 | [[package]] 299 | name = "importlib-metadata" 300 | version = "4.2.0" 301 | description = "Read metadata from Python packages" 302 | category = "main" 303 | optional = false 304 | python-versions = ">=3.6" 305 | 306 | [package.dependencies] 307 | typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} 308 | zipp = ">=0.5" 309 | 310 | 
[package.extras] 311 | docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] 312 | testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] 313 | 314 | [[package]] 315 | name = "iniconfig" 316 | version = "1.1.1" 317 | description = "iniconfig: brain-dead simple config-ini parsing" 318 | category = "dev" 319 | optional = false 320 | python-versions = "*" 321 | 322 | [[package]] 323 | name = "jsonpickle" 324 | version = "2.0.0" 325 | description = "Python library for serializing any arbitrary object graph into JSON" 326 | category = "main" 327 | optional = false 328 | python-versions = ">=2.7" 329 | 330 | [package.dependencies] 331 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 332 | 333 | [package.extras] 334 | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] 335 | testing = ["coverage (<5)", "pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-black-multipy", "pytest-cov", "ecdsa", "feedparser", "numpy", "pandas", "pymongo", "sklearn", "sqlalchemy", "enum34", "jsonlib"] 336 | "testing.libs" = ["demjson", "simplejson", "ujson", "yajl"] 337 | 338 | [[package]] 339 | name = "linode-api4" 340 | version = "5.2.1" 341 | description = "The official python SDK for Linode API v4" 342 | category = "main" 343 | optional = false 344 | python-versions = ">=3.6" 345 | 346 | [package.dependencies] 347 | requests = "*" 348 | 349 | [[package]] 350 | name = "mccabe" 351 | version = "0.6.1" 352 | description = "McCabe checker, plugin for flake8" 353 | category = "dev" 354 | optional = false 355 | python-versions = "*" 356 | 357 | [[package]] 358 | name = "mypy-extensions" 359 | version = "0.4.3" 360 | description = "Experimental type system extensions for programs checked with the mypy typechecker." 
361 | category = "dev" 362 | optional = false 363 | python-versions = "*" 364 | 365 | [[package]] 366 | name = "nodeenv" 367 | version = "1.6.0" 368 | description = "Node.js virtual environment builder" 369 | category = "main" 370 | optional = false 371 | python-versions = "*" 372 | 373 | [[package]] 374 | name = "packaging" 375 | version = "21.0" 376 | description = "Core utilities for Python packages" 377 | category = "dev" 378 | optional = false 379 | python-versions = ">=3.6" 380 | 381 | [package.dependencies] 382 | pyparsing = ">=2.0.2" 383 | 384 | [[package]] 385 | name = "paramiko" 386 | version = "2.10.1" 387 | description = "SSH2 protocol library" 388 | category = "main" 389 | optional = false 390 | python-versions = "*" 391 | 392 | [package.dependencies] 393 | bcrypt = ">=3.1.3" 394 | cryptography = ">=2.5" 395 | gssapi = {version = ">=1.4.1", optional = true, markers = "platform_system != \"Windows\" and extra == \"gssapi\""} 396 | pyasn1 = {version = ">=0.1.7", optional = true, markers = "extra == \"gssapi\""} 397 | pynacl = ">=1.0.1" 398 | pywin32 = {version = ">=2.1.8", optional = true, markers = "platform_system == \"Windows\" and extra == \"gssapi\""} 399 | six = "*" 400 | 401 | [package.extras] 402 | all = ["pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "bcrypt (>=3.1.3)", "invoke (>=1.3)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"] 403 | ed25519 = ["pynacl (>=1.0.1)", "bcrypt (>=3.1.3)"] 404 | gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"] 405 | invoke = ["invoke (>=1.3)"] 406 | 407 | [[package]] 408 | name = "pathspec" 409 | version = "0.9.0" 410 | description = "Utility library for gitignore style pattern matching of file paths." 411 | category = "dev" 412 | optional = false 413 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 414 | 415 | [[package]] 416 | name = "platformdirs" 417 | version = "2.4.0" 418 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. 
a \"user data dir\"." 419 | category = "main" 420 | optional = false 421 | python-versions = ">=3.6" 422 | 423 | [package.extras] 424 | docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] 425 | test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] 426 | 427 | [[package]] 428 | name = "pluggy" 429 | version = "1.0.0" 430 | description = "plugin and hook calling mechanisms for python" 431 | category = "dev" 432 | optional = false 433 | python-versions = ">=3.6" 434 | 435 | [package.dependencies] 436 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 437 | 438 | [package.extras] 439 | dev = ["pre-commit", "tox"] 440 | testing = ["pytest", "pytest-benchmark"] 441 | 442 | [[package]] 443 | name = "pre-commit" 444 | version = "2.16.0" 445 | description = "A framework for managing and maintaining multi-language pre-commit hooks." 446 | category = "main" 447 | optional = false 448 | python-versions = ">=3.6.1" 449 | 450 | [package.dependencies] 451 | cfgv = ">=2.0.0" 452 | identify = ">=1.0.0" 453 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 454 | nodeenv = ">=0.11.1" 455 | pyyaml = ">=5.1" 456 | toml = "*" 457 | virtualenv = ">=20.0.8" 458 | 459 | [[package]] 460 | name = "py" 461 | version = "1.10.0" 462 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 463 | category = "dev" 464 | optional = false 465 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 466 | 467 | [[package]] 468 | name = "pyasn1" 469 | version = "0.4.8" 470 | description = "ASN.1 types and codecs" 471 | category = "main" 472 | optional = false 473 | python-versions = "*" 474 | 475 | [[package]] 476 | name = "pycodestyle" 477 | version = "2.8.0" 478 | description = "Python style guide checker" 479 | category = "dev" 480 | optional = false 481 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, 
!=3.3.*, !=3.4.*" 482 | 483 | [[package]] 484 | name = "pycparser" 485 | version = "2.20" 486 | description = "C parser in Python" 487 | category = "main" 488 | optional = false 489 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 490 | 491 | [[package]] 492 | name = "pyflakes" 493 | version = "2.4.0" 494 | description = "passive checker of Python programs" 495 | category = "dev" 496 | optional = false 497 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 498 | 499 | [[package]] 500 | name = "pygments" 501 | version = "2.10.0" 502 | description = "Pygments is a syntax highlighting package written in Python." 503 | category = "main" 504 | optional = false 505 | python-versions = ">=3.5" 506 | 507 | [[package]] 508 | name = "pynacl" 509 | version = "1.4.0" 510 | description = "Python binding to the Networking and Cryptography (NaCl) library" 511 | category = "main" 512 | optional = false 513 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 514 | 515 | [package.dependencies] 516 | cffi = ">=1.4.1" 517 | six = "*" 518 | 519 | [package.extras] 520 | docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] 521 | tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"] 522 | 523 | [[package]] 524 | name = "pyparsing" 525 | version = "3.0.3" 526 | description = "Python parsing module" 527 | category = "dev" 528 | optional = false 529 | python-versions = ">=3.6" 530 | 531 | [package.extras] 532 | diagrams = ["jinja2", "railroad-diagrams"] 533 | 534 | [[package]] 535 | name = "pytest" 536 | version = "6.2.5" 537 | description = "pytest: simple powerful testing with Python" 538 | category = "dev" 539 | optional = false 540 | python-versions = ">=3.6" 541 | 542 | [package.dependencies] 543 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 544 | attrs = ">=19.2.0" 545 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 546 | importlib-metadata = {version = ">=0.12", markers = "python_version < 
\"3.8\""} 547 | iniconfig = "*" 548 | packaging = "*" 549 | pluggy = ">=0.12,<2.0" 550 | py = ">=1.8.2" 551 | toml = "*" 552 | 553 | [package.extras] 554 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] 555 | 556 | [[package]] 557 | name = "pytest-cov" 558 | version = "3.0.0" 559 | description = "Pytest plugin for measuring coverage." 560 | category = "dev" 561 | optional = false 562 | python-versions = ">=3.6" 563 | 564 | [package.dependencies] 565 | coverage = {version = ">=5.2.1", extras = ["toml"]} 566 | pytest = ">=4.6" 567 | 568 | [package.extras] 569 | testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] 570 | 571 | [[package]] 572 | name = "python-dateutil" 573 | version = "2.8.2" 574 | description = "Extensions to the standard Python datetime module" 575 | category = "dev" 576 | optional = false 577 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 578 | 579 | [package.dependencies] 580 | six = ">=1.5" 581 | 582 | [[package]] 583 | name = "python-digitalocean" 584 | version = "1.17.0" 585 | description = "digitalocean.com API to manage Droplets and Images" 586 | category = "main" 587 | optional = false 588 | python-versions = "*" 589 | 590 | [package.dependencies] 591 | jsonpickle = "*" 592 | requests = "*" 593 | 594 | [[package]] 595 | name = "pyvultr" 596 | version = "0.1.5" 597 | description = "Python library for Vultr API" 598 | category = "main" 599 | optional = false 600 | python-versions = ">=3.6.2,<4.0.0" 601 | 602 | [package.dependencies] 603 | dacite = ">=1.6.0,<2.0.0" 604 | fire = ">=0.4.0,<0.5.0" 605 | Pygments = ">=2.10.0,<3.0.0" 606 | requests = ">=2.26.0,<3.0.0" 607 | 608 | [[package]] 609 | name = "pywin32" 610 | version = "302" 611 | description = "Python for Window Extensions" 612 | category = "main" 613 | optional = false 614 | python-versions = "*" 615 | 616 | [[package]] 617 | name = "pyyaml" 618 | version = "6.0" 619 | description = "YAML parser and emitter 
for Python" 620 | category = "main" 621 | optional = false 622 | python-versions = ">=3.6" 623 | 624 | [[package]] 625 | name = "regex" 626 | version = "2021.10.23" 627 | description = "Alternative regular expression module, to replace re." 628 | category = "dev" 629 | optional = false 630 | python-versions = "*" 631 | 632 | [[package]] 633 | name = "requests" 634 | version = "2.26.0" 635 | description = "Python HTTP for Humans." 636 | category = "main" 637 | optional = false 638 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 639 | 640 | [package.dependencies] 641 | certifi = ">=2017.4.17" 642 | charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} 643 | idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} 644 | urllib3 = ">=1.21.1,<1.27" 645 | 646 | [package.extras] 647 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] 648 | use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] 649 | 650 | [[package]] 651 | name = "six" 652 | version = "1.16.0" 653 | description = "Python 2 and 3 compatibility utilities" 654 | category = "main" 655 | optional = false 656 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 657 | 658 | [[package]] 659 | name = "termcolor" 660 | version = "1.1.0" 661 | description = "ANSII Color formatting for output in terminal." 
662 | category = "main" 663 | optional = false 664 | python-versions = "*" 665 | 666 | [[package]] 667 | name = "toml" 668 | version = "0.10.2" 669 | description = "Python Library for Tom's Obvious, Minimal Language" 670 | category = "main" 671 | optional = false 672 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 673 | 674 | [[package]] 675 | name = "tomli" 676 | version = "1.2.1" 677 | description = "A lil' TOML parser" 678 | category = "dev" 679 | optional = false 680 | python-versions = ">=3.6" 681 | 682 | [[package]] 683 | name = "typed-ast" 684 | version = "1.4.3" 685 | description = "a fork of Python 2 and 3 ast modules with type comment support" 686 | category = "dev" 687 | optional = false 688 | python-versions = "*" 689 | 690 | [[package]] 691 | name = "typing-extensions" 692 | version = "3.10.0.2" 693 | description = "Backported and Experimental Type Hints for Python 3.5+" 694 | category = "main" 695 | optional = false 696 | python-versions = "*" 697 | 698 | [[package]] 699 | name = "urllib3" 700 | version = "1.26.7" 701 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
702 | category = "main" 703 | optional = false 704 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 705 | 706 | [package.extras] 707 | brotli = ["brotlipy (>=0.6.0)"] 708 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 709 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 710 | 711 | [[package]] 712 | name = "virtualenv" 713 | version = "20.10.0" 714 | description = "Virtual Python Environment builder" 715 | category = "main" 716 | optional = false 717 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 718 | 719 | [package.dependencies] 720 | "backports.entry-points-selectable" = ">=1.0.4" 721 | distlib = ">=0.3.1,<1" 722 | filelock = ">=3.2,<4" 723 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 724 | platformdirs = ">=2,<3" 725 | six = ">=1.9.0,<2" 726 | 727 | [package.extras] 728 | docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] 729 | testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] 730 | 731 | [[package]] 732 | name = "zipp" 733 | version = "3.6.0" 734 | description = "Backport of pathlib-compatible object wrapper for zip files" 735 | category = "main" 736 | optional = false 737 | python-versions = ">=3.6" 738 | 739 | [package.extras] 740 | docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] 741 | testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] 742 | 743 | [metadata] 744 | lock-version = "1.1" 745 | python-versions = "^3.7" 746 | content-hash = 
"1d4a62a8eb75b68412e82c0196230b62856d9835f3208a1456eed0991a9c24f3" 747 | 748 | [metadata.files] 749 | atomicwrites = [ 750 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, 751 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, 752 | ] 753 | attrs = [ 754 | {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, 755 | {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, 756 | ] 757 | "backports.entry-points-selectable" = [ 758 | {file = "backports.entry_points_selectable-1.1.1-py2.py3-none-any.whl", hash = "sha256:7fceed9532a7aa2bd888654a7314f864a3c16a4e710b34a58cfc0f08114c663b"}, 759 | {file = "backports.entry_points_selectable-1.1.1.tar.gz", hash = "sha256:914b21a479fde881635f7af5adc7f6e38d6b274be32269070c53b698c60d5386"}, 760 | ] 761 | bcrypt = [ 762 | {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b589229207630484aefe5899122fb938a5b017b0f4349f769b8c13e78d99a8fd"}, 763 | {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c95d4cbebffafcdd28bd28bb4e25b31c50f6da605c81ffd9ad8a3d1b2ab7b1b6"}, 764 | {file = "bcrypt-3.2.0-cp36-abi3-manylinux1_x86_64.whl", hash = "sha256:63d4e3ff96188e5898779b6057878fecf3f11cfe6ec3b313ea09955d587ec7a7"}, 765 | {file = "bcrypt-3.2.0-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1"}, 766 | {file = "bcrypt-3.2.0-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d"}, 767 | {file = "bcrypt-3.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a0584a92329210fcd75eb8a3250c5a941633f8bfaf2a18f81009b097732839b7"}, 768 | {file = 
"bcrypt-3.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:56e5da069a76470679f312a7d3d23deb3ac4519991a0361abc11da837087b61d"}, 769 | {file = "bcrypt-3.2.0-cp36-abi3-win32.whl", hash = "sha256:a67fb841b35c28a59cebed05fbd3e80eea26e6d75851f0574a9273c80f3e9b55"}, 770 | {file = "bcrypt-3.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:81fec756feff5b6818ea7ab031205e1d323d8943d237303baca2c5f9c7846f34"}, 771 | {file = "bcrypt-3.2.0.tar.gz", hash = "sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29"}, 772 | ] 773 | black = [ 774 | {file = "black-21.9b0-py3-none-any.whl", hash = "sha256:380f1b5da05e5a1429225676655dddb96f5ae8c75bdf91e53d798871b902a115"}, 775 | {file = "black-21.9b0.tar.gz", hash = "sha256:7de4cfc7eb6b710de325712d40125689101d21d25283eed7e9998722cf10eb91"}, 776 | ] 777 | bump2version = [ 778 | {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, 779 | {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, 780 | ] 781 | bumpversion = [ 782 | {file = "bumpversion-0.6.0-py2.py3-none-any.whl", hash = "sha256:4eb3267a38194d09f048a2179980bb4803701969bff2c85fa8f6d1ce050be15e"}, 783 | {file = "bumpversion-0.6.0.tar.gz", hash = "sha256:4ba55e4080d373f80177b4dabef146c07ce73c7d1377aabf9d3c3ae1f94584a6"}, 784 | ] 785 | certifi = [ 786 | {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, 787 | {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, 788 | ] 789 | cffi = [ 790 | {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, 791 | {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, 792 | 
{file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, 793 | {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, 794 | {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, 795 | {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, 796 | {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, 797 | {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, 798 | {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, 799 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, 800 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, 801 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, 802 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, 803 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, 804 | {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, 805 | {file = 
"cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, 806 | {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, 807 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, 808 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, 809 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, 810 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, 811 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, 812 | {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, 813 | {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, 814 | {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, 815 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, 816 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, 817 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, 818 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, 819 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, 820 | {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, 821 | {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, 822 | {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, 823 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, 824 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, 825 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, 826 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, 827 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, 828 | {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, 829 | {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, 830 | {file = 
"cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, 831 | {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, 832 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, 833 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, 834 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, 835 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, 836 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, 837 | {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, 838 | {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, 839 | {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, 840 | ] 841 | cfgv = [ 842 | {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, 843 | {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, 844 | ] 845 | charset-normalizer = [ 846 | {file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"}, 847 | {file = 
"charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"}, 848 | ] 849 | click = [ 850 | {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, 851 | {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, 852 | ] 853 | colorama = [ 854 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, 855 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, 856 | ] 857 | coverage = [ 858 | {file = "coverage-6.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1d5aa2703e1dab4ae6cf416eb0095304f49d004c39e9db1d86f57924f43006b"}, 859 | {file = "coverage-6.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ce1b258493cbf8aec43e9b50d89982346b98e9ffdfaae8ae5793bc112fb0068"}, 860 | {file = "coverage-6.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c4e737f60c6936460c5be330d296dd5b48b3963f48634c53b3f7deb0f34ec4"}, 861 | {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84e65ef149028516c6d64461b95a8dbcfce95cfd5b9eb634320596173332ea84"}, 862 | {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f69718750eaae75efe506406c490d6fc5a6161d047206cc63ce25527e8a3adad"}, 863 | {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e57816f8ffe46b1df8f12e1b348f06d164fd5219beba7d9433ba79608ef011cc"}, 864 | {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:01c5615d13f3dd3aa8543afc069e5319cfa0c7d712f6e04b920431e5c564a749"}, 865 | {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:75ab269400706fab15981fd4bd5080c56bd5cc07c3bccb86aab5e1d5a88dc8f4"}, 866 | {file = "coverage-6.4.1-cp310-cp310-win32.whl", hash = "sha256:a7f3049243783df2e6cc6deafc49ea123522b59f464831476d3d1448e30d72df"}, 867 | {file = "coverage-6.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2ddcac99b2d2aec413e36d7a429ae9ebcadf912946b13ffa88e7d4c9b712d6"}, 868 | {file = "coverage-6.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb73e0011b8793c053bfa85e53129ba5f0250fdc0392c1591fd35d915ec75c46"}, 869 | {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106c16dfe494de3193ec55cac9640dd039b66e196e4641fa8ac396181578b982"}, 870 | {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87f4f3df85aa39da00fd3ec4b5abeb7407e82b68c7c5ad181308b0e2526da5d4"}, 871 | {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:961e2fb0680b4f5ad63234e0bf55dfb90d302740ae9c7ed0120677a94a1590cb"}, 872 | {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cec3a0f75c8f1031825e19cd86ee787e87cf03e4fd2865c79c057092e69e3a3b"}, 873 | {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:129cd05ba6f0d08a766d942a9ed4b29283aff7b2cccf5b7ce279d50796860bb3"}, 874 | {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bf5601c33213d3cb19d17a796f8a14a9eaa5e87629a53979a5981e3e3ae166f6"}, 875 | {file = "coverage-6.4.1-cp37-cp37m-win32.whl", hash = "sha256:269eaa2c20a13a5bf17558d4dc91a8d078c4fa1872f25303dddcbba3a813085e"}, 876 | {file = "coverage-6.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f02cbbf8119db68455b9d763f2f8737bb7db7e43720afa07d8eb1604e5c5ae28"}, 877 | {file = "coverage-6.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ffa9297c3a453fba4717d06df579af42ab9a28022444cae7fa605af4df612d54"}, 878 | {file = 
"coverage-6.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:145f296d00441ca703a659e8f3eb48ae39fb083baba2d7ce4482fb2723e050d9"}, 879 | {file = "coverage-6.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d44996140af8b84284e5e7d398e589574b376fb4de8ccd28d82ad8e3bea13"}, 880 | {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2bd9a6fc18aab8d2e18f89b7ff91c0f34ff4d5e0ba0b33e989b3cd4194c81fd9"}, 881 | {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3384f2a3652cef289e38100f2d037956194a837221edd520a7ee5b42d00cc605"}, 882 | {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b3e07152b4563722be523e8cd0b209e0d1a373022cfbde395ebb6575bf6790d"}, 883 | {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1480ff858b4113db2718848d7b2d1b75bc79895a9c22e76a221b9d8d62496428"}, 884 | {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:865d69ae811a392f4d06bde506d531f6a28a00af36f5c8649684a9e5e4a85c83"}, 885 | {file = "coverage-6.4.1-cp38-cp38-win32.whl", hash = "sha256:664a47ce62fe4bef9e2d2c430306e1428ecea207ffd68649e3b942fa8ea83b0b"}, 886 | {file = "coverage-6.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:26dff09fb0d82693ba9e6231248641d60ba606150d02ed45110f9ec26404ed1c"}, 887 | {file = "coverage-6.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9c80df769f5ec05ad21ea34be7458d1dc51ff1fb4b2219e77fe24edf462d6df"}, 888 | {file = "coverage-6.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39ee53946bf009788108b4dd2894bf1349b4e0ca18c2016ffa7d26ce46b8f10d"}, 889 | {file = "coverage-6.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5b66caa62922531059bc5ac04f836860412f7f88d38a476eda0a6f11d4724f4"}, 890 | {file = 
"coverage-6.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd180ed867e289964404051a958f7cccabdeed423f91a899829264bb7974d3d3"}, 891 | {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84631e81dd053e8a0d4967cedab6db94345f1c36107c71698f746cb2636c63e3"}, 892 | {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8c08da0bd238f2970230c2a0d28ff0e99961598cb2e810245d7fc5afcf1254e8"}, 893 | {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d42c549a8f41dc103a8004b9f0c433e2086add8a719da00e246e17cbe4056f72"}, 894 | {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:309ce4a522ed5fca432af4ebe0f32b21d6d7ccbb0f5fcc99290e71feba67c264"}, 895 | {file = "coverage-6.4.1-cp39-cp39-win32.whl", hash = "sha256:fdb6f7bd51c2d1714cea40718f6149ad9be6a2ee7d93b19e9f00934c0f2a74d9"}, 896 | {file = "coverage-6.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:342d4aefd1c3e7f620a13f4fe563154d808b69cccef415415aece4c786665397"}, 897 | {file = "coverage-6.4.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:4803e7ccf93230accb928f3a68f00ffa80a88213af98ed338a57ad021ef06815"}, 898 | {file = "coverage-6.4.1.tar.gz", hash = "sha256:4321f075095a096e70aff1d002030ee612b65a205a0a0f5b815280d5dc58100c"}, 899 | ] 900 | cryptography = [ 901 | {file = "cryptography-35.0.0-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:d57e0cdc1b44b6cdf8af1d01807db06886f10177469312fbde8f44ccbb284bc9"}, 902 | {file = "cryptography-35.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:ced40344e811d6abba00295ced98c01aecf0c2de39481792d87af4fa58b7b4d6"}, 903 | {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:54b2605e5475944e2213258e0ab8696f4f357a31371e538ef21e8d61c843c28d"}, 904 | {file = 
"cryptography-35.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7b7ceeff114c31f285528ba8b390d3e9cfa2da17b56f11d366769a807f17cbaa"}, 905 | {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d69645f535f4b2c722cfb07a8eab916265545b3475fdb34e0be2f4ee8b0b15e"}, 906 | {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2d0e0acc20ede0f06ef7aa58546eee96d2592c00f450c9acb89c5879b61992"}, 907 | {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:07bb7fbfb5de0980590ddfc7f13081520def06dc9ed214000ad4372fb4e3c7f6"}, 908 | {file = "cryptography-35.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7eba2cebca600a7806b893cb1d541a6e910afa87e97acf2021a22b32da1df52d"}, 909 | {file = "cryptography-35.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:18d90f4711bf63e2fb21e8c8e51ed8189438e6b35a6d996201ebd98a26abbbe6"}, 910 | {file = "cryptography-35.0.0-cp36-abi3-win32.whl", hash = "sha256:c10c797ac89c746e488d2ee92bd4abd593615694ee17b2500578b63cad6b93a8"}, 911 | {file = "cryptography-35.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:7075b304cd567694dc692ffc9747f3e9cb393cc4aa4fb7b9f3abd6f5c4e43588"}, 912 | {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a688ebcd08250eab5bb5bca318cc05a8c66de5e4171a65ca51db6bd753ff8953"}, 913 | {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99915d6ab265c22873f1b4d6ea5ef462ef797b4140be4c9d8b179915e0985c6"}, 914 | {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:928185a6d1ccdb816e883f56ebe92e975a262d31cc536429041921f8cb5a62fd"}, 915 | {file = "cryptography-35.0.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ebeddd119f526bcf323a89f853afb12e225902a24d29b55fe18dd6fcb2838a76"}, 916 | {file 
= "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22a38e96118a4ce3b97509443feace1d1011d0571fae81fc3ad35f25ba3ea999"}, 917 | {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb80e8a1f91e4b7ef8b33041591e6d89b2b8e122d787e87eeb2b08da71bb16ad"}, 918 | {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:abb5a361d2585bb95012a19ed9b2c8f412c5d723a9836418fab7aaa0243e67d2"}, 919 | {file = "cryptography-35.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1ed82abf16df40a60942a8c211251ae72858b25b7421ce2497c2eb7a1cee817c"}, 920 | {file = "cryptography-35.0.0.tar.gz", hash = "sha256:9933f28f70d0517686bd7de36166dda42094eac49415459d9bdf5e7df3e0086d"}, 921 | ] 922 | dacite = [ 923 | {file = "dacite-1.6.0-py3-none-any.whl", hash = "sha256:4331535f7aabb505c732fa4c3c094313fc0a1d5ea19907bf4726a7819a68b93f"}, 924 | {file = "dacite-1.6.0.tar.gz", hash = "sha256:d48125ed0a0352d3de9f493bf980038088f45f3f9d7498f090b50a847daaa6df"}, 925 | ] 926 | decorator = [ 927 | {file = "decorator-5.1.0-py3-none-any.whl", hash = "sha256:7b12e7c3c6ab203a29e157335e9122cb03de9ab7264b137594103fd4a683b374"}, 928 | {file = "decorator-5.1.0.tar.gz", hash = "sha256:e59913af105b9860aa2c8d3272d9de5a56a4e608db9a2f167a8480b323d529a7"}, 929 | ] 930 | distlib = [ 931 | {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, 932 | {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, 933 | ] 934 | filelock = [ 935 | {file = "filelock-3.4.0-py3-none-any.whl", hash = "sha256:2e139a228bcf56dd8b2274a65174d005c4a6b68540ee0bdbb92c76f43f29f7e8"}, 936 | {file = "filelock-3.4.0.tar.gz", hash = "sha256:93d512b32a23baf4cac44ffd72ccf70732aeff7b8050fcaf6d3ec406d954baf4"}, 937 | ] 938 | fire = [ 939 | {file = "fire-0.4.0.tar.gz", hash = 
"sha256:c5e2b8763699d1142393a46d0e3e790c5eb2f0706082df8f647878842c216a62"}, 940 | ] 941 | flake8 = [ 942 | {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, 943 | {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, 944 | ] 945 | freezegun = [ 946 | {file = "freezegun-1.1.0-py2.py3-none-any.whl", hash = "sha256:2ae695f7eb96c62529f03a038461afe3c692db3465e215355e1bb4b0ab408712"}, 947 | {file = "freezegun-1.1.0.tar.gz", hash = "sha256:177f9dd59861d871e27a484c3332f35a6e3f5d14626f2bf91be37891f18927f3"}, 948 | ] 949 | gssapi = [ 950 | {file = "gssapi-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c02a563d7e8b4005ccfc1f6080eaf0805c052876397ea9fb03b7ee725bbbbccc"}, 951 | {file = "gssapi-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3d07a74a57cf0df22d0db9452cf500d487736e91cbd21aefffefa3d53e8d8ca2"}, 952 | {file = "gssapi-1.7.2-cp310-cp310-win32.whl", hash = "sha256:14bde0adcb6fd435021292899f8111396e54c039b47f107e97c40608ee451b84"}, 953 | {file = "gssapi-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:fcde5311a574c2ac314dddaf9608310bd7a4eae3b361ab6374c552428534b51b"}, 954 | {file = "gssapi-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2b4b979c075062654a64aeb6faf579f3070d6e52a9c94b10e77eaea0ef88795d"}, 955 | {file = "gssapi-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:b2b57d01debcaa79b91a9c7e40ccc200cbcae9afe48f15cc54901ddf733b4d91"}, 956 | {file = "gssapi-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:0b02bbdd850c079b1d453546579fc283f0646f56ff4b39cd3b0e27263a6af97e"}, 957 | {file = "gssapi-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:145bee55bff2461d2704b958bb6f45f6abf11442efe0f2e7a612f0ab049613f2"}, 958 | {file = "gssapi-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:b199f59450c4cdc85d7d4f0f35f2b7b7420e536309d0370a5ea0734d56d06e38"}, 959 | {file = "gssapi-1.7.2-cp37-cp37m-win_amd64.whl", hash = 
"sha256:20b5365557684856c9b20160b26eaba664afd92a7c098b4bc0c0b2aaf7e5a31c"}, 960 | {file = "gssapi-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6ad1df7dd638485402c5b9afef5b7355d855266a09e2f7d55e5f0c8ee0f6a2e7"}, 961 | {file = "gssapi-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:65b2820fd547398c55c69ab6884245b268ea56702f6411149a3be6e520e3efd5"}, 962 | {file = "gssapi-1.7.2-cp38-cp38-win32.whl", hash = "sha256:5231e3d5d299cefe535a854b7e8d4700775704e75fa33fc3aba1cf83a3e2e7a0"}, 963 | {file = "gssapi-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:33aa276927a82ec630bf580fdd187399062568e699901c33ff8191d3ae9daa4f"}, 964 | {file = "gssapi-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe5e95c88fd737117374223683e319b0d80024f7d176822f14a12be0663cc3d7"}, 965 | {file = "gssapi-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7123301f368f4198c46c62ee791878e7174fe7eed05c5d602708e0c5ec6774db"}, 966 | {file = "gssapi-1.7.2-cp39-cp39-win32.whl", hash = "sha256:69983699e9ef39fb7f84b02039d739362fb39959285bb5fdd0efcc19943aa5b3"}, 967 | {file = "gssapi-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:a01dbcd17760cdd77701006105e00a1bd213bd6e189b6602242f175ab488afee"}, 968 | {file = "gssapi-1.7.2.tar.gz", hash = "sha256:748efbcf7cfb31183cd75e5314493e79fe3521b3ec00d090a77e23f7c75fa59d"}, 969 | ] 970 | identify = [ 971 | {file = "identify-2.4.0-py2.py3-none-any.whl", hash = "sha256:eba31ca80258de6bb51453084bff4a923187cd2193b9c13710f2516ab30732cc"}, 972 | {file = "identify-2.4.0.tar.gz", hash = "sha256:a33ae873287e81651c7800ca309dc1f84679b763c9c8b30680e16fbfa82f0107"}, 973 | ] 974 | idna = [ 975 | {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, 976 | {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, 977 | ] 978 | importlib-metadata = [ 979 | {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = 
"sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, 980 | {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, 981 | ] 982 | iniconfig = [ 983 | {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, 984 | {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, 985 | ] 986 | jsonpickle = [ 987 | {file = "jsonpickle-2.0.0-py2.py3-none-any.whl", hash = "sha256:c1010994c1fbda87a48f8a56698605b598cb0fc6bb7e7927559fc1100e69aeac"}, 988 | {file = "jsonpickle-2.0.0.tar.gz", hash = "sha256:0be49cba80ea6f87a168aa8168d717d00c6ca07ba83df3cec32d3b30bfe6fb9a"}, 989 | ] 990 | linode-api4 = [ 991 | {file = "linode_api4-5.2.1-py3-none-any.whl", hash = "sha256:9e083abe1568a5162b83082240b654cf70859fba97e710eef043567a1b749d11"}, 992 | {file = "linode_api4-5.2.1.tar.gz", hash = "sha256:899bc9946cf4f99b72bd8a9db138db0b1ba63200c6615128ef367655c733e7cd"}, 993 | ] 994 | mccabe = [ 995 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 996 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 997 | ] 998 | mypy-extensions = [ 999 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, 1000 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, 1001 | ] 1002 | nodeenv = [ 1003 | {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, 1004 | {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, 1005 | ] 1006 | packaging = [ 1007 | {file = 
"packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, 1008 | {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, 1009 | ] 1010 | paramiko = [ 1011 | {file = "paramiko-2.10.1-py2.py3-none-any.whl", hash = "sha256:f6cbd3e1204abfdbcd40b3ecbc9d32f04027cd3080fe666245e21e7540ccfc1b"}, 1012 | {file = "paramiko-2.10.1.tar.gz", hash = "sha256:443f4da23ec24e9a9c0ea54017829c282abdda1d57110bf229360775ccd27a31"}, 1013 | ] 1014 | pathspec = [ 1015 | {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, 1016 | {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, 1017 | ] 1018 | platformdirs = [ 1019 | {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, 1020 | {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, 1021 | ] 1022 | pluggy = [ 1023 | {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, 1024 | {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, 1025 | ] 1026 | pre-commit = [ 1027 | {file = "pre_commit-2.16.0-py2.py3-none-any.whl", hash = "sha256:758d1dc9b62c2ed8881585c254976d66eae0889919ab9b859064fc2fe3c7743e"}, 1028 | {file = "pre_commit-2.16.0.tar.gz", hash = "sha256:fe9897cac830aa7164dbd02a4e7b90cae49630451ce88464bca73db486ba9f65"}, 1029 | ] 1030 | py = [ 1031 | {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, 1032 | {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, 1033 | ] 1034 | pyasn1 = [ 
1035 | {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, 1036 | {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, 1037 | {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, 1038 | {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, 1039 | {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, 1040 | {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, 1041 | {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, 1042 | {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, 1043 | {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, 1044 | {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, 1045 | {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, 1046 | {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, 1047 | {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, 1048 | ] 1049 | pycodestyle = [ 1050 | {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, 1051 | {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, 1052 | ] 1053 | pycparser = [ 1054 | {file = 
"pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, 1055 | {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, 1056 | ] 1057 | pyflakes = [ 1058 | {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, 1059 | {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, 1060 | ] 1061 | pygments = [ 1062 | {file = "Pygments-2.10.0-py3-none-any.whl", hash = "sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380"}, 1063 | {file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"}, 1064 | ] 1065 | pynacl = [ 1066 | {file = "PyNaCl-1.4.0-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:ea6841bc3a76fa4942ce00f3bda7d436fda21e2d91602b9e21b7ca9ecab8f3ff"}, 1067 | {file = "PyNaCl-1.4.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:d452a6746f0a7e11121e64625109bc4468fc3100452817001dbe018bb8b08514"}, 1068 | {file = "PyNaCl-1.4.0-cp27-cp27m-win32.whl", hash = "sha256:2fe0fc5a2480361dcaf4e6e7cea00e078fcda07ba45f811b167e3f99e8cff574"}, 1069 | {file = "PyNaCl-1.4.0-cp27-cp27m-win_amd64.whl", hash = "sha256:f8851ab9041756003119368c1e6cd0b9c631f46d686b3904b18c0139f4419f80"}, 1070 | {file = "PyNaCl-1.4.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:7757ae33dae81c300487591c68790dfb5145c7d03324000433d9a2c141f82af7"}, 1071 | {file = "PyNaCl-1.4.0-cp35-abi3-macosx_10_10_x86_64.whl", hash = "sha256:757250ddb3bff1eecd7e41e65f7f833a8405fede0194319f87899690624f2122"}, 1072 | {file = "PyNaCl-1.4.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:30f9b96db44e09b3304f9ea95079b1b7316b2b4f3744fe3aaecccd95d547063d"}, 1073 | {file = "PyNaCl-1.4.0-cp35-abi3-win32.whl", hash = "sha256:4e10569f8cbed81cb7526ae137049759d2a8d57726d52c1a000a3ce366779634"}, 1074 
| {file = "PyNaCl-1.4.0-cp35-abi3-win_amd64.whl", hash = "sha256:c914f78da4953b33d4685e3cdc7ce63401247a21425c16a39760e282075ac4a6"}, 1075 | {file = "PyNaCl-1.4.0-cp35-cp35m-win32.whl", hash = "sha256:06cbb4d9b2c4bd3c8dc0d267416aaed79906e7b33f114ddbf0911969794b1cc4"}, 1076 | {file = "PyNaCl-1.4.0-cp35-cp35m-win_amd64.whl", hash = "sha256:511d269ee845037b95c9781aa702f90ccc36036f95d0f31373a6a79bd8242e25"}, 1077 | {file = "PyNaCl-1.4.0-cp36-cp36m-win32.whl", hash = "sha256:11335f09060af52c97137d4ac54285bcb7df0cef29014a1a4efe64ac065434c4"}, 1078 | {file = "PyNaCl-1.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:cd401ccbc2a249a47a3a1724c2918fcd04be1f7b54eb2a5a71ff915db0ac51c6"}, 1079 | {file = "PyNaCl-1.4.0-cp37-cp37m-win32.whl", hash = "sha256:8122ba5f2a2169ca5da936b2e5a511740ffb73979381b4229d9188f6dcb22f1f"}, 1080 | {file = "PyNaCl-1.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:537a7ccbea22905a0ab36ea58577b39d1fa9b1884869d173b5cf111f006f689f"}, 1081 | {file = "PyNaCl-1.4.0-cp38-cp38-win32.whl", hash = "sha256:9c4a7ea4fb81536c1b1f5cc44d54a296f96ae78c1ebd2311bd0b60be45a48d96"}, 1082 | {file = "PyNaCl-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7c6092102219f59ff29788860ccb021e80fffd953920c4a8653889c029b2d420"}, 1083 | {file = "PyNaCl-1.4.0.tar.gz", hash = "sha256:54e9a2c849c742006516ad56a88f5c74bf2ce92c9f67435187c3c5953b346505"}, 1084 | ] 1085 | pyparsing = [ 1086 | {file = "pyparsing-3.0.3-py3-none-any.whl", hash = "sha256:f8d3fe9fc404576c5164f0f0c4e382c96b85265e023c409c43d48f65da9d60d0"}, 1087 | {file = "pyparsing-3.0.3.tar.gz", hash = "sha256:9e3511118010f112a4b4b435ae50e1eaa610cda191acb9e421d60cf5fde83455"}, 1088 | ] 1089 | pytest = [ 1090 | {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, 1091 | {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, 1092 | ] 1093 | pytest-cov = [ 1094 | {file = "pytest-cov-3.0.0.tar.gz", hash = 
"sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, 1095 | {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, 1096 | ] 1097 | python-dateutil = [ 1098 | {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, 1099 | {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, 1100 | ] 1101 | python-digitalocean = [ 1102 | {file = "python-digitalocean-1.17.0.tar.gz", hash = "sha256:107854fde1aafa21774e8053cf253b04173613c94531f75d5a039ad770562b24"}, 1103 | {file = "python_digitalocean-1.17.0-py3-none-any.whl", hash = "sha256:0032168e022e85fca314eb3f8dfaabf82087f2ed40839eb28f1eeeeca5afb1fa"}, 1104 | ] 1105 | pyvultr = [ 1106 | {file = "pyvultr-0.1.5-py3-none-any.whl", hash = "sha256:0d4dda9cf9e0fd2f8293c8bde97a3f88628e2950ac65947b97d82fc4050c6525"}, 1107 | {file = "pyvultr-0.1.5.tar.gz", hash = "sha256:4954904650bad58c6b48ae0d5015fe4f7d41e79be943d0b8526ccda9d48efcc5"}, 1108 | ] 1109 | pywin32 = [ 1110 | {file = "pywin32-302-cp310-cp310-win32.whl", hash = "sha256:251b7a9367355ccd1a4cd69cd8dd24bd57b29ad83edb2957cfa30f7ed9941efa"}, 1111 | {file = "pywin32-302-cp310-cp310-win_amd64.whl", hash = "sha256:79cf7e6ddaaf1cd47a9e50cc74b5d770801a9db6594464137b1b86aa91edafcc"}, 1112 | {file = "pywin32-302-cp36-cp36m-win32.whl", hash = "sha256:fe21c2fb332d03dac29de070f191bdbf14095167f8f2165fdc57db59b1ecc006"}, 1113 | {file = "pywin32-302-cp36-cp36m-win_amd64.whl", hash = "sha256:d3761ab4e8c5c2dbc156e2c9ccf38dd51f936dc77e58deb940ffbc4b82a30528"}, 1114 | {file = "pywin32-302-cp37-cp37m-win32.whl", hash = "sha256:48dd4e348f1ee9538dd4440bf201ea8c110ea6d9f3a5010d79452e9fa80480d9"}, 1115 | {file = "pywin32-302-cp37-cp37m-win_amd64.whl", hash = "sha256:496df89f10c054c9285cc99f9d509e243f4e14ec8dfc6d78c9f0bf147a893ab1"}, 1116 | {file = 
"pywin32-302-cp38-cp38-win32.whl", hash = "sha256:e372e477d938a49266136bff78279ed14445e00718b6c75543334351bf535259"}, 1117 | {file = "pywin32-302-cp38-cp38-win_amd64.whl", hash = "sha256:543552e66936378bd2d673c5a0a3d9903dba0b0a87235ef0c584f058ceef5872"}, 1118 | {file = "pywin32-302-cp39-cp39-win32.whl", hash = "sha256:2393c1a40dc4497fd6161b76801b8acd727c5610167762b7c3e9fd058ef4a6ab"}, 1119 | {file = "pywin32-302-cp39-cp39-win_amd64.whl", hash = "sha256:af5aea18167a31efcacc9f98a2ca932c6b6a6d91ebe31f007509e293dea12580"}, 1120 | ] 1121 | pyyaml = [ 1122 | {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, 1123 | {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, 1124 | {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, 1125 | {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, 1126 | {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, 1127 | {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, 1128 | {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, 1129 | {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, 1130 | {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, 1131 | {file 
= "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, 1132 | {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, 1133 | {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, 1134 | {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, 1135 | {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, 1136 | {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, 1137 | {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, 1138 | {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, 1139 | {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, 1140 | {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, 1141 | {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, 1142 | {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, 1143 | {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, 1144 | {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, 1145 | {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, 1146 | {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, 1147 | {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, 1148 | {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, 1149 | {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, 1150 | {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, 1151 | {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, 1152 | {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, 1153 | {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, 1154 | {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, 1155 | ] 1156 | regex = [ 1157 | {file = "regex-2021.10.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:45b65d6a275a478ac2cbd7fdbf7cc93c1982d613de4574b56fd6972ceadb8395"}, 1158 | {file = 
"regex-2021.10.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74d071dbe4b53c602edd87a7476ab23015a991374ddb228d941929ad7c8c922e"}, 1159 | {file = "regex-2021.10.23-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:34d870f9f27f2161709054d73646fc9aca49480617a65533fc2b4611c518e455"}, 1160 | {file = "regex-2021.10.23-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fb698037c35109d3c2e30f2beb499e5ebae6e4bb8ff2e60c50b9a805a716f79"}, 1161 | {file = "regex-2021.10.23-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb46b542133999580ffb691baf67410306833ee1e4f58ed06b6a7aaf4e046952"}, 1162 | {file = "regex-2021.10.23-cp310-cp310-win32.whl", hash = "sha256:5e9c9e0ce92f27cef79e28e877c6b6988c48b16942258f3bc55d39b5f911df4f"}, 1163 | {file = "regex-2021.10.23-cp310-cp310-win_amd64.whl", hash = "sha256:ab7c5684ff3538b67df3f93d66bd3369b749087871ae3786e70ef39e601345b0"}, 1164 | {file = "regex-2021.10.23-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:de557502c3bec8e634246588a94e82f1ee1b9dfcfdc453267c4fb652ff531570"}, 1165 | {file = "regex-2021.10.23-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee684f139c91e69fe09b8e83d18b4d63bf87d9440c1eb2eeb52ee851883b1b29"}, 1166 | {file = "regex-2021.10.23-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5095a411c8479e715784a0c9236568ae72509450ee2226b649083730f3fadfc6"}, 1167 | {file = "regex-2021.10.23-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b568809dca44cb75c8ebb260844ea98252c8c88396f9d203f5094e50a70355f"}, 1168 | {file = "regex-2021.10.23-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:eb672217f7bd640411cfc69756ce721d00ae600814708d35c930930f18e8029f"}, 1169 | {file = "regex-2021.10.23-cp36-cp36m-win32.whl", hash = "sha256:a7a986c45d1099a5de766a15de7bee3840b1e0e1a344430926af08e5297cf666"}, 1170 | {file = "regex-2021.10.23-cp36-cp36m-win_amd64.whl", hash = "sha256:6d7722136c6ed75caf84e1788df36397efdc5dbadab95e59c2bba82d4d808a4c"}, 1171 | {file = "regex-2021.10.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f665677e46c5a4d288ece12fdedf4f4204a422bb28ff05f0e6b08b7447796d1"}, 1172 | {file = "regex-2021.10.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:450dc27483548214314640c89a0f275dbc557968ed088da40bde7ef8fb52829e"}, 1173 | {file = "regex-2021.10.23-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:129472cd06062fb13e7b4670a102951a3e655e9b91634432cfbdb7810af9d710"}, 1174 | {file = "regex-2021.10.23-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a940ca7e7189d23da2bfbb38973832813eab6bd83f3bf89a977668c2f813deae"}, 1175 | {file = "regex-2021.10.23-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:530fc2bbb3dc1ebb17f70f7b234f90a1dd43b1b489ea38cea7be95fb21cdb5c7"}, 1176 | {file = "regex-2021.10.23-cp37-cp37m-win32.whl", hash = "sha256:ded0c4a3eee56b57fcb2315e40812b173cafe79d2f992d50015f4387445737fa"}, 1177 | {file = "regex-2021.10.23-cp37-cp37m-win_amd64.whl", hash = "sha256:391703a2abf8013d95bae39145d26b4e21531ab82e22f26cd3a181ee2644c234"}, 1178 | {file = "regex-2021.10.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be04739a27be55631069b348dda0c81d8ea9822b5da10b8019b789e42d1fe452"}, 1179 | {file = "regex-2021.10.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13ec99df95003f56edcd307db44f06fbeb708c4ccdcf940478067dd62353181e"}, 1180 | {file = 
"regex-2021.10.23-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d1cdcda6bd16268316d5db1038965acf948f2a6f43acc2e0b1641ceab443623"}, 1181 | {file = "regex-2021.10.23-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c186691a7995ef1db61205e00545bf161fb7b59cdb8c1201c89b333141c438a"}, 1182 | {file = "regex-2021.10.23-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2b20f544cbbeffe171911f6ce90388ad36fe3fad26b7c7a35d4762817e9ea69c"}, 1183 | {file = "regex-2021.10.23-cp38-cp38-win32.whl", hash = "sha256:c0938ddd60cc04e8f1faf7a14a166ac939aac703745bfcd8e8f20322a7373019"}, 1184 | {file = "regex-2021.10.23-cp38-cp38-win_amd64.whl", hash = "sha256:56f0c81c44638dfd0e2367df1a331b4ddf2e771366c4b9c5d9a473de75e3e1c7"}, 1185 | {file = "regex-2021.10.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:80bb5d2e92b2258188e7dcae5b188c7bf868eafdf800ea6edd0fbfc029984a88"}, 1186 | {file = "regex-2021.10.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1dae12321b31059a1a72aaa0e6ba30156fe7e633355e445451e4021b8e122b6"}, 1187 | {file = "regex-2021.10.23-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1f2b59c28afc53973d22e7bc18428721ee8ca6079becf1b36571c42627321c65"}, 1188 | {file = "regex-2021.10.23-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d134757a37d8640f3c0abb41f5e68b7cf66c644f54ef1cb0573b7ea1c63e1509"}, 1189 | {file = "regex-2021.10.23-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0dcc0e71118be8c69252c207630faf13ca5e1b8583d57012aae191e7d6d28b84"}, 1190 | {file = "regex-2021.10.23-cp39-cp39-win32.whl", hash = "sha256:a30513828180264294953cecd942202dfda64e85195ae36c265daf4052af0464"}, 1191 | {file = 
"regex-2021.10.23-cp39-cp39-win_amd64.whl", hash = "sha256:0f7552429dd39f70057ac5d0e897e5bfe211629652399a21671e53f2a9693a4e"}, 1192 | {file = "regex-2021.10.23.tar.gz", hash = "sha256:f3f9a91d3cc5e5b0ddf1043c0ae5fa4852f18a1c0050318baf5fc7930ecc1f9c"}, 1193 | ] 1194 | requests = [ 1195 | {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, 1196 | {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, 1197 | ] 1198 | six = [ 1199 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 1200 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 1201 | ] 1202 | termcolor = [ 1203 | {file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"}, 1204 | ] 1205 | toml = [ 1206 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, 1207 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, 1208 | ] 1209 | tomli = [ 1210 | {file = "tomli-1.2.1-py3-none-any.whl", hash = "sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f"}, 1211 | {file = "tomli-1.2.1.tar.gz", hash = "sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442"}, 1212 | ] 1213 | typed-ast = [ 1214 | {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, 1215 | {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, 1216 | {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = 
"sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, 1217 | {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, 1218 | {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, 1219 | {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, 1220 | {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, 1221 | {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, 1222 | {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, 1223 | {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, 1224 | {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, 1225 | {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, 1226 | {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, 1227 | {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, 1228 | {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, 1229 | {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, 1230 | {file = 
"typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, 1231 | {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, 1232 | {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, 1233 | {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, 1234 | {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, 1235 | {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, 1236 | {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, 1237 | {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, 1238 | {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, 1239 | {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, 1240 | {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, 1241 | {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, 1242 | {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, 1243 | {file = "typed_ast-1.4.3.tar.gz", hash = 
"sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, 1244 | ] 1245 | typing-extensions = [ 1246 | {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, 1247 | {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, 1248 | {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, 1249 | ] 1250 | urllib3 = [ 1251 | {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, 1252 | {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, 1253 | ] 1254 | virtualenv = [ 1255 | {file = "virtualenv-20.10.0-py2.py3-none-any.whl", hash = "sha256:4b02e52a624336eece99c96e3ab7111f469c24ba226a53ec474e8e787b365814"}, 1256 | {file = "virtualenv-20.10.0.tar.gz", hash = "sha256:576d05b46eace16a9c348085f7d0dc8ef28713a2cabaa1cf0aea41e8f12c9218"}, 1257 | ] 1258 | zipp = [ 1259 | {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, 1260 | {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, 1261 | ] 1262 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["poetry-core>=1.0.0"] 3 | build-backend = "poetry.core.masonry.api" 4 | 5 | [tool.poetry] 6 | name = "lobbyboy" 7 | version = "0.4.0" 8 | description = "Give me a server." 
9 | authors = ["laixintao "] 10 | license = "GNU" 11 | 12 | [tool.poetry.scripts] 13 | lobbyboy-server = 'lobbyboy.main:main' 14 | lobbyboy-config-example = 'lobbyboy.scripts:print_example_config' 15 | 16 | [tool.poetry.dependencies] 17 | python = "^3.7" 18 | paramiko = {extras = ["gssapi"], version = "^2.8.0"} 19 | python-digitalocean = "^1.17.0" 20 | toml = "^0.10.2" 21 | linode-api4 = "^5.2.1" 22 | pyvultr = "^0.1.5" 23 | pre-commit = "^2.16.0" 24 | 25 | [tool.poetry.dev-dependencies] 26 | black = "^21.9b0" 27 | bumpversion = "^0.6.0" 28 | pytest = "^6.2.5" 29 | flake8 = "^4.0.1" 30 | freezegun = "^1.1.0" 31 | pytest-cov = "^3.0.0" 32 | 33 | [tool.black] 34 | target-version = ["py37"] 35 | line-length = 120 36 | 37 | [tool.coverage.run] 38 | source = ["lobbyboy"] 39 | 40 | [tool.isort] 41 | profile = "black" 42 | atomic = true 43 | 44 | [tool.bandit] 45 | recursive = true 46 | # B108: Probable insecure usage of temp file/directory. 47 | # B602: Subprocess call with shell=True identified, security issue. 48 | # B404: Consider possible security implications associated with the subprocess module. 49 | # B603: Subprocess call - check for execution of untrusted input. 50 | # B607: Starting a process with a partial executable path. 51 | skips = ["B108", "B602", "B404", "B603", "B607"] 52 | assert_used.skips = ['tests/test_*.py'] 53 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | # Lobbyboy 2 | 3 | > What is a lobby boy? A lobby boy is completely invisible, yet always in sight. 4 | > A lobby boy remembers what people hate. A lobby boy anticipates the client's 5 | > needs before the needs are needed. A lobby boy is, above all, discreet to a 6 | > fault. 7 | > 8 | >

--The Grand Budapest Hotel

9 | 10 | [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/lobbyboy-ssh/lobbyboy/main.svg)](https://results.pre-commit.ci/latest/github/lobbyboy-ssh/lobbyboy/main) 11 | [![Test](https://github.com/lobbyboy-ssh/lobbyboy/actions/workflows/unittest.yaml/badge.svg?branch=main)](https://github.com/lobbyboy-ssh/lobbyboy/actions/workflows/unittest.yaml) 12 | 13 | PyPI version 14 | Python version 15 | [![codecov](https://codecov.io/gh/lobbyboy-ssh/lobbyboy/branch/main/graph/badge.svg?token=2HC7YDQBGT)](https://codecov.io/gh/lobbyboy-ssh/lobbyboy) 16 | 17 | **This project is still under testing, it works but may have bugs.** 18 | 19 | 20 | 21 | * [What is lobbyboy?](#what-is-lobbyboy) 22 | * [Key Features](#key-features) 23 | * [Installation](#installation) 24 | * [Run server](#run-server) 25 | * [Generate a key pair for authentication](#generate-a-key-pair-for-authentication) 26 | * [Deployment](#deployment) 27 | * [Systemd Example](#systemd-example) 28 | * [Run in Docker](#run-in-docker) 29 | * [Providers](#providers) 30 | * [Builtin Providers](#builtin-providers) 31 | * [Vagrant Provider](#vagrant-provider) 32 | * [Footloose Provider](#footloose-provider) 33 | * [DigitalOcean Provider](#digitalocean-provider) 34 | * [Linode Provider](#linode-provider) 35 | * [Ignite(Firecracker) Provider](#ignitefirecracker-provider) 36 | * [Multipass Provider](#multipass-provider) 37 | * [Write Your Own Providers](#write-your-own-providers) 38 | * [Publish Your Own Providers](#publish-your-own-providers) 39 | * [FAQ](#faq) 40 | * [I Want to Know More!](#i-want-to-know-more) 41 | 42 | 43 | 44 | ## What is lobbyboy? 45 | 46 | Well, lobbyboy is an ssh server. Yes, like `sshd`. But instead of spawning a new 47 | shell on the server like sshd, when you ssh to lobbyboy, lobbyboy will create a 48 | new server(VPS) from available providers(e.g., DigitalOcean, AWS, GCP, 49 | Vultr, etc), then redirect you to the newly created server.
Of course, if 50 | lobbyboy finds any servers available already, he will just ask if you want to 51 | enter the existing server, or still want to create a new one. 52 | 53 | ![](./docs/images/lobbyboy.png) 54 | 55 | ## Key Features 56 | 57 | - talks in SSH2 protocol, no need to install any software or configs for 58 | client-side, just ssh to lobbyboy! 59 | - extendable provider: just implement 3 methods, then lobbyboy can work with any 60 | provider! 61 | - destroy the server when you no longer need it. 62 | - manage ssh keys for you 63 | 64 | ## Installation 65 | 66 | Install libkrb5-dev first, this is a dependency for gssapi support. 67 | 68 | ```bash 69 | apt install libkrb5-dev 70 | ``` 71 | 72 | Install via pip: 73 | 74 | ```bash 75 | pip install lobbyboy 76 | ``` 77 | 78 | ## Run server 79 | 80 | First, generate a config file: 81 | 82 | ```bash 83 | lobbyboy-config-example > config.toml 84 | # Edit your config before running! 85 | ``` 86 | 87 | Run the server with: 88 | 89 | ```bash 90 | lobbyboy-server -c config.toml 91 | ``` 92 | 93 | You can ssh to Lobbyboy now, if you keep the default user `Gustave` in default 94 | config. You can ssh to Lobbyboy via: 95 | 96 | ```bash 97 | ssh Gustave@127.0.0.1 -p 12200 98 | # Enter the default password "Fiennes"(without quotes) 99 | Welcome to Lobbyboy 0.2.2! 100 | There are 1 available servers: 101 | 0 - Create a new server... 102 | 1 - Enter vagrant lobbyboy-41 127.0.0.1 (0 active sessions) 103 | Please input your choice (number): 104 | ``` 105 | 106 | You may want to change the password in `config.toml` or use a public key for 107 | authentication. The latter is recommended in a production environment. 108 | 109 | ### Generate a key pair for authentication 110 | 111 | Generate a key pair: 112 | 113 | ```bash 114 | ssh-keygen -f lobbyboy_key 115 | ``` 116 | 117 | Add the content of `lobbyboy_key.pub` to the end of `authorized_keys` under 118 | `[user.Gustave]` table.
Now you can ssh to the lobbyboy server via: 119 | 120 | ```bash 121 | ssh Gustave@127.0.0.1 -i lobbyboy_key 122 | ``` 123 | 124 | ## Deployment 125 | 126 | Lobbyboy is supposed to be a server daemon, so you can manage it by 127 | systemd/[supervisord](http://supervisord.org/) or put it into a docker. 128 | 129 | ### Systemd Example 130 | 131 | ```ini 132 | [Unit] 133 | Description=Lobbyboy Server 134 | 135 | [Service] 136 | User=me 137 | Group=me 138 | ExecStart=/path/to/lobbyboy-server -c /path/to/lobbyboy/config.toml 139 | Restart=on-failure 140 | WorkingDirectory=/path/to/lobbyboy/ 141 | 142 | [Install] 143 | WantedBy=multi-user.target 144 | ``` 145 | 146 | ### Run in Docker 147 | 148 | ```bash 149 | # Generate a config file 150 | docker run --rm ghcr.io/lobbyboy-ssh/lobbyboy lobbyboy-config-example > lobbyboy_config.toml 151 | # Run the docker container 152 | docker run -v `pwd`/lobbyboy_config.toml:/app/config.toml -p "12200:12200" -d ghcr.io/lobbyboy-ssh/lobbyboy 153 | ``` 154 | 155 | The lobbyboy server should be active on 12200 port and you can connect to it 156 | with 157 | 158 | ``` 159 | ssh Gustave@127.0.0.1 -p 12200 160 | ``` 161 | 162 | The default password for user `Gustave` is `Fiennes`. 
**Please change it when 163 | you deployed it into production, and consider use ssh key to auth instead of 164 | password.** 165 | 166 | ## Providers 167 | 168 | // TBD 169 | 170 | ### Builtin Providers 171 | 172 | Lobbyboy current support multiple Providers: 173 | 174 | - Vagrant (Need vagrant and virtualbox to be installed) 175 | - Footlosse, in another words, containers (Need 176 | [footloose](https://github.com/weaveworks/footloose) and docker to be 177 | installed) 178 | - DigitalOcean 179 | - Linode 180 | - [Ignite](https://github.com/weaveworks/ignite) (Runs Firecracker VM) 181 | - [multipass](https://multipass.run) 182 | 183 | Different Providers support different configs, please see the 184 | [example config](https://github.com/laixintao/lobbyboy/blob/main/lobbyboy/conf/lobbyboy_config.toml) 185 | for more detail. 186 | 187 | #### Vagrant Provider 188 | 189 | Vagrant Provider won't cost you any money, [vagrant](https://www.vagrantup.com/) 190 | is a software runs on your computer along with virtual machine providers, 191 | vagrant can provision and control your VM. 192 | 193 | This provider can help you to create a new Vagrant instance when you login to 194 | Lobbyboy, and destroy the server when you no longer use it. 195 | 196 | Supported Features: 197 | 198 | - Create new Vagrant instances 199 | - You can configure your VM via `vagrantfile` config (see the config 200 | [example](./lobbyboy/conf/lobbyboy_config.toml)). 201 | 202 | #### Footloose Provider 203 | 204 | [footloose](https://github.com/weaveworks/footloose) can make your docker 205 | containers(or Firecracker with [ignite](https://github.com/weaveworks/ignite)) 206 | act like virtual machine, so you can ssh to it. 207 | 208 | Supported feature: 209 | 210 | - Configurable base image 211 | - Create a docker container and redirect you in 212 | 213 | #### DigitalOcean Provider 214 | 215 | This Provider will create 216 | [Droplet](https://docs.digitalocean.com/products/droplets/) from DigitalOcean. 
217 | 218 | Supported Features: 219 | 220 | - Create a new ssh key every time a droplet is created. 221 | - Ask user to input region/droplet size/image when creating. 222 | - User can save favorite Droplet region/size/image in configs to quick create. 223 | - Destroy droplet when it is not in use. 224 | 225 | #### Linode Provider 226 | 227 | This Provider will create [Node](https://www.linode.com/docs/products/compute/) 228 | from Linode. 229 | 230 | Supported Features: 231 | 232 | - Create a new ssh key every time a node is created. 233 | - Ask user to input region/node type/image when creating. 234 | - User can save favorite node region/type/image in configs to quick create. 235 | - Destroy node when it is not in use.
To make a new Provider work, you need to extend the base class 263 | `lobbyboy.provider.BaseProvider` and implement 3 methods:
:D ) 326 | 327 | ## I Want to Know More! 328 | 329 | - [介绍 Lobbyboy 项目](https://www.kawabangga.com/posts/4576) (in Chinese) 330 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lobbyboy-ssh/lobbyboy/dafbb5905bd1c43facda7001a36e11b3a6ede837/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | from collections import namedtuple 2 | 3 | test_pair = namedtuple("test_pair", "input, expected") 4 | -------------------------------------------------------------------------------- /tests/test_config.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from lobbyboy.config import LBConfig 4 | 5 | PARENT_DIR = Path(__file__).parent 6 | CONFIG_FILE = PARENT_DIR.parent / "lobbyboy" / "conf" / "lobbyboy_config.toml" 7 | 8 | 9 | def test_load_config(): 10 | LBConfig.load(CONFIG_FILE) 11 | 12 | 13 | def test_load_providers_from_config(): 14 | config = LBConfig.load(CONFIG_FILE) 15 | assert len(config.provider_cls) > 0 16 | -------------------------------------------------------------------------------- /tests/test_providers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lobbyboy-ssh/lobbyboy/dafbb5905bd1c43facda7001a36e11b3a6ede837/tests/test_providers/__init__.py -------------------------------------------------------------------------------- /tests/test_providers/conftest.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | from pathlib import Path 3 | 4 | import pytest 5 | 6 | from lobbyboy.config import LBServerMeta 7 | from lobbyboy.contrib.provider.footloose 
import FootlooseConfig, FootlooseProvider 8 | 9 | 10 | @pytest.fixture 11 | def footloose_provider(): 12 | workspace = Path("/tmp/footloose_test/") 13 | yield FootlooseProvider(name="footloose", config=FootlooseConfig(), workspace=workspace) 14 | shutil.rmtree(workspace, ignore_errors=True) 15 | 16 | 17 | @pytest.fixture 18 | def footloose_server_meta(): 19 | workspace = Path("/tmp/footloose_test/") 20 | yield LBServerMeta(workspace=workspace, provider_name="footloose", server_name="2021-12-05-1405") 21 | shutil.rmtree(workspace, ignore_errors=True) 22 | -------------------------------------------------------------------------------- /tests/test_providers/test_footloose.py: -------------------------------------------------------------------------------- 1 | import re 2 | from pathlib import Path 3 | from unittest import mock 4 | from unittest.mock import call 5 | 6 | from freezegun import freeze_time 7 | 8 | from lobbyboy.config import LBServerMeta 9 | 10 | 11 | @mock.patch("subprocess.run") 12 | def test_footloose_destroy(fake_subprocess_run, footloose_provider, footloose_server_meta): 13 | fake_complete_process = mock.MagicMock() 14 | fake_complete_process.return_value.returncode = 0 15 | fake_subprocess_run.side_effect = fake_complete_process 16 | 17 | destroy_command = footloose_provider.destroy_server(footloose_server_meta, None) 18 | fake_subprocess_run.assert_called_with( 19 | ["footloose", "delete", "-c", Path("/tmp/footloose_test/footloose.yaml")], capture_output=True 20 | ) 21 | assert destroy_command is True 22 | 23 | 24 | def test_ssh_server_commands(footloose_provider, footloose_server_meta): 25 | command = footloose_provider.ssh_server_command(footloose_server_meta, None) 26 | assert command == ["cd /tmp/footloose_test && footloose ssh root@2021-12-05-14050"] 27 | 28 | 29 | @mock.patch("subprocess.Popen") 30 | @freeze_time("2012-01-14 12:00:01") 31 | def test_create_server(mock_popen, footloose_provider): 32 | mock_channel = mock.MagicMock() 33 | 
mock_process = mock.MagicMock() 34 | mock_process.poll.side_effect = [False, True] 35 | mock_process.returncode = 0 36 | mock_popen.return_value = mock_process 37 | 38 | server = footloose_provider.create_server(mock_channel) 39 | 40 | mock_popen.assert_called_with(["footloose", "create"], cwd="/tmp/footloose_test/2012-01-14-1200") 41 | assert mock_channel.sendall.mock_calls[:3] == [ 42 | call(b"Generate server 2012-01-14-1200 workspace /tmp/footloose_test/2012-01-14-1200 done.\r\n"), 43 | call(b"Check footloose create done"), 44 | call(b"."), 45 | ] 46 | assert re.match(rb"OK\(\d.\ds\).\r\n", mock_channel.sendall.mock_calls[-1][1][0]) is not None 47 | assert server == LBServerMeta( 48 | provider_name="footloose", 49 | workspace=Path("/tmp/footloose_test/2012-01-14-1200"), 50 | server_name="2012-01-14-1200", 51 | server_host="127.0.0.1", 52 | server_user="root", 53 | server_port=22, 54 | created_timestamp=1326542401, 55 | ssh_extra_args=[], 56 | manage=True, 57 | ) 58 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import os.path 3 | import sys 4 | from dataclasses import dataclass 5 | from datetime import date, datetime 6 | from decimal import Decimal 7 | from pathlib import Path 8 | from unittest import mock 9 | 10 | import pytest 11 | 12 | from lobbyboy.exceptions import CantEnsureBytesException, TimeStrParseTypeException 13 | from lobbyboy.utils import ( 14 | confirm_dc_type, 15 | dict_factory, 16 | encoder_factory, 17 | ensure_bytes, 18 | humanize_seconds, 19 | import_class, 20 | port_is_open, 21 | send_to_channel, 22 | to_seconds, 23 | ) 24 | from tests.conftest import test_pair 25 | 26 | 27 | @dataclass 28 | class FakeDataclass: 29 | A: int = 1 30 | B: str = "b" 31 | 32 | 33 | test_args_encoder_factory = [ 34 | test_pair(input=date(2001, 1, 19), expected="2001-01-19"), 35 | 
test_pair(input=datetime(2000, 1, 10, 23, 58, 59), expected="2000-01-10 23:58:59"), 36 | test_pair(input=Decimal("19.3"), expected="19.3"), 37 | test_pair(input=Path("/go/to/test/path"), expected="/go/to/test/path"), 38 | test_pair(input=FakeDataclass(), expected={"A": 1, "B": "b"}), 39 | test_pair(input={"A": "A"}, expected={"A": "A"}), 40 | ] 41 | 42 | 43 | @pytest.mark.parametrize("test", test_args_encoder_factory) 44 | def test_encoder_factory(test: test_pair): 45 | encoder = encoder_factory(raise_error=False) 46 | assert encoder(test.input) == test.expected 47 | 48 | 49 | test_mutation_args_encoder_factory = [ 50 | test_pair(input=date(2001, 1, 19), expected="2001/01/19"), 51 | test_pair(input=datetime(2000, 1, 10, 23, 58, 59), expected="20000110 235859"), 52 | test_pair(input=Decimal("19.3"), expected=19), 53 | test_pair(input=Path("/go/to/test/path"), expected="path"), 54 | test_pair(input={"B": "C"}, expected={"B": "C"}), 55 | ] 56 | 57 | 58 | @pytest.mark.parametrize("test", test_mutation_args_encoder_factory) 59 | def test_encoder_factory_mutation(test: test_pair): 60 | encoder = encoder_factory( 61 | date_fmt="%Y/%m/%d", 62 | dt_fmt="%Y%m%d %H%M%S", 63 | decimal_factory=int, 64 | path_factory=lambda x: os.path.basename(x), 65 | raise_error=False, 66 | ) 67 | assert encoder(test.input) == test.expected 68 | 69 | 70 | def test_encoder_factory_exception(): 71 | encoder = encoder_factory() 72 | with pytest.raises(TypeError): 73 | encoder(range(3)) 74 | 75 | 76 | test_args_dict_factory = [ 77 | test_pair( 78 | input=[ 79 | { 80 | "A": "A", 81 | "B": "B", 82 | "ignore_field_1": "ignore_field_1", 83 | "ignore_field_2": "ignore_field_2", 84 | "_ignore_1": "_ignore_1", 85 | "_ignore_2": "_ignore_2", 86 | }, 87 | ["ignore_field_1", "ignore_field_2"], 88 | lambda x: x.startswith("_"), 89 | ], 90 | expected={"A": "A", "B": "B"}, 91 | ), 92 | test_pair( 93 | input=[ 94 | { 95 | "C": Decimal("19.3"), 96 | "ignore_field_4": "ignore_field_14", 97 | "_ignore_2": 
"_ignore_2", 98 | }, 99 | ["ignore_field_4", "ignore_field_5"], 100 | lambda x: False, 101 | encoder_factory(raise_error=False), 102 | ], 103 | expected={"C": "19.3", "_ignore_2": "_ignore_2"}, 104 | ), 105 | ] 106 | 107 | 108 | @pytest.mark.parametrize("test", test_args_dict_factory) 109 | def test_dict_factory(test: test_pair): 110 | original = copy.deepcopy(test.input) 111 | assert dict_factory(*test.input) == test.expected 112 | assert original == test.input 113 | 114 | 115 | @mock.patch("socket.socket.connect_ex") 116 | def test_port_is_open(fake_socket): 117 | test_ip = "127.0.0.1" 118 | 119 | fake_socket.side_effect = mock.MagicMock(return_value=0) 120 | is_open = port_is_open(test_ip) 121 | assert is_open is True 122 | 123 | fake_socket.side_effect = mock.MagicMock(return_value=1) 124 | is_open = port_is_open(test_ip) 125 | assert is_open is False 126 | 127 | 128 | test_args_to_seconds = [ 129 | test_pair(input="0", expected=0), 130 | test_pair(input="10s", expected=10), 131 | test_pair(input="1m", expected=60), 132 | test_pair(input="59m", expected=60 * 59), 133 | test_pair(input="1h", expected=60 * 60), 134 | test_pair(input="20h", expected=60 * 60 * 20), 135 | test_pair(input="1d", expected=60 * 60 * 24 * 1), 136 | test_pair(input="3d", expected=60 * 60 * 24 * 3), 137 | ] 138 | 139 | 140 | @pytest.mark.parametrize("test", test_args_to_seconds) 141 | def test_to_seconds(test: test_pair): 142 | assert to_seconds(test.input) == test.expected 143 | 144 | 145 | test_exception_args_to_seconds = [ 146 | test_pair(input="-1", expected=TimeStrParseTypeException), 147 | test_pair(input="2", expected=TimeStrParseTypeException), 148 | test_pair(input="1w", expected=TimeStrParseTypeException), 149 | test_pair(input="1min", expected=TimeStrParseTypeException), 150 | ] 151 | 152 | 153 | @pytest.mark.parametrize("test", test_exception_args_to_seconds) 154 | def test_to_seconds_exception(test: test_pair): 155 | with pytest.raises(test.expected): 156 | 
to_seconds(test.input) 157 | 158 | 159 | test_args_humanize_seconds = [ 160 | test_pair(input=0, expected="0:00:00"), 161 | test_pair(input=44, expected="0:00:44"), 162 | test_pair(input=364121, expected="4 days, 5:08:41"), 163 | ] 164 | 165 | 166 | @pytest.mark.parametrize("test", test_args_humanize_seconds) 167 | def test_humanize_seconds(test: test_pair): 168 | assert humanize_seconds(test.input) == test.expected 169 | 170 | 171 | test_args_ensure_bytes = [ 172 | test_pair(input="test", expected=b"test"), 173 | test_pair(input=b"test", expected=b"test"), 174 | ] 175 | 176 | 177 | @pytest.mark.parametrize("test", test_args_ensure_bytes) 178 | def test_ensure_bytes(test: test_pair): 179 | assert ensure_bytes(test.input) == test.expected 180 | 181 | 182 | test_exception_args_ensure_bytes = [ 183 | test_pair(input=None, expected=CantEnsureBytesException), 184 | test_pair(input=0, expected=CantEnsureBytesException), 185 | ] 186 | 187 | 188 | @pytest.mark.parametrize("test", test_exception_args_ensure_bytes) 189 | def test_ensure_bytes_exception(test: test_pair): 190 | with pytest.raises(test.expected): 191 | ensure_bytes(test.input) 192 | 193 | 194 | test_args_send_to_channel = [ 195 | test_pair(input=["fake_msg1", "prefix1", "suffix1"], expected=None), 196 | test_pair(input=["fake_msg2", b"prefix2", "suffix2"], expected=None), 197 | test_pair(input=["fake_msg3", "prefix3", b"suffix3"], expected=None), 198 | ] 199 | 200 | 201 | @pytest.mark.parametrize("test", test_args_send_to_channel) 202 | def test_send_to_channel(test: test_pair): 203 | fake_channel = mock.MagicMock() 204 | assert send_to_channel(fake_channel, *test.input) == test.expected 205 | 206 | 207 | test_args_confirm_dc_type = [ 208 | test_pair(input=[FakeDataclass(A=2, B="1"), FakeDataclass], expected=FakeDataclass(A=2, B="1")), 209 | test_pair(input=[dict(A=2, B="1"), FakeDataclass], expected=FakeDataclass(A=2, B="1")), 210 | test_pair(input=[None, FakeDataclass], expected=None), 211 | 
test_pair(input=[[1, 2, 3], FakeDataclass], expected=[1, 2, 3]), 212 | ] 213 | 214 | 215 | @pytest.mark.parametrize("test", test_args_confirm_dc_type) 216 | def test_confirm_dc_type(test: test_pair): 217 | assert test.expected == confirm_dc_type(*test.input) 218 | 219 | 220 | def test_choose_option(): ... 221 | 222 | 223 | def test_read_user_input_line(): ... 224 | 225 | 226 | def test_confirm_ssh_key_pair(): ... 227 | 228 | 229 | def test_generate_ssh_key_pair(): ... 230 | 231 | 232 | def test_write_key_to_file(): ... 233 | 234 | 235 | def test_try_load_key_from_file(): ... 236 | 237 | 238 | def test_import_class(): 239 | assert import_class("") is None 240 | assert import_class("Test") is None 241 | assert import_class("Test::A::B") is None 242 | 243 | with mock.patch("importlib.import_module") as imp: 244 | imp.side_effect = mock.MagicMock(return_value=sys.modules[__name__]) 245 | assert import_class(f"FAKE_MODULE::{FakeDataclass.__name__}") is FakeDataclass 246 | --------------------------------------------------------------------------------