├── pytest.ini ├── .prettierrc.yml ├── .flake8 ├── .vscode └── settings.json ├── .copier-answers.autopretty.yml ├── hooks └── build ├── .editorconfig ├── .github └── workflows │ ├── pre-commit.yml │ └── ci.yml ├── tests ├── test_service.py ├── not_responding_tcp_port.py ├── conftest.py ├── healthcheck.yaml ├── test_healthcheck_ports.py └── test_healtcheck.py ├── .copier-answers.image-template.yml ├── pyproject.toml ├── Dockerfile ├── proxy.py ├── .pre-commit-config.yaml ├── .gitignore ├── healthcheck.py ├── README.md ├── LICENSE └── poetry.lock /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts = -n auto -ra 3 | pythonpath = . 4 | -------------------------------------------------------------------------------- /.prettierrc.yml: -------------------------------------------------------------------------------- 1 | printWidth: 88 2 | proseWrap: always 3 | xmlWhitespaceSensitivity: "ignore" 4 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E203, E501, W503, B950 3 | max-line-length = 88 4 | select = C,E,F,W,B 5 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "cSpell.words": ["pytest"], 3 | "python.pythonPath": ".venv/bin/python", 4 | "python.testing.pytestEnabled": true 5 | } 6 | -------------------------------------------------------------------------------- /.copier-answers.autopretty.yml: -------------------------------------------------------------------------------- 1 | # Changes here will be overwritten by Copier; do NOT edit manually 2 | _commit: v0.1.0 3 | _src_path: https://github.com/copier-org/autopretty.git 4 | ansible: false 5 | biggest_kbs: 1000 6 | github: true 7 | js: false 8 | main_branches: 9 | - master 10 | python: true 11 | -------------------------------------------------------------------------------- /hooks/build: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -ex 3 | 4 | # See http://label-schema.org/rc1/#build-time-labels 5 | time docker image build \ 6 | --build-arg VCS_REF="$GIT_SHA1" \ 7 | --build-arg BUILD_DATE="$(date --rfc-3339 ns)" \ 8 | --build-arg VERSION="$DOCKER_TAG" \ 9 | --tag "$IMAGE_NAME" \ 10 | . 
11 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 4 6 | end_of_line = lf 7 | charset = utf-8 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | 11 | [*.py] 12 | # For isort 13 | profile = black 14 | 15 | [*.{code-snippets,code-workspace,json,yaml,yml}{,.jinja}] 16 | indent_size = 2 17 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit.yml: -------------------------------------------------------------------------------- 1 | name: pre-commit 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | - master 8 | 9 | jobs: 10 | pre-commit: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v4 14 | - uses: actions/setup-python@v4 15 | - uses: pre-commit/action@v3.0.1 16 | -------------------------------------------------------------------------------- /tests/test_service.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from plumbum.cmd import docker 4 | 5 | logger = logging.getLogger() 6 | 7 | 8 | def test_containers_start(container_factory): 9 | with container_factory(target="google.com") as test_container: 10 | docker( 11 | "exec", 12 | test_container, 13 | "socat", 14 | "-V", 15 | ) 16 | -------------------------------------------------------------------------------- /.copier-answers.image-template.yml: -------------------------------------------------------------------------------- 1 | # Changes here will be overwritten by Copier; do NOT edit manually 2 | _commit: v0.1.3 3 | _src_path: https://github.com/Tecnativa/image-template.git 4 | dockerhub_image: tecnativa/whitelist 5 | image_platforms: 6 | - linux/386 7 | - linux/amd64 8 | main_branches: 9 | - master 10 | project_name: docker-whitelist 11 | project_owner: Tecnativa 12 | push_to_ghcr: true 13 | pytest: true 14 | python_versions: 15 | - "3.9" 16 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | 2 | [tool.poetry] 3 | name = "docker-whitelist" 4 | version = "0.0.0" 5 | description = "" 6 | package-mode = false 7 | authors = ["Tecnativa"] 8 | 9 | [tool.poetry.dependencies] 10 | python = "^3.9" 11 | plumbum = "^1.6.9" 12 | pycurl = "^7.45.6" 13 | 14 | [tool.poetry.group.dev.dependencies] 15 | black = {version = "^20.8b1", allow-prereleases = true} 16 | flake8 = "^3.8.4" 17 | plumbum = "^1.6.9" 18 | pytest-xdist = "^2.1.0" 19 | pytest-timeout = "^2.2.0" 20 | 21 | [build-system] 22 | requires = ["poetry-core>=1.0.0"] 23 | build-backend = "poetry.core.masonry.api" 24 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3-alpine 2 | ENTRYPOINT ["dumb-init", "--"] 3 | CMD ["proxy"] 4 | HEALTHCHECK CMD ["healthcheck"] 5 | RUN apk add --no-cache -t .build build-base curl-dev &&\ 6 | apk add --no-cache socat &&\ 7 | apk add --no-cache libcurl &&\ 8 | pip install --no-cache-dir dnspython dumb-init pycurl &&\ 9 | apk del .build 10 | ENV NAMESERVERS="208.67.222.222 8.8.8.8 208.67.220.220 8.8.4.4" \ 11 | PORT="80 443" \ 12 | PRE_RESOLVE=0 \ 13 | MODE=tcp \ 14 | VERBOSE=0 \ 15 | MAX_CONNECTIONS=100 \ 16 | UDP_ANSWERS=1 
\ 17 | HTTP_HEALTHCHECK=0\ 18 | HTTP_HEALTHCHECK_URL="http://\$TARGET/"\ 19 | SMTP_HEALTHCHECK=0\ 20 | SMTP_HEALTHCHECK_URL="smtp://\$TARGET/"\ 21 | SMTP_HEALTHCHECK_COMMAND="HELP" 22 | COPY proxy.py /usr/local/bin/proxy 23 | COPY healthcheck.py /usr/local/bin/healthcheck 24 | 25 | # Labels 26 | ARG BUILD_DATE 27 | ARG VCS_REF 28 | ARG VERSION 29 | LABEL org.label-schema.build-date="$BUILD_DATE" \ 30 | org.label-schema.name="Docker Whitelist" \ 31 | org.label-schema.description="Simple whitelist proxy" \ 32 | org.label-schema.license=Apache-2.0 \ 33 | org.label-schema.url="https://www.tecnativa.com" \ 34 | org.label-schema.vcs-ref="$VCS_REF" \ 35 | org.label-schema.vcs-url="https://github.com/Tecnativa/docker-whitelist" \ 36 | org.label-schema.vendor="Tecnativa" \ 37 | org.label-schema.version="$VERSION" \ 38 | org.label-schema.schema-version="1.0" 39 | -------------------------------------------------------------------------------- /tests/not_responding_tcp_port.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | This is a server that accepts TCP connections but doesn't send any response. It just closes the connection after an hour 4 | has passed. It is intended for testing timeouts only. 5 | """ 6 | import errno 7 | import signal 8 | import socket 9 | import sys 10 | import time 11 | 12 | 13 | def keep_client_waiting(server_socket): 14 | client, address = server_socket.accept() 15 | print("connected", flush=True) 16 | server_socket.setblocking(0) 17 | time.sleep(3600) 18 | print("waited for an hour", flush=True) 19 | server_socket.close() 20 | 21 | 22 | def start_server(): 23 | listen_address = sys.argv[1] if len(sys.argv) > 1 else "" 24 | listen_port = int(sys.argv[2]) if len(sys.argv) > 2 else 80 25 | server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 26 | server_socket.bind((listen_address, listen_port)) 27 | server_socket.listen() 28 | print("listening", flush=True) 29 | 30 | def shutdown_on_interrupt(signum, frame): 31 | stop_server(server_socket) 32 | 33 | signal.signal(signal.SIGINT, shutdown_on_interrupt) 34 | signal.signal(signal.SIGTERM, shutdown_on_interrupt) 35 | try: 36 | keep_client_waiting(server_socket) 37 | except OSError as e: 38 | if e.errno == errno.EBADF: 39 | print("stopped", flush=True) 40 | else: 41 | raise 42 | 43 | 44 | def stop_server(server_socket): 45 | print("stopping...", flush=True) 46 | server_socket.close() 47 | 48 | 49 | if __name__ == "__main__": 50 | start_server() 51 | -------------------------------------------------------------------------------- /proxy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import asyncio 4 | import logging 5 | import os 6 | import random 7 | 8 | from dns.resolver import Resolver 9 | 10 | logging.root.setLevel(logging.INFO) 11 | mode = os.environ["MODE"] 12 | ports = os.environ["PORT"].split() 13 | max_connections = os.environ.get("MAX_CONNECTIONS", 100) 14 | ip = target = os.environ["TARGET"] 15 | udp_answers = os.environ.get("UDP_ANSWERS", "1") 16 | 17 | # Resolve target if required 18 | if os.environ["PRE_RESOLVE"] == "1": 19 | resolver = Resolver() 20 | resolver.nameservers = os.environ["NAMESERVERS"].split() 21 | ip = random.choice([answer.address for answer in resolver.resolve(target)]) 22 | logging.info("Resolved %s to %s", target, ip) 23 | 24 | 25 | async def netcat(port): 26 | # Use a persistent socat relay in listening mode 27 | command = 
["socat"] 28 | # Verbose mode 29 | if os.environ["VERBOSE"] == "1": 30 | command.append("-v") 31 | if mode == "udp" and udp_answers == "0": 32 | command += [f"udp-recv:{port},reuseaddr", f"udp-sendto:{ip}:{port}"] 33 | else: 34 | command += [ 35 | f"{mode}-listen:{port},fork,reuseaddr,max-children={max_connections}", 36 | f"{mode}-connect:{ip}:{port}", 37 | ] 38 | # Create the process and wait until it exits 39 | logging.info("Executing: %s", " ".join(command)) 40 | process = await asyncio.create_subprocess_exec(*command) 41 | await process.wait() 42 | 43 | 44 | # Wait until all proxies exited, if they ever do 45 | try: 46 | loop = asyncio.get_event_loop() 47 | loop.run_until_complete(asyncio.gather(*map(netcat, ports))) 48 | finally: 49 | loop.run_until_complete(loop.shutdown_asyncgens()) 50 | loop.close() 51 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from contextlib import contextmanager 3 | from pathlib import Path 4 | 5 | import pytest 6 | from plumbum import local 7 | from plumbum.cmd import docker 8 | 9 | _logger = logging.getLogger(__name__) 10 | 11 | 12 | def pytest_addoption(parser): 13 | """Allow prebuilding image for local testing.""" 14 | parser.addoption( 15 | "--prebuild", action="store_true", help="Build local image before testing" 16 | ) 17 | parser.addoption( 18 | "--image", 19 | action="store", 20 | default="test:docker-whitelist", 21 | help="Specify testing image name", 22 | ) 23 | 24 | 25 | @pytest.fixture(scope="session") 26 | def image(request): 27 | """Get image name. Builds it if needed.""" 28 | image = request.config.getoption("--image") 29 | if request.config.getoption("--prebuild"): 30 | build = docker["image", "build", "-t", image, Path(__file__).parent.parent] 31 | retcode, stdout, stderr = build.run() 32 | _logger.log( 33 | # Pytest prints warnings if a test fails, so this is a warning if 34 | # the build succeeded, to allow debugging the build logs 35 | logging.ERROR if retcode else logging.WARNING, 36 | "Build logs for COMMAND: %s\nEXIT CODE:%d\nSTDOUT:%s\nSTDERR:%s", 37 | build.bound_command(), 38 | retcode, 39 | stdout, 40 | stderr, 41 | ) 42 | assert not retcode, "Image build failed" 43 | return image 44 | 45 | 46 | @pytest.fixture(scope="session") 47 | def container_factory(image): 48 | """A context manager that starts the docker container.""" 49 | 50 | @contextmanager 51 | def _container(target): 52 | container_id = None 53 | _logger.info(f"Starting {image} container") 54 | try: 55 | container_id = docker( 56 | "container", 57 | "run", 58 | "--detach", 59 | "-e", 60 | "TARGET=%s" % target, 61 | image, 62 | ).strip() 63 | with local.env(): 64 | yield container_id 65 | finally: 66 | if container_id: 67 | _logger.info(f"Removing {container_id}...") 68 | docker( 69 | "container", 70 | "rm", 71 | "-f", 72 | container_id, 73 | ) 74 | 75 | return _container 76 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_language_version: 2 | python: python3 3 | node: "14.14.0" 4 | repos: 5 | # General 6 | - repo: local 7 | hooks: 8 | - id: forbidden-files 9 | name: forbidden files 10 | entry: found forbidden files; remove them 11 | language: fail 12 | files: "\\.rej$" 13 | - repo: https://github.com/pre-commit/mirrors-prettier 14 | rev: v3.0.3 15 | hooks: 16 | 
- id: prettier 17 | additional_dependencies: 18 | - prettier@2.1.2 19 | - "@prettier/plugin-xml@0.12.0" 20 | args: 21 | - --plugin=@prettier/plugin-xml 22 | - repo: https://github.com/myint/autoflake 23 | rev: v2.2.1 24 | hooks: 25 | - id: autoflake 26 | args: 27 | - --in-place 28 | - --expand-star-imports 29 | - --ignore-init-module-imports 30 | - --remove-all-unused-imports 31 | - --remove-duplicate-keys 32 | - --remove-unused-variables 33 | - repo: https://github.com/asottile/pyupgrade 34 | rev: v3.13.0 35 | hooks: 36 | - id: pyupgrade 37 | - repo: https://github.com/psf/black 38 | rev: 23.9.1 39 | hooks: 40 | - id: black 41 | - repo: https://github.com/timothycrosley/isort 42 | rev: 5.12.0 43 | hooks: 44 | - id: isort 45 | args: 46 | - --settings=. 47 | - repo: https://github.com/pycqa/flake8 48 | rev: 6.1.0 49 | hooks: 50 | - &flake8 51 | id: flake8 52 | name: flake8 except __init__.py 53 | exclude: /__init__\.py$ 54 | additional_dependencies: 55 | - flake8-bugbear==20.1.4 56 | - <<: *flake8 57 | name: flake8 for __init__.py 58 | args: 59 | # ignore unused imports in __init__.py 60 | - --extend-ignore=F401 61 | files: /__init__\.py$ 62 | - repo: https://github.com/pre-commit/pre-commit-hooks 63 | rev: v4.4.0 64 | hooks: 65 | - id: check-added-large-files 66 | args: 67 | - --maxkb=1000 68 | - id: check-case-conflict 69 | - id: check-executables-have-shebangs 70 | - id: check-json 71 | - id: check-merge-conflict 72 | - id: check-symlinks 73 | - id: check-toml 74 | - id: check-xml 75 | - id: check-yaml 76 | - id: detect-private-key 77 | - id: end-of-file-fixer 78 | - id: mixed-line-ending 79 | args: 80 | - --fix=lf 81 | - id: trailing-whitespace 82 | - id: check-ast 83 | - id: check-builtin-literals 84 | - id: check-docstring-first 85 | - id: debug-statements 86 | - id: fix-encoding-pragma 87 | args: 88 | - --remove 89 | - id: requirements-txt-fixer 90 | -------------------------------------------------------------------------------- /tests/healthcheck.yaml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | services: 3 | autoheal: 4 | image: willfarrell/autoheal 5 | restart: unless-stopped 6 | environment: 7 | AUTOHEAL_INTERVAL: 1 8 | AUTOHEAL_CONTAINER_LABEL: "AUTOHEAL_${COMPOSE_PROJECT_NAME}" 9 | privileged: "${OS_NEEDS_PRIVILEGES_FOR_DOCKER_SOCK:-false}" 10 | volumes: 11 | - /var/run/docker.sock:/var/run/docker.sock 12 | 13 | proxy_preresolve: 14 | build: 15 | dockerfile: Dockerfile 16 | context: .. 17 | labels: 18 | - "AUTOHEAL_${COMPOSE_PROJECT_NAME}=true" 19 | depends_on: 20 | - target 21 | - autoheal 22 | networks: 23 | default: 24 | aliases: 25 | - target_preresolve.example.com 26 | simulated_outside: 27 | environment: 28 | TARGET: target.example.com 29 | PRE_RESOLVE: 1 30 | NAMESERVERS: "127.0.0.11" #use local docker nameserver 31 | HTTP_HEALTHCHECK: 1 32 | HTTP_HEALTHCHECK_TIMEOUT_MS: 200 33 | healthcheck: 34 | test: ["CMD", "healthcheck"] 35 | interval: 1s 36 | timeout: 1s 37 | retries: 0 38 | start_period: 1s 39 | restart: unless-stopped 40 | 41 | proxy_without_preresolve: 42 | build: 43 | dockerfile: Dockerfile 44 | context: .. 
45 | labels: 46 | - "AUTOHEAL_${COMPOSE_PROJECT_NAME}=true" 47 | depends_on: 48 | - target 49 | - autoheal 50 | networks: 51 | default: 52 | aliases: 53 | - target_without_preresolve.example.com 54 | simulated_outside: 55 | environment: 56 | TARGET: target.example.com 57 | # use no pre resolving (target gets resolved on every request) 58 | PRE_RESOLVE: 0 59 | NAMESERVERS: "127.0.0.11" #use local docker nameserver 60 | HTTP_HEALTHCHECK: 1 61 | HTTP_HEALTHCHECK_TIMEOUT_MS: 200 62 | healthcheck: 63 | test: ["CMD", "healthcheck"] 64 | interval: 1s 65 | timeout: 1s 66 | retries: 0 67 | start_period: 1s 68 | restart: unless-stopped 69 | 70 | proxy_smtp: 71 | build: 72 | dockerfile: Dockerfile 73 | context: .. 74 | labels: 75 | - "AUTOHEAL_${COMPOSE_PROJECT_NAME}=true" 76 | depends_on: 77 | - target_smtp 78 | - autoheal 79 | networks: 80 | default: 81 | aliases: 82 | - target_smtp.example.com 83 | simulated_outside: 84 | environment: 85 | TARGET: smtp.example.com 86 | PORT: 1025 87 | PRE_RESOLVE: 1 88 | NAMESERVERS: "127.0.0.11" #use local docker nameserver 89 | SMTP_HEALTHCHECK: 1 90 | SMTP_HEALTHCHECK_URL: "smtp://$$TARGET:1025/" 91 | # mailhog doesn't support HELP command 92 | SMTP_HEALTHCHECK_COMMAND: "QUIT" 93 | SMTP_HEALTHCHECK_TIMEOUT_MS: 200 94 | healthcheck: 95 | test: ["CMD", "healthcheck"] 96 | interval: 1s 97 | timeout: 1s 98 | retries: 0 99 | start_period: 1s 100 | restart: unless-stopped 101 | 102 | target: 103 | image: nginx 104 | networks: 105 | simulated_outside: 106 | aliases: 107 | - target.example.com 108 | 109 | target_smtp: 110 | image: mailhog/mailhog 111 | networks: 112 | simulated_outside: 113 | aliases: 114 | - smtp.example.com 115 | 116 | target_firewalled_not_responding: 117 | image: python:3.9 118 | volumes: 119 | - ./not_responding_tcp_port.py:/bin/not_responding_tcp_port 120 | command: ["not_responding_tcp_port", "0.0.0.0", "80"] 121 | networks: 122 | simulated_outside: 123 | aliases: 124 | - target.example.com 125 | 126 | target_smtp_firewalled_not_responding: 127 | image: python:3.9 128 | volumes: 129 | - ./not_responding_tcp_port.py:/bin/not_responding_tcp_port 130 | command: ["not_responding_tcp_port", "0.0.0.0", "1025"] 131 | networks: 132 | simulated_outside: 133 | aliases: 134 | - smtp.example.com 135 | 136 | networks: 137 | # we do not allow communication to the outside 138 | simulated_outside: 139 | internal: true 140 | default: 141 | internal: true 142 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.toptal.com/developers/gitignore/api/vscode,python,node 2 | # Edit at https://www.toptal.com/developers/gitignore?templates=vscode,python,node 3 | 4 | ### Node ### 5 | # Logs 6 | logs 7 | *.log 8 | npm-debug.log* 9 | yarn-debug.log* 10 | yarn-error.log* 11 | lerna-debug.log* 12 | 13 | # Diagnostic reports (https://nodejs.org/api/report.html) 14 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 15 | 16 | # Runtime data 17 | pids 18 | *.pid 19 | *.seed 20 | *.pid.lock 21 | 22 | # Directory for instrumented libs generated by jscoverage/JSCover 23 | lib-cov 24 | 25 | # Coverage directory used by tools like istanbul 26 | coverage 27 | *.lcov 28 | 29 | # nyc test coverage 30 | .nyc_output 31 | 32 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 33 | .grunt 34 | 35 | # Bower dependency directory (https://bower.io/) 36 | bower_components 37 | 38 | # node-waf configuration 39 | 
.lock-wscript 40 | 41 | # Compiled binary addons (https://nodejs.org/api/addons.html) 42 | build/Release 43 | 44 | # Dependency directories 45 | node_modules/ 46 | jspm_packages/ 47 | 48 | # TypeScript v1 declaration files 49 | typings/ 50 | 51 | # TypeScript cache 52 | *.tsbuildinfo 53 | 54 | # Optional npm cache directory 55 | .npm 56 | 57 | # Optional eslint cache 58 | .eslintcache 59 | 60 | # Microbundle cache 61 | .rpt2_cache/ 62 | .rts2_cache_cjs/ 63 | .rts2_cache_es/ 64 | .rts2_cache_umd/ 65 | 66 | # Optional REPL history 67 | .node_repl_history 68 | 69 | # Output of 'npm pack' 70 | *.tgz 71 | 72 | # Yarn Integrity file 73 | .yarn-integrity 74 | 75 | # dotenv environment variables file 76 | .env 77 | .env.test 78 | .env*.local 79 | 80 | # parcel-bundler cache (https://parceljs.org/) 81 | .cache 82 | .parcel-cache 83 | 84 | # Next.js build output 85 | .next 86 | 87 | # Nuxt.js build / generate output 88 | .nuxt 89 | dist 90 | 91 | # Gatsby files 92 | .cache/ 93 | # Comment in the public line in if your project uses Gatsby and not Next.js 94 | # https://nextjs.org/blog/next-9-1#public-directory-support 95 | # public 96 | 97 | # vuepress build output 98 | .vuepress/dist 99 | 100 | # Serverless directories 101 | .serverless/ 102 | 103 | # FuseBox cache 104 | .fusebox/ 105 | 106 | # DynamoDB Local files 107 | .dynamodb/ 108 | 109 | # TernJS port file 110 | .tern-port 111 | 112 | # Stores VSCode versions used for testing VSCode extensions 113 | .vscode-test 114 | 115 | ### Python ### 116 | # Byte-compiled / optimized / DLL files 117 | __pycache__/ 118 | *.py[cod] 119 | *$py.class 120 | 121 | # C extensions 122 | *.so 123 | 124 | # Distribution / packaging 125 | .Python 126 | build/ 127 | develop-eggs/ 128 | dist/ 129 | downloads/ 130 | eggs/ 131 | .eggs/ 132 | lib/ 133 | lib64/ 134 | parts/ 135 | sdist/ 136 | var/ 137 | wheels/ 138 | pip-wheel-metadata/ 139 | share/python-wheels/ 140 | *.egg-info/ 141 | .installed.cfg 142 | *.egg 143 | MANIFEST 144 | 145 | # PyInstaller 146 | # Usually these files are written by a python script from a template 147 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 148 | *.manifest 149 | *.spec 150 | 151 | # Installer logs 152 | pip-log.txt 153 | pip-delete-this-directory.txt 154 | 155 | # Unit test / coverage reports 156 | htmlcov/ 157 | .tox/ 158 | .nox/ 159 | .coverage 160 | .coverage.* 161 | nosetests.xml 162 | coverage.xml 163 | *.cover 164 | *.py,cover 165 | .hypothesis/ 166 | .pytest_cache/ 167 | pytestdebug.log 168 | 169 | # Translations 170 | *.mo 171 | *.pot 172 | 173 | # Django stuff: 174 | local_settings.py 175 | db.sqlite3 176 | db.sqlite3-journal 177 | 178 | # Flask stuff: 179 | instance/ 180 | .webassets-cache 181 | 182 | # Scrapy stuff: 183 | .scrapy 184 | 185 | # Sphinx documentation 186 | docs/_build/ 187 | doc/_build/ 188 | 189 | # PyBuilder 190 | target/ 191 | 192 | # Jupyter Notebook 193 | .ipynb_checkpoints 194 | 195 | # IPython 196 | profile_default/ 197 | ipython_config.py 198 | 199 | # pyenv 200 | .python-version 201 | 202 | # pipenv 203 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 204 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 205 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 206 | # install all needed dependencies. 207 | #Pipfile.lock 208 | 209 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 210 | __pypackages__/ 211 | 212 | # Celery stuff 213 | celerybeat-schedule 214 | celerybeat.pid 215 | 216 | # SageMath parsed files 217 | *.sage.py 218 | 219 | # Environments 220 | .venv 221 | env/ 222 | venv/ 223 | ENV/ 224 | env.bak/ 225 | venv.bak/ 226 | pythonenv* 227 | 228 | # Spyder project settings 229 | .spyderproject 230 | .spyproject 231 | 232 | # Rope project settings 233 | .ropeproject 234 | 235 | # mkdocs documentation 236 | /site 237 | 238 | # mypy 239 | .mypy_cache/ 240 | .dmypy.json 241 | dmypy.json 242 | 243 | # Pyre type checker 244 | .pyre/ 245 | 246 | # pytype static type analyzer 247 | .pytype/ 248 | 249 | # profiling data 250 | .prof 251 | 252 | ### vscode ### 253 | .vscode/* 254 | !.vscode/settings.json 255 | !.vscode/tasks.json 256 | !.vscode/launch.json 257 | !.vscode/extensions.json 258 | *.code-workspace 259 | 260 | # End of https://www.toptal.com/developers/gitignore/api/vscode,python,node 261 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Build, Test & Deploy 2 | 3 | "on": 4 | pull_request: 5 | push: 6 | branches: 7 | - master 8 | tags: 9 | - "v*" 10 | workflow_dispatch: 11 | inputs: 12 | pytest_addopts: 13 | description: 14 | Extra options for pytest; use -vv for full details; see 15 | https://docs.pytest.org/en/latest/example/simple.html#how-to-change-command-line-options-defaults 16 | required: false 17 | 18 | env: 19 | LANG: "en_US.utf-8" 20 | LC_ALL: "en_US.utf-8" 21 | PIP_CACHE_DIR: ${{ github.workspace }}/.cache.~/pip 22 | PIPX_HOME: ${{ github.workspace }}/.cache.~/pipx 23 | POETRY_CACHE_DIR: ${{ github.workspace }}/.cache.~/pypoetry 24 | POETRY_VIRTUALENVS_IN_PROJECT: "true" 25 | PYTEST_ADDOPTS: ${{ github.event.inputs.pytest_addopts }} 26 | PYTHONIOENCODING: "UTF-8" 27 | 28 | jobs: 29 | build-test: 30 | runs-on: ubuntu-22.04 31 | strategy: 32 | matrix: 33 | python: 34 | - "3.10" 35 | steps: 36 | # Prepare environment 37 | - uses: actions/checkout@v4 38 | # Set up and run tests 39 | - name: Install python 40 | uses: actions/setup-python@v4 41 | with: 42 | python-version: ${{ matrix.python }} 43 | - name: Generate cache key CACHE 44 | run: 45 | echo "CACHE=${{ secrets.CACHE_DATE }} ${{ runner.os }} $(python -VV | 46 | sha256sum | cut -d' ' -f1) ${{ hashFiles('pyproject.toml') }} ${{ 47 | hashFiles('poetry.lock') }}" >> $GITHUB_ENV 48 | - uses: actions/cache@v4 49 | with: 50 | path: | 51 | .cache.~ 52 | .venv 53 | ~/.local/bin 54 | key: venv ${{ env.CACHE }} 55 | - run: pip install poetry 56 | - name: Patch $PATH 57 | run: echo "$HOME/.local/bin" >> $GITHUB_PATH 58 | - run: poetry install 59 | # Run tests 60 | - run: poetry run pytest --prebuild 61 | build-push: 62 | runs-on: ubuntu-22.04 63 | services: 64 | registry: 65 | image: registry:2 66 | ports: 67 | - 5000:5000 68 | env: 69 | DOCKER_IMAGE_NAME: ${{ github.repository }} 70 | DOCKERHUB_IMAGE_NAME: tecnativa/whitelist 71 | PUSH: ${{ toJSON(github.event_name != 'pull_request') }} 72 | steps: 73 | # Set up Docker Environment 74 | - uses: actions/checkout@v4 75 | - uses: actions/cache@v4 76 | with: 77 | path: | 78 | /tmp/.buildx-cache 79 | key: buildx|${{ secrets.CACHE_DATE }}|${{ runner.os }} 80 | - name: Set up QEMU 81 | uses: docker/setup-qemu-action@v1 82 | - name: Set up Docker Buildx 83 | id: buildx 84 | uses: docker/setup-buildx-action@v1 85 | with: 86 | driver-opts: network=host 87 | install: true 88 | # Build 
and push 89 | - name: Docker meta for local images 90 | id: docker_meta_local 91 | uses: crazy-max/ghaction-docker-meta@v1 92 | with: 93 | images: localhost:5000/${{ env.DOCKER_IMAGE_NAME }} 94 | tag-edge: true 95 | tag-semver: | 96 | {{version}} 97 | {{major}} 98 | {{major}}.{{minor}} 99 | - name: Build and push to local (test) registry 100 | uses: docker/build-push-action@v4 101 | with: 102 | context: . 103 | file: ./Dockerfile 104 | platforms: | 105 | linux/386 106 | linux/amd64 107 | linux/arm64 108 | load: false 109 | push: true 110 | cache-from: type=local,src=/tmp/.buildx-cache 111 | cache-to: type=local,dest=/tmp/.buildx-cache,mode=max 112 | labels: ${{ steps.docker_meta_local.outputs.labels }} 113 | tags: ${{ steps.docker_meta_local.outputs.tags }} 114 | # Next jobs only happen outside of pull requests and on main branches 115 | - name: Login to DockerHub 116 | if: ${{ fromJSON(env.PUSH) }} 117 | uses: docker/login-action@v1 118 | with: 119 | username: ${{ secrets.DOCKERHUB_LOGIN }} 120 | password: ${{ secrets.DOCKERHUB_TOKEN }} 121 | - name: Login to GitHub Container Registry 122 | if: ${{ fromJSON(env.PUSH) }} 123 | uses: docker/login-action@v1 124 | with: 125 | registry: ghcr.io 126 | username: ${{ secrets.BOT_LOGIN }} 127 | password: ${{ secrets.BOT_TOKEN }} 128 | - name: Docker meta for public images 129 | if: ${{ fromJSON(env.PUSH) }} 130 | id: docker_meta_public 131 | uses: crazy-max/ghaction-docker-meta@v1 132 | with: 133 | images: | 134 | ghcr.io/${{ env.DOCKER_IMAGE_NAME }} 135 | ${{ env.DOCKERHUB_IMAGE_NAME }} 136 | tag-edge: true 137 | tag-semver: | 138 | {{version}} 139 | {{major}} 140 | {{major}}.{{minor}} 141 | - name: Build and push to public registry(s) 142 | if: ${{ fromJSON(env.PUSH) }} 143 | uses: docker/build-push-action@v4 144 | with: 145 | context: . 
146 | file: ./Dockerfile 147 | platforms: | 148 | linux/386 149 | linux/amd64 150 | linux/arm64 151 | load: false 152 | push: true 153 | cache-from: type=local,src=/tmp/.buildx-cache 154 | cache-to: type=local,dest=/tmp/.buildx-cache,mode=max 155 | labels: ${{ steps.docker_meta_public.outputs.labels }} 156 | tags: ${{ steps.docker_meta_public.outputs.tags }} 157 | -------------------------------------------------------------------------------- /tests/test_healthcheck_ports.py: -------------------------------------------------------------------------------- 1 | import os 2 | from unittest import TestCase 3 | from unittest.mock import call, patch 4 | 5 | import pycurl 6 | 7 | from healthcheck import http_healthcheck, smtp_healthcheck 8 | 9 | 10 | @patch("pycurl.Curl") 11 | class TestHealthcheckPorts(TestCase): 12 | # given default environment 13 | @patch.dict(os.environ, {"PORT": "80 443"}, clear=True) 14 | def test_healthcheck_http_default_port(self, mock_curl): 15 | # when running http_healthcheck 16 | http_healthcheck() 17 | 18 | # then the called url should be http://localhost/ 19 | mock_curl.assert_has_calls( 20 | [ 21 | call().setopt(pycurl.URL, "http://localhost/"), 22 | # and port 80 should be used 23 | call().setopt(pycurl.RESOLVE, ["localhost:80:127.0.0.1"]), 24 | ] 25 | ) 26 | 27 | # given default environment with https url specified 28 | @patch.dict( 29 | os.environ, 30 | {"PORT": "80 443", "HTTP_HEALTHCHECK_URL": "https://localhost/"}, 31 | clear=True, 32 | ) 33 | def test_healthcheck_https_default_port(self, mock_curl): 34 | # when running http_healthcheck 35 | http_healthcheck() 36 | 37 | # then the called url should be https://localhost/ 38 | mock_curl.assert_has_calls( 39 | [ 40 | call().setopt(pycurl.URL, "https://localhost/"), 41 | # and port 443 should be used 42 | call().setopt(pycurl.RESOLVE, ["localhost:443:127.0.0.1"]), 43 | ] 44 | ) 45 | 46 | # given special http port 47 | @patch.dict(os.environ, {"PORT": "8025"}, clear=True) 48 | def test_healthcheck_http_custom_port(self, mock_curl): 49 | # when running http_healthcheck 50 | http_healthcheck() 51 | 52 | # then the called url should be http://localhost:8025/ 53 | mock_curl.assert_has_calls( 54 | [ 55 | call().setopt(pycurl.URL, "http://localhost:8025/"), 56 | # and port 8025 should be used 57 | call().setopt(pycurl.RESOLVE, ["localhost:8025:127.0.0.1"]), 58 | ] 59 | ) 60 | 61 | # given smtp environment 62 | @patch.dict(os.environ, {"PORT": "25"}, clear=True) 63 | def test_healthcheck_smtp_default_port(self, mock_curl): 64 | # when running smtp_healthcheck 65 | smtp_healthcheck() 66 | 67 | # then the called url should be smtp://localhost/ 68 | mock_curl.assert_has_calls( 69 | [ 70 | call().setopt(pycurl.URL, "smtp://localhost/"), 71 | # and command should be HELP 72 | call().setopt(pycurl.CUSTOMREQUEST, "HELP"), 73 | # and port 25 should be used 74 | call().setopt(pycurl.RESOLVE, ["localhost:25:127.0.0.1"]), 75 | ] 76 | ) 77 | 78 | # given mailhog smtp environment 79 | @patch.dict( 80 | os.environ, 81 | { 82 | "PORT": "1025", 83 | "TARGET": "mailhog", 84 | "SMTP_HEALTHCHECK_URL": "smtp://$TARGET/", 85 | "SMTP_HEALTHCHECK_COMMAND": "QUIT", 86 | }, 87 | clear=True, 88 | ) 89 | def test_healthcheck_smtp_mailhog_port(self, mock_curl): 90 | # when running smtp_healthcheck 91 | smtp_healthcheck() 92 | 93 | # then the called url should be smtp://mailhog:1025/ 94 | mock_curl.assert_has_calls( 95 | [ 96 | call().setopt(pycurl.URL, "smtp://mailhog:1025/"), 97 | # and command should be QUIT 98 | 
call().setopt(pycurl.CUSTOMREQUEST, "QUIT"), 99 | # and port 1025 should be used 100 | call().setopt(pycurl.RESOLVE, ["mailhog:1025:127.0.0.1"]), 101 | ] 102 | ) 103 | 104 | # given mailhog multiple ports environment 105 | @patch.dict( 106 | os.environ, 107 | { 108 | "PORT": "10001 10002", 109 | "TARGET": "mailhog", 110 | "SMTP_HEALTHCHECK_URL": "smtp://$TARGET/", 111 | "SMTP_HEALTHCHECK_COMMAND": "QUIT", 112 | }, 113 | clear=True, 114 | ) 115 | def test_healthcheck_smtp_mailhog_multiple_ports(self, mock_curl): 116 | # when running smtp_healthcheck 117 | smtp_healthcheck() 118 | 119 | # then the called url should be smtp://mailhog:10001/ 120 | mock_curl.assert_has_calls( 121 | [ 122 | call().setopt(pycurl.URL, "smtp://mailhog:10001/"), 123 | # and command should be QUIT 124 | call().setopt(pycurl.CUSTOMREQUEST, "QUIT"), 125 | # and port 10001 should be used 126 | call().setopt(pycurl.RESOLVE, ["mailhog:10001:127.0.0.1"]), 127 | ] 128 | ) 129 | 130 | # given mailhog multiple ports environment 131 | @patch.dict( 132 | os.environ, 133 | { 134 | "PORT": "10001 10002", 135 | "TARGET": "mailhog", 136 | "SMTP_HEALTHCHECK_URL": "smtp://$TARGET:10002/", 137 | "SMTP_HEALTHCHECK_COMMAND": "QUIT", 138 | }, 139 | clear=True, 140 | ) 141 | def test_healthcheck_smtp_mailhog_custom_port_in_url(self, mock_curl): 142 | # when running smtp_healthcheck 143 | smtp_healthcheck() 144 | 145 | # then the called url should be smtp://mailhog:10002/ 146 | mock_curl.assert_has_calls( 147 | [ 148 | call().setopt(pycurl.URL, "smtp://mailhog:10002/"), 149 | # and command should be QUIT 150 | call().setopt(pycurl.CUSTOMREQUEST, "QUIT"), 151 | # and port 10001 should be used 152 | call().setopt(pycurl.RESOLVE, ["mailhog:10002:127.0.0.1"]), 153 | ] 154 | ) 155 | -------------------------------------------------------------------------------- /healthcheck.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import logging 4 | import os 5 | 6 | logger = logging.getLogger("healthcheck") 7 | 8 | 9 | def error(message, exception=None): 10 | logger.error(message) 11 | if exception is None: 12 | exit(1) 13 | else: 14 | raise exception 15 | 16 | 17 | def http_healthcheck(): 18 | """ 19 | Use pycurl to check if the target server is still responding via proxy.py 20 | :return: None 21 | """ 22 | import re 23 | 24 | import pycurl 25 | 26 | check_url = os.environ.get("HTTP_HEALTHCHECK_URL", "http://localhost/") 27 | check_timeout_ms = int(os.environ.get("HTTP_HEALTHCHECK_TIMEOUT_MS", 2000)) 28 | target = os.environ.get("TARGET", "localhost") 29 | check_url_with_target = check_url.replace("$TARGET", target) 30 | port = re.search("https?://[^:]*(?::([^/]+))?", check_url_with_target)[1] 31 | if not port: 32 | port = "80" if check_url_with_target.startswith("http://") else "443" 33 | ports = os.environ.get("PORT").split() 34 | if port not in ports: 35 | port = ports[0] 36 | check_url_with_target = re.sub( 37 | "(https?://[^/]+)", r"\1:{}".format(port), check_url_with_target 38 | ) 39 | print("checking %s via 127.0.0.1" % check_url_with_target) 40 | logger.info("checking %s via 127.0.0.1" % check_url_with_target) 41 | try: 42 | request = pycurl.Curl() 43 | request.setopt(pycurl.URL, check_url_with_target) 44 | # do not send the request to the target directly but use our own socat proxy process to check if it's still 45 | # working 46 | request.setopt(pycurl.RESOLVE, ["{}:{}:127.0.0.1".format(target, port)]) 47 | request.setopt(pycurl.CONNECTTIMEOUT_MS, check_timeout_ms) 48 | 
request.setopt(pycurl.TIMEOUT_MS, check_timeout_ms) 49 | request.perform() 50 | request.close() 51 | except pycurl.error as e: 52 | error("error while checking http connection", e) 53 | 54 | 55 | def smtp_healthcheck(): 56 | """ 57 | Use pycurl to check if the target server is still responding via proxy.py 58 | :return: None 59 | """ 60 | import re 61 | 62 | import pycurl 63 | 64 | check_url = os.environ.get("SMTP_HEALTHCHECK_URL", "smtp://localhost/") 65 | check_command = os.environ.get("SMTP_HEALTHCHECK_COMMAND", "HELP") 66 | check_timeout_ms = int(os.environ.get("SMTP_HEALTHCHECK_TIMEOUT_MS", 2000)) 67 | target = os.environ.get("TARGET", "localhost") 68 | check_url_with_target = check_url.replace("$TARGET", target) 69 | port = re.search("smtp://[^:]*(?::([^/]+))?", check_url_with_target)[1] 70 | if not port: 71 | port = "25" 72 | ports = os.environ.get("PORT").split() 73 | if port not in ports: 74 | port = ports[0] 75 | check_url_with_target = re.sub( 76 | "(smtp://[^/]+)", r"\1:{}".format(port), check_url_with_target 77 | ) 78 | logger.info("checking %s via 127.0.0.1" % check_url_with_target) 79 | try: 80 | request = pycurl.Curl() 81 | request.setopt(pycurl.URL, check_url_with_target) 82 | request.setopt(pycurl.CUSTOMREQUEST, check_command) 83 | # do not send the request to the target directly but use our own socat proxy process to check if it's still 84 | # working 85 | request.setopt(pycurl.RESOLVE, ["{}:{}:127.0.0.1".format(target, port)]) 86 | request.setopt(pycurl.CONNECTTIMEOUT_MS, check_timeout_ms) 87 | request.setopt(pycurl.TIMEOUT_MS, check_timeout_ms) 88 | request.perform() 89 | request.close() 90 | except pycurl.error as e: 91 | error("error while checking smtp connection", e) 92 | 93 | 94 | def process_healthcheck(): 95 | """ 96 | Check that at least one socat process exists per port and no more than the number of configured max connections 97 | processes exist for each port. 98 | :return: 99 | """ 100 | import subprocess 101 | 102 | ports = os.environ["PORT"].split() 103 | max_connections = int(os.environ["MAX_CONNECTIONS"]) 104 | logger.info( 105 | "checking socat processes for port(s) %s having at least one and less than %d socat processes" 106 | % (ports, max_connections) 107 | ) 108 | socat_processes = ( 109 | # grep for all processes running socat, ignoring exit code 2 110 | # (unreadable file, happens if some process is stopped while grep is running) 111 | subprocess.check_output( 112 | [ 113 | "sh", 114 | "-c", 115 | "grep -R -s socat /proc/[0-9]*/cmdline" 116 | " || grep -R -s socat /proc/[0-9]*/cmdline" 117 | ' || status=$? 
&& [ "$status" != "2" ]', 118 | ] 119 | ) 120 | .decode("utf-8") 121 | .split("\n") 122 | ) 123 | # consider only non-empty lines for socat processes not the ones for grep 124 | pids = [ 125 | process.split("/")[2] 126 | for process in socat_processes 127 | if process and process.endswith("cmdline:socat") 128 | ] 129 | if len(pids) < len(ports): 130 | # if we have less than the number of ports socat processes we do not need to count processes per port and can 131 | # fail fast 132 | error("Expected at least %d socat processes" % len(ports)) 133 | port_process_count = {port: 0 for port in ports} 134 | for pid in pids: 135 | # foreach socat pid we detect the port it's for by checking the last argument (connect to) that ends with 136 | # :{ip}:{port} for our processes 137 | try: 138 | with open("/proc/%d/cmdline" % int(pid)) as fp: 139 | # arguments in /proc/.../cmdline are split by null bytes 140 | cmd = [part for part in "".join(fp.readlines()).split("\x00") if part] 141 | port = cmd[2].split(":")[-1] 142 | port_process_count[port] = port_process_count[port] + 1 143 | except (IndexError, KeyError): 144 | logger.error("ERROR: unexpected command {} {}".format(pid, cmd)) 145 | raise 146 | except (ProcessLookupError, FileNotFoundError): 147 | # ignore processes no longer existing (possibly retrieved an answer) 148 | pass 149 | for port in ports: 150 | if port_process_count[port] == 0: 151 | error("Missing socat process(es) for port: %s" % port) 152 | if port_process_count[port] >= max_connections + 1: 153 | error( 154 | "More than %d + 1 socat process(es) for port: %s" 155 | % (max_connections, port) 156 | ) 157 | 158 | 159 | def preresolve_healthcheck(): 160 | """ 161 | Check that the pre-resolved ip is still valid now for target 162 | :return: 163 | """ 164 | from tempfile import gettempdir 165 | 166 | load_balancing_dns_fs_flag = os.path.join( 167 | gettempdir(), "load_balancing_dns_detected" 168 | ) 169 | if not os.path.exists(load_balancing_dns_fs_flag): 170 | # only run the resolver check if a previous run didn't flag the target as being dns load-balanced 171 | import subprocess 172 | 173 | from dns.resolver import Resolver 174 | 175 | pre_resolved_ips = { 176 | line.split(":")[2] 177 | for line in subprocess.check_output( 178 | [ 179 | "sh", 180 | "-c", 181 | "grep -R -s '\\(udp\\|tcp\\)-connect:' /proc/[0-9]*/cmdline || grep -R -s '\\(udp\\|tcp\\)-connect:' /proc/[0-9]*/cmdline", 182 | ] 183 | ) 184 | .decode("utf-8") 185 | .split("\n") 186 | if line 187 | } 188 | resolver = Resolver() 189 | resolver.nameservers = os.environ["NAMESERVERS"].split() 190 | target = os.environ["TARGET"] 191 | resolved_ips = [answer.address for answer in resolver.resolve(target)] 192 | for ip in pre_resolved_ips: 193 | logger.info(f"checking {target} resolves to {ip}") 194 | if ip not in resolved_ips: 195 | resolved_ips_2 = [answer.address for answer in resolver.resolve(target)] 196 | if resolved_ips_2 == resolved_ips: 197 | error( 198 | f"{target} no longer resolves to {ip}, {resolved_ips}, {resolved_ips_2}" 199 | ) 200 | else: 201 | resolved_ips_3 = [ 202 | answer.address for answer in resolver.resolve(target) 203 | ] 204 | # to make sure we didn't just hit the server switch in dns, we check again before deactivating 205 | # the healthcheck permanently (until the container restarts) 206 | if resolved_ips_3 != resolved_ips_2: 207 | logger.info( 208 | f"{target} seems to be load-balancing with dns ({resolved_ips} != {resolved_ips_2}), " 209 | f"deactivating the resolver healthcheck" 210 | ) 211 | with 
open(f"{load_balancing_dns_fs_flag}", "w") as fp: 212 | fp.write(target) 213 | 214 | 215 | if __name__ == "__main__": 216 | logging.basicConfig(level=logging.INFO) 217 | process_healthcheck() 218 | if os.environ["PRE_RESOLVE"] == "1": 219 | preresolve_healthcheck() 220 | if os.environ.get("HTTP_HEALTHCHECK", "0") == "1": 221 | http_healthcheck() 222 | if os.environ.get("SMTP_HEALTHCHECK", "0") == "1": 223 | smtp_healthcheck() 224 | -------------------------------------------------------------------------------- /tests/test_healtcheck.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import json 3 | import logging 4 | import os.path 5 | from datetime import datetime 6 | from time import sleep 7 | 8 | import plumbum.commands.processes 9 | import pytest 10 | from plumbum import TF, local 11 | from plumbum.cmd import docker, which 12 | 13 | HEALTHCHECK_YAML = os.path.abspath("tests/healthcheck.yaml") 14 | 15 | PROXY_TARGET_PAIRS = [ 16 | ("proxy_preresolve", "target"), 17 | ("proxy_smtp", "target_smtp"), 18 | ("proxy_without_preresolve", "target"), 19 | ] 20 | 21 | logger = logging.getLogger() 22 | 23 | # use docker compose plugin/v2 24 | docker_compose = docker["compose"] 25 | _healthcheck = docker_compose["-f", HEALTHCHECK_YAML] 26 | _get_container_id = _healthcheck["ps", "-q"] 27 | 28 | 29 | def _get_container_id_and_ip(service_name): 30 | container_id = _get_container_id(service_name).strip() 31 | container_info = json.loads(docker("inspect", container_id)) 32 | return ( 33 | container_id, 34 | container_info[0]["NetworkSettings"]["Networks"][ 35 | "%s_simulated_outside" % local.env["COMPOSE_PROJECT_NAME"] 36 | ]["IPAddress"], 37 | ) 38 | 39 | 40 | def _new_ip(target): 41 | # we get the container id of the currently running target to be able to force changing ips by scaling up 42 | # and then stopping the old container 43 | old_container_id, old_ip = _get_container_id_and_ip(target) 44 | 45 | # start a second instance of the target 46 | _healthcheck_with_error_info("up", "-d", "--scale", "%s=2" % target, target) 47 | 48 | # stop and remove the old container 49 | docker("stop", old_container_id) 50 | docker("rm", old_container_id) 51 | 52 | # verify that we got a new ip (should not be able to reuse the old one) 53 | new_container_id, new_ip = _get_container_id_and_ip(target) 54 | assert old_container_id != new_container_id 55 | assert old_ip != new_ip 56 | 57 | 58 | def _healthcheck_with_error_info(*args, **kwargs): 59 | try: 60 | _healthcheck(*args, **kwargs) 61 | except BaseException: 62 | # add additional infos to any error to make tracing down the error easier 63 | logger.error(_healthcheck("logs", "autoheal")) 64 | logger.error(_healthcheck("ps")) 65 | raise 66 | 67 | 68 | def _wait_for(proxy, messages, callback, *args): 69 | try: 70 | if isinstance(messages, str): 71 | 72 | def missing_messages(result): 73 | return messages not in result 74 | 75 | messages_for_exception = messages 76 | else: 77 | 78 | def missing_messages(result): 79 | return any(message for message in messages if message not in result) 80 | 81 | messages_for_exception = ", ".join(messages) 82 | while missing_messages(callback(*args)): 83 | # try again in one second (to not hammer the CPU) 84 | sleep(1) 85 | except BaseException: 86 | # add additional infos to any error to make tracing down the error easier 87 | logger.error("failed waiting for '%s'" % messages_for_exception) 88 | logger.error(_healthcheck("logs", "autoheal")) 89 | 
logger.error(_healthcheck("ps")) 90 | logger.error(_healthcheck("exec", "-T", proxy, "healthcheck", retcode=None)) 91 | raise 92 | 93 | 94 | def _sha256(text): 95 | return hashlib.sha256(str(text).encode("utf-8")).hexdigest() 96 | 97 | 98 | @pytest.fixture(scope="session") 99 | def os_needs_privileges(): 100 | if which["getenforce"] & TF: 101 | # if we can find getenforce on the current system, SELinux is probably installed and we need to start 102 | # autoheal with privileges 103 | return "true" 104 | return "false" 105 | 106 | 107 | @pytest.fixture(scope="function", autouse=True) 108 | def _cleanup_docker_compose(tmp_path, os_needs_privileges): 109 | with local.cwd(tmp_path): 110 | custom_compose_project_name = "{}_{}".format( 111 | os.path.basename(tmp_path), _sha256(tmp_path)[:6] 112 | ) 113 | with local.env( 114 | COMPOSE_PROJECT_NAME=custom_compose_project_name, 115 | OS_NEEDS_PRIVILEGES_FOR_DOCKER_SOCK=os_needs_privileges, 116 | ) as env: 117 | yield env 118 | 119 | # stop autoheal first to prevent it from restarting containers to be stopped 120 | _healthcheck("stop", "autoheal") 121 | _healthcheck("down", "-v") 122 | 123 | 124 | @pytest.fixture(scope="session", autouse=True) 125 | def _pull_images(): 126 | start = datetime.now() 127 | _healthcheck_with_error_info("pull", "--ignore-buildable") 128 | logger.info("pulled images {}".format(datetime.now() - start)) 129 | 130 | 131 | @pytest.mark.parametrize("proxy,target", PROXY_TARGET_PAIRS) 132 | def test_healthcheck_ok(proxy, target): 133 | # given a started proxy with healthcheck 134 | _healthcheck("up", "-d", proxy) 135 | 136 | # when everything is ok and target is Up 137 | assert " Up " in _healthcheck("ps", target) 138 | 139 | # then healthcheck should be successful 140 | try: 141 | _healthcheck("exec", "-T", proxy, "healthcheck") 142 | except plumbum.commands.processes.ProcessExecutionError: 143 | # at least on the second try (the first one might still happen when proxy is still starting up) 144 | _healthcheck_with_error_info("exec", "-T", proxy, "healthcheck") 145 | 146 | 147 | @pytest.mark.parametrize("proxy,target", PROXY_TARGET_PAIRS) 148 | def test_healthcheck_failing(proxy, target): 149 | # given a started proxy with healthcheck 150 | _healthcheck("up", "-d", proxy) 151 | # and autoheal not interfering 152 | _healthcheck("stop", "autoheal") 153 | 154 | # when target is not reachable 155 | _healthcheck("stop", target) 156 | assert " Exited " in _healthcheck("ps", "--all", target) 157 | 158 | # then healthcheck should return an error (non zero exit code) 159 | with pytest.raises( 160 | plumbum.commands.processes.ProcessExecutionError, 161 | match=r"Unexpected exit code: (1|137)", 162 | ): 163 | _healthcheck("exec", "-T", proxy, "healthcheck") 164 | 165 | 166 | @pytest.mark.parametrize("proxy,target", PROXY_TARGET_PAIRS) 167 | @pytest.mark.timeout(30) 168 | def test_healthcheck_failing_firewalled(proxy, target): 169 | # given a started proxy with healthcheck 170 | _healthcheck_with_error_info("up", "-d", proxy) 171 | # and autoheal not interfering 172 | _healthcheck_with_error_info("stop", "autoheal") 173 | 174 | # when target stops responding 175 | _healthcheck_with_error_info("stop", target) 176 | assert " Exited " in _healthcheck("ps", "--all", target) 177 | _healthcheck_with_error_info( 178 | "up", "-d", "{target:s}_firewalled_not_responding".format(target=target) 179 | ) 180 | assert "Up" in _healthcheck( 181 | "ps", "{target:s}_firewalled_not_responding".format(target=target) 182 | ) 183 | 184 | # then healthcheck 
should return an error (non-zero exit code) 185 | with pytest.raises( 186 | plumbum.commands.processes.ProcessExecutionError, 187 | match=r"Unexpected exit code: (1|137)", 188 | ): 189 | start = datetime.now() 190 | _healthcheck_with_error_info("exec", "-T", proxy, "healthcheck") 191 | end = datetime.now() 192 | # timeout is set to 200ms for tests, so the exception should be raised at earliest after 0.2s 193 | # and at most 2s after starting considering overhead 194 | # if it happens outside that timeframe (especially before 0.2s) the exception might hint to another error type 195 | assert 0.2 < (end - start).total_seconds() < 2 196 | 197 | 198 | @pytest.mark.parametrize( 199 | "proxy,target", 200 | (p for p in PROXY_TARGET_PAIRS if p[0] != "proxy_without_preresolve"), 201 | ) 202 | @pytest.mark.timeout(60) 203 | def test_healthcheck_autoheal(proxy, target): 204 | # given a started proxy with healthcheck 205 | _healthcheck_with_error_info("up", "-d", proxy) 206 | proxy_container_id = _get_container_id(proxy).strip() 207 | # that was healthy 208 | _wait_for(proxy, (" Up ", " (healthy) "), _healthcheck, "ps", proxy) 209 | 210 | # when target gets a new ip 211 | _new_ip(target) 212 | 213 | # then autoheal should restart the proxy 214 | _wait_for( 215 | proxy, 216 | "(%s) found to be unhealthy - Restarting container now" 217 | % proxy_container_id[:12], 218 | _healthcheck, 219 | "logs", 220 | "autoheal", 221 | ) 222 | 223 | # and the proxy should become healthy 224 | _wait_for(proxy, (" Up ", " (healthy) "), _healthcheck, "ps", proxy) 225 | 226 | # and healthcheck should be successful 227 | _healthcheck_with_error_info("exec", "-T", proxy, "healthcheck") 228 | 229 | 230 | @pytest.mark.timeout(60) 231 | def test_healthcheck_autoheal_proxy_without_preresolve(): 232 | # given a started proxy with healthcheck 233 | proxy = "proxy_without_preresolve" 234 | _healthcheck_with_error_info("up", "-d", proxy) 235 | # that was healthy 236 | _wait_for(proxy, (" Up ", " (healthy) "), _healthcheck, "ps", proxy) 237 | 238 | # when target gets a new ip 239 | _new_ip("target") 240 | 241 | # then healthcheck should be always successful (we wait just for 5 seconds) 242 | for _ in range(0, 50): 243 | _healthcheck_with_error_info("exec", "-T", proxy, "healthcheck") 244 | sleep(0.1) 245 | 246 | # and autoheal shouldn't have restarted anything 247 | assert not [ 248 | line 249 | for line in _healthcheck("logs", "autoheal").split("\n") 250 | if line and not line.startswith("Attaching to ") and "AUTOHEAL_" not in line 251 | ] 252 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Last image-template](https://img.shields.io/badge/last%20template%20update-v0.1.3-informational)](https://github.com/Tecnativa/image-template/tree/v0.1.3) 2 | [![GitHub Container Registry](https://img.shields.io/badge/GitHub%20Container%20Registry-latest-%2324292e)](https://github.com/orgs/Tecnativa/packages/container/package/docker-whitelist) 3 | [![Docker Hub](https://img.shields.io/badge/Docker%20Hub-latest-%23099cec)](https://hub.docker.com/r/tecnativa/whitelist) 4 | [![Docker Pulls](https://img.shields.io/docker/pulls/tecnativa/whitelist.svg)](https://hub.docker.com/r/tecnativa/whitelist) 5 | [![Layers](https://images.microbadger.com/badges/image/tecnativa/whitelist.svg)](https://microbadger.com/images/tecnativa/whitelist) 6 | 
[![Commit](https://images.microbadger.com/badges/commit/tecnativa/whitelist.svg)](https://microbadger.com/images/tecnativa/whitelist) 7 | [![License](https://images.microbadger.com/badges/license/tecnativa/whitelist.svg)](https://microbadger.com/images/tecnativa/whitelist) 8 | 9 | # Docker Whitelister 10 | 11 | ## What? 12 | 13 | A whitelist proxy that uses socat. 🔌😼 14 | 15 | ## Why? 16 | 17 | tl;dr: To work around https://github.com/moby/moby/issues/36174. 18 | 19 | Basically, Docker supports internal networks; but when you use them, you simply cannot 20 | open ports from those services, which is not very convenient: you either have full 21 | isolation or none. 22 | 23 | This proxy allows whitelisted endpoints to have network connectivity. It can be used 24 | for: 25 | 26 | - Allowing connection only to some APIs, but not to the rest of the WWW. 27 | - Exposing ports from a container while still not letting the container access the 28 | WWW. 29 | 30 | ## How? 31 | 32 | Use these environment variables: 33 | 34 | ### `TARGET` 35 | 36 | Required. The host name that incoming connections will be redirected to. 37 | 38 | ### `HTTP_HEALTHCHECK` 39 | 40 | Default: `0` 41 | 42 | Set to `1` to enable healthchecking with pycurl HTTP requests. This is useful if the target 43 | uses a deployment where the IP of the service changes frequently (e.g. 44 | `accounts.google.com`) and you are using [`PRE_RESOLVE`](#pre_resolve). 45 | 46 | #### Automatically restarting unhealthy proxies 47 | 48 | When you enable the HTTP healthcheck, the container marks itself as unhealthy but does 49 | nothing else (see https://github.com/moby/moby/pull/22719). 50 | 51 | If you want to restart your proxies automatically, you can use 52 | https://github.com/willfarrell/docker-autoheal.
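For illustration, here is a minimal sketch of that wiring (service names and the target host are illustrative assumptions; `tests/healthcheck.yaml` in this repository contains a complete, tested variant). The image already declares a `HEALTHCHECK` instruction, so no extra `healthcheck:` section is needed unless you want custom intervals:

```yaml
services:
  autoheal:
    image: willfarrell/autoheal
    restart: unless-stopped
    environment:
      # autoheal only watches containers carrying this label
      AUTOHEAL_CONTAINER_LABEL: autoheal
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock

  api_proxy:
    image: tecnativa/whitelist
    restart: unless-stopped
    labels:
      - autoheal=true
    environment:
      TARGET: api.example.com # illustrative target
      PRE_RESOLVE: 1
      HTTP_HEALTHCHECK: 1
```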
53 | 54 | ### `HTTP_HEALTHCHECK_URL` 55 | 56 | Default: `http://$TARGET/` 57 | 58 | URL to use in [`HTTP_HEALTHCHECK`](#http_healthcheck) if enabled. `$TARGET` gets 59 | replaced inside the URL by the configured [`TARGET`](#target). 60 | 61 | ### `HTTP_HEALTHCHECK_TIMEOUT_MS` 62 | 63 | Default: `2000` 64 | 65 | Timeout in milliseconds for the HTTP healthcheck. This is used as a timeout both for connecting 66 | and for receiving an answer, so you may end up spending twice this time. 67 | 68 | ### `MODE` 69 | 70 | Default: `tcp` 71 | 72 | Set to `udp` to proxy in UDP mode. 73 | 74 | ### `MAX_CONNECTIONS` 75 | 76 | Default: `100` 77 | 78 | Limits the maximum number of accepted connections at once per port. 79 | 80 | #### Setting "unlimited" connections 81 | 82 | For each port and open connection a subprocess is spawned. Setting a number too high 83 | might make your host system unresponsive and prevent you from logging in to it. So be 84 | very careful with setting this to a large number. 85 | 86 | The typical Linux system can handle up to 32768 processes, so if you need a lot more parallel open 87 | connections, make sure to also set the corresponding variables on your host system. See 88 | https://stackoverflow.com/questions/6294133/maximum-pid-in-linux for reference. And 89 | divide this number by at least the number of ports you are running through 90 | docker-whitelist. 91 | 92 | #### What happens when the limit is hit? 93 | 94 | docker-whitelist basically starts `socat`, so the behaviour is the same. In case no more 95 | subprocesses can be forked: 96 | 97 | - UDP mode: You won't see a difference on the connecting side. But no more packets are 98 | forwarded for new connections until the number of connections for this port is 99 | reduced. 100 | - TCP mode: docker-whitelist no longer accepts the connection and your connection will 101 | wait until the number of connections for this port is reduced. Your connection may 102 | time out. 103 | 104 | ### `NAMESERVERS` 105 | 106 | Default: `208.67.222.222 8.8.8.8 208.67.220.220 8.8.4.4`, which uses OpenDNS and Google DNS 107 | resolution servers. 108 | 109 | Only used when [pre-resolving](#pre-resolve) is enabled. 110 | 111 | ### `PORT` 112 | 113 | **Default:** `80 443`. The ports on which the proxy will listen and forward requests. 114 | 115 | - For standard HTTP/HTTPS services, you **do not** need to change anything (the 116 | default covers both port 80 and 443). 117 | - If you only need to proxy HTTPS (or your service listens on a different port, or you 118 | want to restrict the proxy to TLS only), specify: 119 | ```yaml 120 | environment: 121 | PORT: "443" 122 | ``` 123 | 124 | ### `PRE_RESOLVE` 125 | 126 | Default: `0` 127 | 128 | Set to `1` to force using the specified [nameservers](#nameservers) to resolve the 129 | [target](#target) before proxying. 130 | 131 | This is especially useful when using a network alias to whitelist an external API. 132 | 133 | ### `SMTP_HEALTHCHECK` 134 | 135 | Default: `0` 136 | 137 | Set to `1` to enable healthchecking with pycurl SMTP requests. This is useful if the target 138 | uses a deployment where the IP of the service changes frequently (e.g. 139 | `smtp.eu.sparkpostmail.com`) and you are using [`PRE_RESOLVE`](#pre_resolve). 140 | 141 | #### Automatically restarting unhealthy proxies 142 | 143 | see [HTTP_HEALTHCHECK](#http_healthcheck) 144 | 145 | ### `SMTP_HEALTHCHECK_URL` 146 | 147 | Default: `smtp://$TARGET/` 148 | 149 | URL to use in [`SMTP_HEALTHCHECK`](#smtp_healthcheck) if enabled. `$TARGET` gets 150 | replaced inside the URL by the configured [`TARGET`](#target). 151 | 152 | ### `SMTP_HEALTHCHECK_COMMAND` 153 | 154 | Default: `HELP` 155 | 156 | Enables changing the healthcheck command for servers that do not support `HELP` (e.g. 157 | for [MailHog](https://github.com/mailhog/MailHog) you can use `QUIT`). 158 | 159 | ### `SMTP_HEALTHCHECK_TIMEOUT_MS` 160 | 161 | Default: `2000` 162 | 163 | Timeout in milliseconds for the SMTP healthcheck. This is used as a timeout both for connecting 164 | and for receiving an answer, so you may end up spending twice this time. 165 | 166 | ### `UDP_ANSWERS` 167 | 168 | Default: `1` 169 | 170 | `1` means the process will wait for an answer from the server before the forked child 171 | process terminates (until this happens the connection counts towards the connection 172 | limit). Set to `0` if no answers are expected from the server; this prevents 173 | subprocesses from waiting for an answer indefinitely. 174 | 175 | Setting it to `0` is recommended if you are using this to connect to a syslog server like 176 | Graylog. 177 |
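For example, a UDP whitelist proxy in front of a syslog collector could be sketched like this (the hostname and port are illustrative assumptions):

```yaml
services:
  syslog_proxy:
    image: tecnativa/whitelist
    environment:
      TARGET: graylog.example.com # illustrative target
      MODE: udp
      PORT: "514"
      UDP_ANSWERS: 0 # syslog traffic is one-way; don't wait for answers
    networks:
      # containers in the restricted network send their logs here
      default:
        aliases:
          - graylog.example.com
      public:
```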
184 | ## Example
185 |
186 | So say you have a production app called `coolapp` that sends and reads emails, and uses
187 | Google Font APIs to render some PDF reports.
188 |
189 | It is defined in a `docker-compose.yaml` file like this:
190 |
191 | ```yaml
192 | # Production deployment
193 | version: "2.0"
194 | services:
195 |   app:
196 |     image: tecnativa/coolapp
197 |     ports:
198 |       - "80:80"
199 |     environment:
200 |       DB_HOST: db
201 |     depends_on:
202 |       - db
203 |
204 |   db:
205 |     image: postgres:alpine
206 |     volumes:
207 |       - dbvol:/var/lib/postgresql/data:z
208 |
209 | volumes:
210 |   dbvol:
211 | ```
212 |
213 | Now you want to set up a staging environment for your QA team, which includes a fresh
214 | copy of the production database. To prevent the app from sending or reading emails,
215 | you put it all into a safe internal network:
216 |
217 | ```yaml
218 | # Staging deployment
219 | version: "2.0"
220 | services:
221 |   proxy:
222 |     image: traefik
223 |     networks:
224 |       default:
225 |       public:
226 |     ports:
227 |       - "8080:8080"
228 |     volumes:
229 |       # Here you redirect incoming connections to the app container
230 |       - /etc/traefik/traefik.toml
231 |
232 |   app:
233 |     image: tecnativa/coolapp
234 |     environment:
235 |       DB_HOST: db
236 |     depends_on:
237 |       - db
238 |
239 |   db:
240 |     image: postgres:alpine
241 |
242 | networks:
243 |   default:
244 |     internal: true
245 |   public:
246 | ```
247 |
248 | Now, it turns out your QA team detects font problems. Of course! `app` cannot contact
249 | `fonts.googleapis.com`. Yikes! What to do? 🤷
250 |
251 | `tecnativa/whitelist` to the rescue!! 💪🤠
252 |
253 | ```yaml
254 | # Staging deployment
255 | version: "2.0"
256 | services:
257 |   fonts_googleapis_proxy:
258 |     image: tecnativa/whitelist
259 |     environment:
260 |       TARGET: fonts.googleapis.com
261 |       PRE_RESOLVE: 1 # Otherwise it would resolve to localhost
262 |     networks:
263 |       # Containers in the default restricted network will ask here for fonts
264 |       default:
265 |         aliases:
266 |           - fonts.googleapis.com
267 |       # We need public access to "open the door"
268 |       public:
269 |
270 |   fonts_gstatic_proxy:
271 |     image: tecnativa/whitelist
272 |     networks:
273 |       default:
274 |         aliases:
275 |           - fonts.gstatic.com
276 |       public:
277 |     environment:
278 |       TARGET: fonts.gstatic.com
279 |       PRE_RESOLVE: 1
280 |
281 |   proxy:
282 |     image: traefik
283 |     networks:
284 |       default:
285 |       public:
286 |     ports:
287 |       - "8080:8080"
288 |     volumes:
289 |       # Here you redirect incoming connections to the app container
290 |       - /etc/traefik/traefik.toml
291 |
292 |   app:
293 |     image: tecnativa/coolapp
294 |     environment:
295 |       DB_HOST: db
296 |     depends_on:
297 |       - db
298 |
299 |   db:
300 |     image: postgres:alpine
301 |
302 | networks:
303 |   default:
304 |     internal: true
305 |   public:
306 | ```
307 |
308 | And voilà! `app` has fonts, but nothing more. ✋👮
309 |
310 | ## Development
311 |
312 | All the dependencies you need to develop this project (apart from Docker itself) are
313 | managed with [poetry](https://python-poetry.org/).
314 |
315 | To set up your development environment, run:
316 |
317 | ```bash
318 | pip install pipx # If you don't have pipx installed
319 | pipx install poetry # Install poetry itself
320 | poetry install # Install the python dependencies and set up the development environment
321 | ```
322 |
323 | ### Testing
324 |
325 | To run the tests locally, add `--prebuild` to autobuild the image before testing:
326 |
327 | ```sh
328 | poetry run pytest --prebuild
329 | ```
330 |
331 | By default, the image that the tests use (and optionally prebuild) is named
332 | `test:docker-whitelist`. If you prefer, you can build it separately before testing and
333 | remove the `--prebuild` flag to run the tests with the image you built:
334 |
335 | ```sh
336 | docker image build -t test:docker-whitelist .
337 | poetry run pytest
338 | ```
339 |
340 | If you want to use a different image, pass the `--image` command line argument with the
341 | name you want:
342 |
343 | ```sh
344 | # To build it automatically
345 | poetry run pytest --prebuild --image my_custom_image
346 |
347 | # To prebuild it separately
348 | docker image build -t my_custom_image .
349 | poetry run pytest --image my_custom_image
350 | ```
351 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner.
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "appdirs" 5 | version = "1.4.4" 6 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
7 | optional = false 8 | python-versions = "*" 9 | groups = ["dev"] 10 | files = [ 11 | {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, 12 | {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, 13 | ] 14 | 15 | [[package]] 16 | name = "black" 17 | version = "20.8b1" 18 | description = "The uncompromising code formatter." 19 | optional = false 20 | python-versions = ">=3.6" 21 | groups = ["dev"] 22 | files = [ 23 | {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, 24 | ] 25 | 26 | [package.dependencies] 27 | appdirs = "*" 28 | click = ">=7.1.2" 29 | mypy-extensions = ">=0.4.3" 30 | pathspec = ">=0.6,<1" 31 | regex = ">=2020.1.8" 32 | toml = ">=0.10.1" 33 | typed-ast = ">=1.4.0" 34 | typing-extensions = ">=3.7.4" 35 | 36 | [package.extras] 37 | colorama = ["colorama (>=0.4.3)"] 38 | d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] 39 | 40 | [[package]] 41 | name = "click" 42 | version = "8.1.7" 43 | description = "Composable command line interface toolkit" 44 | optional = false 45 | python-versions = ">=3.7" 46 | groups = ["dev"] 47 | files = [ 48 | {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, 49 | {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, 50 | ] 51 | 52 | [package.dependencies] 53 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 54 | 55 | [[package]] 56 | name = "colorama" 57 | version = "0.4.6" 58 | description = "Cross-platform colored terminal text." 59 | optional = false 60 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 61 | groups = ["dev"] 62 | markers = "platform_system == \"Windows\" or sys_platform == \"win32\"" 63 | files = [ 64 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 65 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 66 | ] 67 | 68 | [[package]] 69 | name = "exceptiongroup" 70 | version = "1.2.0" 71 | description = "Backport of PEP 654 (exception groups)" 72 | optional = false 73 | python-versions = ">=3.7" 74 | groups = ["dev"] 75 | markers = "python_version < \"3.11\"" 76 | files = [ 77 | {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, 78 | {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, 79 | ] 80 | 81 | [package.extras] 82 | test = ["pytest (>=6)"] 83 | 84 | [[package]] 85 | name = "execnet" 86 | version = "2.0.2" 87 | description = "execnet: rapid multi-Python deployment" 88 | optional = false 89 | python-versions = ">=3.7" 90 | groups = ["dev"] 91 | files = [ 92 | {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, 93 | {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, 94 | ] 95 | 96 | [package.extras] 97 | testing = ["hatch", "pre-commit", "pytest", "tox"] 98 | 99 | [[package]] 100 | name = "flake8" 101 | version = "3.9.2" 102 | description = "the modular source code checker: pep8 pyflakes and co" 103 | optional = false 
104 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 105 | groups = ["dev"] 106 | files = [ 107 | {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, 108 | {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, 109 | ] 110 | 111 | [package.dependencies] 112 | mccabe = ">=0.6.0,<0.7.0" 113 | pycodestyle = ">=2.7.0,<2.8.0" 114 | pyflakes = ">=2.3.0,<2.4.0" 115 | 116 | [[package]] 117 | name = "iniconfig" 118 | version = "2.0.0" 119 | description = "brain-dead simple config-ini parsing" 120 | optional = false 121 | python-versions = ">=3.7" 122 | groups = ["dev"] 123 | files = [ 124 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, 125 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, 126 | ] 127 | 128 | [[package]] 129 | name = "mccabe" 130 | version = "0.6.1" 131 | description = "McCabe checker, plugin for flake8" 132 | optional = false 133 | python-versions = "*" 134 | groups = ["dev"] 135 | files = [ 136 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 137 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 138 | ] 139 | 140 | [[package]] 141 | name = "mypy-extensions" 142 | version = "1.0.0" 143 | description = "Type system extensions for programs checked with the mypy type checker." 144 | optional = false 145 | python-versions = ">=3.5" 146 | groups = ["dev"] 147 | files = [ 148 | {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, 149 | {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, 150 | ] 151 | 152 | [[package]] 153 | name = "packaging" 154 | version = "23.2" 155 | description = "Core utilities for Python packages" 156 | optional = false 157 | python-versions = ">=3.7" 158 | groups = ["dev"] 159 | files = [ 160 | {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, 161 | {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, 162 | ] 163 | 164 | [[package]] 165 | name = "pathspec" 166 | version = "0.12.1" 167 | description = "Utility library for gitignore style pattern matching of file paths." 
168 | optional = false 169 | python-versions = ">=3.8" 170 | groups = ["dev"] 171 | files = [ 172 | {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, 173 | {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, 174 | ] 175 | 176 | [[package]] 177 | name = "pluggy" 178 | version = "1.4.0" 179 | description = "plugin and hook calling mechanisms for python" 180 | optional = false 181 | python-versions = ">=3.8" 182 | groups = ["dev"] 183 | files = [ 184 | {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, 185 | {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, 186 | ] 187 | 188 | [package.extras] 189 | dev = ["pre-commit", "tox"] 190 | testing = ["pytest", "pytest-benchmark"] 191 | 192 | [[package]] 193 | name = "plumbum" 194 | version = "1.8.2" 195 | description = "Plumbum: shell combinators library" 196 | optional = false 197 | python-versions = ">=3.6" 198 | groups = ["main", "dev"] 199 | files = [ 200 | {file = "plumbum-1.8.2-py3-none-any.whl", hash = "sha256:3ad9e5f56c6ec98f6f7988f7ea8b52159662ea9e915868d369dbccbfca0e367e"}, 201 | {file = "plumbum-1.8.2.tar.gz", hash = "sha256:9e6dc032f4af952665f32f3206567bc23b7858b1413611afe603a3f8ad9bfd75"}, 202 | ] 203 | 204 | [package.dependencies] 205 | pywin32 = {version = "*", markers = "platform_system == \"Windows\" and platform_python_implementation != \"PyPy\""} 206 | 207 | [package.extras] 208 | dev = ["paramiko", "psutil", "pytest (>=6.0)", "pytest-cov", "pytest-mock", "pytest-timeout"] 209 | docs = ["sphinx (>=4.0.0)", "sphinx-rtd-theme (>=1.0.0)"] 210 | ssh = ["paramiko"] 211 | 212 | [[package]] 213 | name = "py" 214 | version = "1.11.0" 215 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 216 | optional = false 217 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 218 | groups = ["dev"] 219 | files = [ 220 | {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, 221 | {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, 222 | ] 223 | 224 | [[package]] 225 | name = "pycodestyle" 226 | version = "2.7.0" 227 | description = "Python style guide checker" 228 | optional = false 229 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 230 | groups = ["dev"] 231 | files = [ 232 | {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, 233 | {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, 234 | ] 235 | 236 | [[package]] 237 | name = "pycurl" 238 | version = "7.45.6" 239 | description = "PycURL -- A Python Interface To The cURL library" 240 | optional = false 241 | python-versions = ">=3.5" 242 | groups = ["main"] 243 | files = [ 244 | {file = "pycurl-7.45.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c31b390f1e2cd4525828f1bb78c1f825c0aab5d1588228ed71b22c4784bdb593"}, 245 | {file = "pycurl-7.45.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:942b352b69184cb26920db48e0c5cb95af39874b57dbe27318e60f1e68564e37"}, 246 | {file = "pycurl-7.45.6-cp310-cp310-manylinux_2_28_aarch64.whl", hash = 
"sha256:3441ee77e830267aa6e2bb43b29fd5f8a6bd6122010c76a6f0bf84462e9ea9c7"}, 247 | {file = "pycurl-7.45.6-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:2a21e13278d7553a04b421676c458449f6c10509bebf04993f35154b06ee2b20"}, 248 | {file = "pycurl-7.45.6-cp310-cp310-win32.whl", hash = "sha256:d0b5501d527901369aba307354530050f56cd102410f2a3bacd192dc12c645e3"}, 249 | {file = "pycurl-7.45.6-cp310-cp310-win_amd64.whl", hash = "sha256:abe1b204a2f96f2eebeaf93411f03505b46d151ef6d9d89326e6dece7b3a008a"}, 250 | {file = "pycurl-7.45.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6f57ad26d6ab390391ad5030790e3f1a831c1ee54ad3bf969eb378f5957eeb0a"}, 251 | {file = "pycurl-7.45.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6fd295f03c928da33a00f56c91765195155d2ac6f12878f6e467830b5dce5f5"}, 252 | {file = "pycurl-7.45.6-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:334721ce1ccd71ff8e405470768b3d221b4393570ccc493fcbdbef4cd62e91ed"}, 253 | {file = "pycurl-7.45.6-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:0cd6b7794268c17f3c660162ed6381769ce0ad260331ef49191418dfc3a2d61a"}, 254 | {file = "pycurl-7.45.6-cp311-cp311-win32.whl", hash = "sha256:357ea634395310085b9d5116226ac5ec218a6ceebf367c2451ebc8d63a6e9939"}, 255 | {file = "pycurl-7.45.6-cp311-cp311-win_amd64.whl", hash = "sha256:878ae64484db18f8f10ba99bffc83fefb4fe8f5686448754f93ec32fa4e4ee93"}, 256 | {file = "pycurl-7.45.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c872d4074360964697c39c1544fe8c91bfecbff27c1cdda1fee5498e5fdadcda"}, 257 | {file = "pycurl-7.45.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56d1197eadd5774582b259cde4364357da71542758d8e917f91cc6ed7ed5b262"}, 258 | {file = "pycurl-7.45.6-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8a99e56d2575aa74c48c0cd08852a65d5fc952798f76a34236256d5589bf5aa0"}, 259 | {file = "pycurl-7.45.6-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c04230b9e9cfdca9cf3eb09a0bec6cf2f084640f1f1ca1929cca51411af85de2"}, 260 | {file = "pycurl-7.45.6-cp312-cp312-win32.whl", hash = "sha256:ae893144b82d72d95c932ebdeb81fc7e9fde758e5ecd5dd10ad5b67f34a8b8ee"}, 261 | {file = "pycurl-7.45.6-cp312-cp312-win_amd64.whl", hash = "sha256:56f841b6f2f7a8b2d3051b9ceebd478599dbea3c8d1de8fb9333c895d0c1eea5"}, 262 | {file = "pycurl-7.45.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7c09b7180799af70fc1d4eed580cfb1b9f34fda9081f73a3e3bc9a0e5a4c0e9b"}, 263 | {file = "pycurl-7.45.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:361bf94b2a057c7290f9ab84e935793ca515121fc012f4b6bef6c3b5e4ea4397"}, 264 | {file = "pycurl-7.45.6-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:bb9eff0c7794af972da769a887c87729f1bcd8869297b1c01a2732febbb75876"}, 265 | {file = "pycurl-7.45.6-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:26839d43dc7fff6b80e0067f185cc1d0e9be2ae6e2e2361ae8488cead5901c04"}, 266 | {file = "pycurl-7.45.6-cp313-cp313-win32.whl", hash = "sha256:a721c2696a71b1aa5ecf82e6d0ade64bc7211b7317f1c9c66e82f82e2264d8b4"}, 267 | {file = "pycurl-7.45.6-cp313-cp313-win_amd64.whl", hash = "sha256:f0198ebcda8686b3a0c66d490a687fa5fd466f8ecc2f20a0ed0931579538ae3d"}, 268 | {file = "pycurl-7.45.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a554a2813d415a7bb9a996a6298f3829f57e987635dcab9f1197b2dccd0ab3b2"}, 269 | {file = "pycurl-7.45.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9f721e3394e5bd7079802ec1819b19c5be4842012268cc45afcb3884efb31cf0"}, 270 | {file = "pycurl-7.45.6-cp39-cp39-manylinux_2_28_aarch64.whl", hash = 
"sha256:81005c0f681d31d5af694d1d3c18bbf1bed0bc8b2bb10fb7388cb1378ba9bd6a"}, 271 | {file = "pycurl-7.45.6-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:3fc0b505c37c7c54d88ced27e1d9e3241130987c24bf1611d9bbd9a3e499e07c"}, 272 | {file = "pycurl-7.45.6-cp39-cp39-win32.whl", hash = "sha256:1309fc0f558a80ca444a3a5b0bdb1572a4d72b195233f0e65413b4d4dd78809b"}, 273 | {file = "pycurl-7.45.6-cp39-cp39-win_amd64.whl", hash = "sha256:2d1a49418b8b4c61f52e06d97b9c16142b425077bd997a123a2ba9ef82553203"}, 274 | {file = "pycurl-7.45.6.tar.gz", hash = "sha256:2b73e66b22719ea48ac08a93fc88e57ef36d46d03cb09d972063c9aa86bb74e6"}, 275 | ] 276 | 277 | [[package]] 278 | name = "pyflakes" 279 | version = "2.3.1" 280 | description = "passive checker of Python programs" 281 | optional = false 282 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 283 | groups = ["dev"] 284 | files = [ 285 | {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, 286 | {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, 287 | ] 288 | 289 | [[package]] 290 | name = "pytest" 291 | version = "8.0.0" 292 | description = "pytest: simple powerful testing with Python" 293 | optional = false 294 | python-versions = ">=3.8" 295 | groups = ["dev"] 296 | files = [ 297 | {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, 298 | {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, 299 | ] 300 | 301 | [package.dependencies] 302 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 303 | exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} 304 | iniconfig = "*" 305 | packaging = "*" 306 | pluggy = ">=1.3.0,<2.0" 307 | tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} 308 | 309 | [package.extras] 310 | testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] 311 | 312 | [[package]] 313 | name = "pytest-forked" 314 | version = "1.6.0" 315 | description = "run tests in isolated forked subprocesses" 316 | optional = false 317 | python-versions = ">=3.7" 318 | groups = ["dev"] 319 | files = [ 320 | {file = "pytest-forked-1.6.0.tar.gz", hash = "sha256:4dafd46a9a600f65d822b8f605133ecf5b3e1941ebb3588e943b4e3eb71a5a3f"}, 321 | {file = "pytest_forked-1.6.0-py3-none-any.whl", hash = "sha256:810958f66a91afb1a1e2ae83089d8dc1cd2437ac96b12963042fbb9fb4d16af0"}, 322 | ] 323 | 324 | [package.dependencies] 325 | py = "*" 326 | pytest = ">=3.10" 327 | 328 | [[package]] 329 | name = "pytest-timeout" 330 | version = "2.2.0" 331 | description = "pytest plugin to abort hanging tests" 332 | optional = false 333 | python-versions = ">=3.7" 334 | groups = ["dev"] 335 | files = [ 336 | {file = "pytest-timeout-2.2.0.tar.gz", hash = "sha256:3b0b95dabf3cb50bac9ef5ca912fa0cfc286526af17afc806824df20c2f72c90"}, 337 | {file = "pytest_timeout-2.2.0-py3-none-any.whl", hash = "sha256:bde531e096466f49398a59f2dde76fa78429a09a12411466f88a07213e220de2"}, 338 | ] 339 | 340 | [package.dependencies] 341 | pytest = ">=5.0.0" 342 | 343 | [[package]] 344 | name = "pytest-xdist" 345 | version = "2.5.0" 346 | description = "pytest xdist plugin for distributed testing and loop-on-failing modes" 347 | optional = false 348 | python-versions = ">=3.6" 349 | groups = 
["dev"] 350 | files = [ 351 | {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, 352 | {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"}, 353 | ] 354 | 355 | [package.dependencies] 356 | execnet = ">=1.1" 357 | pytest = ">=6.2.0" 358 | pytest-forked = "*" 359 | 360 | [package.extras] 361 | psutil = ["psutil (>=3.0)"] 362 | setproctitle = ["setproctitle"] 363 | testing = ["filelock"] 364 | 365 | [[package]] 366 | name = "pywin32" 367 | version = "306" 368 | description = "Python for Window Extensions" 369 | optional = false 370 | python-versions = "*" 371 | groups = ["main", "dev"] 372 | markers = "platform_system == \"Windows\" and platform_python_implementation != \"PyPy\"" 373 | files = [ 374 | {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, 375 | {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, 376 | {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, 377 | {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, 378 | {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, 379 | {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, 380 | {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, 381 | {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, 382 | {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, 383 | {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, 384 | {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, 385 | {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, 386 | {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, 387 | {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, 388 | ] 389 | 390 | [[package]] 391 | name = "regex" 392 | version = "2023.12.25" 393 | description = "Alternative regular expression module, to replace re." 
394 | optional = false 395 | python-versions = ">=3.7" 396 | groups = ["dev"] 397 | files = [ 398 | {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, 399 | {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, 400 | {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, 401 | {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, 402 | {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, 403 | {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, 404 | {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, 405 | {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, 406 | {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, 407 | {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, 408 | {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, 409 | {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, 410 | {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, 411 | {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, 412 | {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, 413 | {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, 414 | {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, 415 | {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, 416 | {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, 417 | {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, 418 | {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, 419 | 
{file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, 420 | {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, 421 | {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, 422 | {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, 423 | {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, 424 | {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, 425 | {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, 426 | {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, 427 | {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, 428 | {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, 429 | {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, 430 | {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, 431 | {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, 432 | {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, 433 | {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, 434 | {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, 435 | {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, 436 | {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, 437 | {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, 438 | {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, 439 | {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, 440 | {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, 441 | {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, 442 | {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, 443 | {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, 444 | {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, 445 | {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, 446 | {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, 447 | {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, 448 | {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, 449 | {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, 450 | {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, 451 | {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, 452 | {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, 453 | {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, 454 | {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, 455 | {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, 456 | {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, 457 | {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, 458 | {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, 459 | {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, 460 | {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, 461 | {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, 462 | {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, 463 | {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, 464 | {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, 465 | {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, 466 | {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, 467 | {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, 468 | {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, 469 | {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, 470 | {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, 471 | {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, 472 | {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, 473 | {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, 474 | {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, 475 | {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, 476 | {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, 477 | {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, 478 | {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, 479 | {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, 480 | {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, 481 | {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, 482 | {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, 483 | {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, 484 | {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, 485 | {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, 486 | {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, 487 | {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, 488 | {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, 489 | {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, 490 | {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, 491 | ] 492 | 493 | [[package]] 494 | name = "toml" 495 | version = "0.10.2" 496 | description = "Python Library for Tom's Obvious, Minimal Language" 497 | optional = false 498 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 499 | groups = ["dev"] 500 | files = [ 501 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, 502 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, 503 | ] 504 | 505 | [[package]] 506 | name = "tomli" 507 | version = "2.0.1" 508 | description = "A lil' TOML parser" 509 | optional = false 510 | python-versions = ">=3.7" 511 | groups = ["dev"] 512 | markers = "python_version < \"3.11\"" 513 | files = [ 514 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 515 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 516 | ] 517 | 518 | [[package]] 519 | name = "typed-ast" 520 | version = "1.5.5" 521 | description = "a fork of Python 2 and 3 ast modules with type comment support" 522 | optional = false 523 | python-versions = ">=3.6" 524 | groups = ["dev"] 525 | files = [ 526 | {file = "typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b"}, 527 | {file = "typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686"}, 528 | {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769"}, 529 | {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04"}, 530 | {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d"}, 531 | {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d"}, 532 | {file = "typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02"}, 533 | {file = "typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee"}, 534 | {file = "typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18"}, 535 | {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88"}, 536 | {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2"}, 537 | {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9"}, 538 | {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8"}, 539 | {file = "typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b"}, 540 | {file = "typed_ast-1.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f214394fc1af23ca6d4e9e744804d890045d1643dd7e8229951e0ef39429b5"}, 541 | {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118c1ce46ce58fda78503eae14b7664163aa735b620b64b5b725453696f2a35c"}, 542 | {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be4919b808efa61101456e87f2d4c75b228f4e52618621c77f1ddcaae15904fa"}, 543 | {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fc2b8c4e1bc5cd96c1a823a885e6b158f8451cf6f5530e1829390b4d27d0807f"}, 544 | {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:16f7313e0a08c7de57f2998c85e2a69a642e97cb32f87eb65fbfe88381a5e44d"}, 545 | {file = "typed_ast-1.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:2b946ef8c04f77230489f75b4b5a4a6f24c078be4aed241cfabe9cbf4156e7e5"}, 546 | {file = "typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e"}, 547 | {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e"}, 548 | {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311"}, 549 | {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2"}, 550 | {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4"}, 551 | {file = "typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431"}, 552 | {file = "typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a"}, 553 | {file = "typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437"}, 554 | {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede"}, 555 | {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4"}, 556 | {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6"}, 557 | {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4"}, 558 | {file = "typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b"}, 559 | {file = "typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10"}, 560 | {file = "typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814"}, 561 | {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8"}, 562 | {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274"}, 563 | {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a"}, 564 | {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba"}, 565 | {file = "typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155"}, 566 | {file = "typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd"}, 567 | ] 568 | 569 | [[package]] 570 | name = "typing-extensions" 571 | version = "4.9.0" 572 | description = "Backported and Experimental Type Hints for Python 3.8+" 573 | optional = false 574 | python-versions = ">=3.8" 575 | groups = ["dev"] 576 | files = [ 577 | {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, 578 | {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, 579 | ] 580 | 581 | [metadata] 582 | lock-version = "2.1" 583 | python-versions = "^3.9" 584 | content-hash = "70f7ff1d2fd7c03fc9361ec432322eaf585c817076066b3a3e4991f5b355eedb" 585 | --------------------------------------------------------------------------------