├── .gitattributes ├── .github └── workflows │ └── pr-description-check.yaml ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── build_on_apple_m1.sh ├── build_release.sh ├── build_with_arm.sh ├── poetry.lock ├── pyproject.toml ├── skaffold.yaml ├── src └── debug_toolkit │ ├── __init__.py │ ├── main.py │ ├── payloads │ ├── debugger.py │ ├── hello.py │ ├── memory.py │ ├── set_logging_level.py │ └── stack_trace.py │ └── trampolines │ └── simple.py └── test.yaml /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.github/workflows/pr-description-check.yaml: -------------------------------------------------------------------------------- 1 | name: Check PR Description contains tests performed 2 | 3 | on: 4 | pull_request: 5 | types: [opened, edited, reopened, synchronize] 6 | 7 | jobs: 8 | check-pr-description: 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - name: Check out the code 13 | uses: actions/checkout@v2 14 | 15 | - name: Validate PR description 16 | id: validate 17 | run: | 18 | if [[ ! "${{ github.event.pull_request.body }}" =~ "## Tests performed" ]]; then 19 | echo "PR description does not contain the section 'Tests performed'." 20 | exit 1 21 | fi 22 | 23 | # Extract the "Tests performed" section 24 | tests_performed_section=$(sed -n '/## Tests performed/,/##/p' <<< "${{ github.event.pull_request.body }}") 25 | 26 | # Check if there is at least one test description in the "Tests performed" section 27 | if [[ ! "$tests_performed_section" =~ "- " ]]; then 28 | echo "The 'Tests performed' section does not contain a list of tests." 29 | exit 1 30 | fi 31 | 32 | - name: Success message 33 | if: success() 34 | run: echo "PR description contains the 'Tests performed' section with a list of tests." 35 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .nox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *.cover 48 | .hypothesis/ 49 | .pytest_cache/ 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | local_settings.py 58 | db.sqlite3 59 | 60 | # Flask stuff: 61 | instance/ 62 | .webassets-cache 63 | 64 | # Scrapy stuff: 65 | .scrapy 66 | 67 | # Sphinx documentation 68 | docs/_build/ 69 | 70 | # PyBuilder 71 | target/ 72 | 73 | # Jupyter Notebook 74 | .ipynb_checkpoints 75 | 76 | # IPython 77 | profile_default/ 78 | ipython_config.py 79 | 80 | # pyenv 81 | .python-version 82 | 83 | # celery beat schedule file 84 | celerybeat-schedule 85 | 86 | # SageMath parsed files 87 | *.sage.py 88 | 89 | # Environments 90 | .env 91 | .venv 92 | env/ 93 | venv/ 94 | ENV/ 95 | env.bak/ 96 | venv.bak/ 97 | 98 | # Spyder project settings 99 | .spyderproject 100 | .spyproject 101 | 102 | # Rope project settings 103 | .ropeproject 104 | 105 | # mkdocs documentation 106 | /site 107 | 108 | # mypy 109 | .mypy_cache/ 110 | .dmypy.json 111 | dmypy.json 112 | 113 | # Pyre type checker 114 | .pyre/ 115 | 116 | # PyCharm 117 | .idea/ 118 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.12-slim as builder 2 | 3 | ENV LANG=C.UTF-8 4 | ENV PYTHONDONTWRITEBYTECODE=1 5 | ENV PYTHONUNBUFFERED=1 6 | ENV PATH="/app/venv/bin:$PATH" 7 | 8 | WORKDIR /app 9 | 10 | RUN apt-get update \ 11 | && apt-get install -y --no-install-recommends procps gdb \ 12 | && apt-get install -y gcc\ 13 | && dpkg --add-architecture arm64 \ 14 | && apt-get purge -y --auto-remove \ 15 | && rm -rf /var/lib/apt/lists/* 16 | RUN pip install poetry==1.6.1 17 | 18 | COPY poetry.lock pyproject.toml /app/ 19 | 20 | COPY src /app/src 21 | 22 | RUN python -m venv /app/venv && \ 23 | . 
/app/venv/bin/activate && \ 24 | poetry config virtualenvs.create false && \ 25 | poetry install --no-dev 26 | 27 | FROM python:3.12-slim 28 | 29 | RUN apt-get update \ 30 | && apt-get install -y --no-install-recommends procps \ 31 | && dpkg --add-architecture arm64 \ 32 | && apt-get purge -y --auto-remove \ 33 | && rm -rf /var/lib/apt/lists/* 34 | 35 | 36 | ENV PYTHONUNBUFFERED=1 37 | ENV PATH="/venv/bin:$PATH" 38 | ENV PYTHONPATH=$PYTHONPATH:.:/app 39 | COPY --from=builder /app/venv /venv 40 | COPY --from=builder /app /app 41 | 42 | RUN echo '#!/bin/bash\npython /app/src/debug_toolkit/main.py $@' > /usr/bin/debug-toolkit && \ 43 | chmod +x /usr/bin/debug-toolkit 44 | 45 | # -u disables stdout buffering https://stackoverflow.com/questions/107705/disable-output-buffering 46 | # TODO: use -u in developer builds only 47 | CMD exec /bin/bash -c "trap : TERM INT; sleep infinity & wait" 48 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Robusta 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # TL;DR 2 | A modern code-injection framework for Python + useful utilities. 3 | 4 | This is like [Pyrasite](https://github.com/lmacken/pyrasite) but without the bugs and Kubernetes-aware. 5 | 6 | This powers many [Robusta](http://robusta.dev/) features. You should probably use Robusta instead of using this directly. 7 | 8 | # Overview 9 | This repo contains source code for the docker container that powers the following [Robusta](http://robusta.dev/) features: 10 | 11 | 1. Inject code into Python apps 12 | 2. [Attach a VSCode debugger to any Python application running on Kubernetes](https://docs.robusta.dev/master/catalog/actions/python-troubleshooting.html#python-debugger) - this works by injecting [debugpy](https://github.com/microsoft/debugpy/) 13 | 3. [Profile cpu and memory usage of Python apps](https://docs.robusta.dev/master/catalog/actions/python-troubleshooting.html#python-profiler) - this works by wrapping PySpy (for CPU) and by injecting the `tracemalloc` library into the target process (for memory) 14 | 4. List processes in any pod (not just Python) 15 | 16 | Essentially, it is two things: 17 | 1. 
A [Pyrasite](https://github.com/lmacken/pyrasite) replacement that fixes deadlocks and other issues. 18 | 2. A Docker container containing that Pyrasite replacement, which is used by Robusta to troubleshoot and debug containers 19 | 20 | # Adding new python injection payloads 21 | 1. Add a new payload in src/debug_toolkit/payloads 22 | 1. Make sure ALL of your code is inside the `entrypoint` function, even imports. Errors are not handled or reported for code outside of the entrypoint. 23 | 2. Add a wrapper command in src/debug_toolkit/main.py 24 | 25 | # Releasing a new version 26 | 27 | 1. Bump the version in pyproject.toml 28 | 2. Run the following, replacing "v4" with the tag of the new release. 29 | 30 | ``` 31 | skaffold build --tag v4 32 | ``` 33 | 34 | That is all. We're not pushing versions to pypi right now. 35 | -------------------------------------------------------------------------------- /build_on_apple_m1.sh: -------------------------------------------------------------------------------- 1 | docker buildx build \ 2 | --platform linux/amd64 \ 3 | --tag $IMAGE \ 4 | --push \ 5 | $BUILD_CONTEXT -------------------------------------------------------------------------------- /build_release.sh: -------------------------------------------------------------------------------- 1 | docker buildx build \ 2 | --build-arg BUILDKIT_INLINE_CACHE=1 \ 3 | --platform linux/arm64,linux/amd64 \ 4 | --tag robustadev/debug-toolkit:${TAG} \ 5 | --tag us-central1-docker.pkg.dev/genuine-flight-317411/devel/debug-toolkit:${TAG} \ 6 | --push \ 7 | . -------------------------------------------------------------------------------- /build_with_arm.sh: -------------------------------------------------------------------------------- 1 | docker buildx build \ 2 | --platform linux/arm64,linux/amd64 \ 3 | --tag $IMAGE \ 4 | --push \ 5 | $BUILD_CONTEXT -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "click" 5 | version = "8.1.8" 6 | description = "Composable command line interface toolkit" 7 | optional = false 8 | python-versions = ">=3.7" 9 | files = [ 10 | {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, 11 | {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, 12 | ] 13 | 14 | [package.dependencies] 15 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 16 | 17 | [[package]] 18 | name = "colorama" 19 | version = "0.4.6" 20 | description = "Cross-platform colored terminal text." 21 | optional = false 22 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 23 | files = [ 24 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 25 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 26 | ] 27 | 28 | [[package]] 29 | name = "psutil" 30 | version = "5.9.8" 31 | description = "Cross-platform lib for process and system monitoring in Python." 
32 | optional = false 33 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 34 | files = [ 35 | {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, 36 | {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, 37 | {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, 38 | {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, 39 | {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, 40 | {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, 41 | {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, 42 | {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, 43 | {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, 44 | {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, 45 | {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, 46 | {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, 47 | {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, 48 | {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, 49 | {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, 50 | {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, 51 | ] 52 | 53 | [package.extras] 54 | test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] 55 | 56 | [[package]] 57 | name = "py-spy" 58 | version = "0.3.14" 59 | description = "Sampling profiler for Python programs" 60 | optional = false 61 | python-versions = "*" 62 | files = [ 63 | {file = "py_spy-0.3.14-py2.py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5b342cc5feb8d160d57a7ff308de153f6be68dcf506ad02b4d67065f2bae7f45"}, 64 | {file = "py_spy-0.3.14-py2.py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fe7efe6c91f723442259d428bf1f9ddb9c1679828866b353d539345ca40d9dd2"}, 65 | {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590905447241d789d9de36cff9f52067b6f18d8b5e9fb399242041568d414461"}, 66 | {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd6211fe7f587b3532ba9d300784326d9a6f2b890af7bf6fff21a029ebbc812b"}, 67 | {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:3e8e48032e71c94c3dd51694c39e762e4bbfec250df5bf514adcdd64e79371e0"}, 68 | {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f59b0b52e56ba9566305236375e6fc68888261d0d36b5addbe3cf85affbefc0e"}, 69 | {file = "py_spy-0.3.14-py2.py3-none-win_amd64.whl", hash = "sha256:8f5b311d09f3a8e33dbd0d44fc6e37b715e8e0c7efefafcda8bfd63b31ab5a31"}, 70 | ] 71 | 72 | [[package]] 73 | name = "pydantic" 74 | version = "1.10.13" 75 | description = "Data validation and settings management using python type hints" 76 | optional = false 77 | python-versions = ">=3.7" 78 | files = [ 79 | {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, 80 | {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, 81 | {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, 82 | {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, 83 | {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, 84 | {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, 85 | {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, 86 | {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, 87 | {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, 88 | {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, 89 | {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, 90 | {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, 91 | {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, 92 | {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, 93 | {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, 94 | {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, 95 | {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, 96 | {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, 97 | 
{file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, 98 | {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, 99 | {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, 100 | {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, 101 | {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, 102 | {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, 103 | {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, 104 | {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, 105 | {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, 106 | {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, 107 | {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, 108 | {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, 109 | {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, 110 | {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, 111 | {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, 112 | {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, 113 | {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, 114 | {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, 115 | ] 116 | 117 | [package.dependencies] 118 | typing-extensions = ">=4.2.0" 119 | 120 | [package.extras] 121 | dotenv = ["python-dotenv (>=0.10.4)"] 122 | email = ["email-validator (>=1.0.3)"] 123 | 124 | [[package]] 125 | name = "typer" 126 | version = "0.7.0" 127 | description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
128 | optional = false 129 | python-versions = ">=3.6" 130 | files = [ 131 | {file = "typer-0.7.0-py3-none-any.whl", hash = "sha256:b5e704f4e48ec263de1c0b3a2387cd405a13767d2f907f44c1a08cbad96f606d"}, 132 | {file = "typer-0.7.0.tar.gz", hash = "sha256:ff797846578a9f2a201b53442aedeb543319466870fbe1c701eab66dd7681165"}, 133 | ] 134 | 135 | [package.dependencies] 136 | click = ">=7.1.1,<9.0.0" 137 | 138 | [package.extras] 139 | all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] 140 | dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] 141 | doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] 142 | test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] 143 | 144 | [[package]] 145 | name = "typing-extensions" 146 | version = "4.6.0" 147 | description = "Backported and Experimental Type Hints for Python 3.7+" 148 | optional = false 149 | python-versions = ">=3.7" 150 | files = [ 151 | {file = "typing_extensions-4.6.0-py3-none-any.whl", hash = "sha256:6ad00b63f849b7dcc313b70b6b304ed67b2b2963b3098a33efe18056b1a9a223"}, 152 | {file = "typing_extensions-4.6.0.tar.gz", hash = "sha256:ff6b238610c747e44c268aa4bb23c8c735d665a63726df3f9431ce707f2aa768"}, 153 | ] 154 | 155 | [metadata] 156 | lock-version = "2.0" 157 | python-versions = "^3.8" 158 | content-hash = "4a9cc9befbe393988d2fc175cf2c2c4bddb533f586ce6f07f2c6149abce56e8b" 159 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "debug_toolkit" 3 | version = "4.3.0" 4 | description = "" 5 | authors = ["Natan Yellin "] 6 | packages = [ 7 | { include = "debug_toolkit", from = "src" }, 8 | ] 9 | 10 | [tool.poetry.scripts] 11 | debug-toolkit = "debug_toolkit.main:app" 12 | 13 | [tool.poetry.dependencies] 14 | python = "^3.8" 15 | typer = "^0.7.0" 16 | psutil = "^5.8.0" 17 | py-spy = "^0.3.10" 18 | pydantic = "1.10.13" 19 | typing-extensions = "4.6.0" 20 | 21 | [tool.poetry.dev-dependencies] 22 | 23 | [build-system] 24 | requires = ["poetry-core>=1.0.0"] 25 | build-backend = "poetry.core.masonry.api" 26 | -------------------------------------------------------------------------------- /skaffold.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: skaffold/v2beta1 2 | kind: Config 3 | metadata: 4 | name: python-tools 5 | build: 6 | artifacts: 7 | - image: us-central1-docker.pkg.dev/genuine-flight-317411/devel/debug-toolkit 8 | docker: 9 | dockerfile: Dockerfile 10 | local: 11 | push: true 12 | concurrency: 0 13 | deploy: 14 | kubectl: 15 | manifests: 16 | - test.yaml 17 | 18 | 19 | profiles: 20 | - name: apple-m1-dev 21 | build: 22 | artifacts: 23 | - image: us-central1-docker.pkg.dev/genuine-flight-317411/devel/debug-toolkit 24 | context: . 25 | custom: 26 | buildCommand: ./build_on_apple_m1.sh 27 | local: 28 | push: true 29 | concurrency: 0 30 | 31 | - name: arm 32 | build: 33 | artifacts: 34 | - image: us-central1-docker.pkg.dev/genuine-flight-317411/devel/debug-toolkit 35 | context: . 
36 | custom: 37 | buildCommand: ./build_with_arm.sh 38 | local: 39 | push: true 40 | concurrency: 0 -------------------------------------------------------------------------------- /src/debug_toolkit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/robusta-dev/debug-toolkit/c8028a4b998cc6aa7b322c15e15127df3ea54d32/src/debug_toolkit/__init__.py -------------------------------------------------------------------------------- /src/debug_toolkit/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/local/bin/python3 2 | import os 3 | import re 4 | import secrets 5 | import subprocess 6 | import tempfile 7 | import textwrap 8 | import time 9 | import pkgutil 10 | from pathlib import Path 11 | from typing import List, Optional 12 | from enum import Enum 13 | 14 | import psutil 15 | import typer 16 | from pydantic import BaseModel 17 | 18 | app = typer.Typer() 19 | 20 | 21 | class Process(BaseModel): 22 | pid: int 23 | exe: str 24 | cmdline: List[str] 25 | 26 | 27 | class ProcessList(BaseModel): 28 | processes: List[Process] 29 | 30 | 31 | def is_process_in_pod(pid: int, pod_id: str, container_ids: List[str]) -> bool: 32 | # see https://man7.org/linux/man-pages/man7/cgroups.7.html 33 | # and https://github.com/elastic/apm-agent-python/blob/main/elasticapm/utils/cgroup.py 34 | all_ids = [pod_id.lower(), *container_ids] 35 | all_ids = [id.lower() for id in all_ids] 36 | 37 | try: 38 | path = f"/proc/{pid}/cgroup" 39 | with open(path, "r") as f: 40 | content = f.read().lower() 41 | if any(id in content for id in all_ids): 42 | return True 43 | except Exception as e: 44 | print("exception:", e) 45 | return False 46 | 47 | 48 | def get_pod_processes(pod_id: str, container_ids: Optional[List[str]]) -> ProcessList: 49 | processes = [] 50 | if container_ids is None: 51 | container_ids = [] 52 | 53 | for pid in psutil.pids(): 54 | if is_process_in_pod(pid, pod_id, container_ids): 55 | proc = psutil.Process(pid) 56 | processes.append(Process(pid=pid, exe=proc.exe(), cmdline=proc.cmdline())) 57 | return ProcessList(processes=processes) 58 | 59 | 60 | @app.command() 61 | def pod_ps(pod_id: str, container_ids: Optional[List[str]] = typer.Argument(None)): 62 | typer.echo(get_pod_processes(pod_id, container_ids).json()) 63 | 64 | 65 | @app.command() 66 | def find_pid(pod_id: str, cmdline: str, exe: str, container_ids: Optional[List[str]] = typer.Argument(None)): 67 | for proc in get_pod_processes(pod_id, container_ids).processes: 68 | if cmdline in " ".join(proc.cmdline) and exe in proc.exe: 69 | typer.echo(proc.pid) 70 | 71 | 72 | def do_injection(pid, python_code, verbose): 73 | python_code = ( 74 | python_code.replace("\\", "\\\\").replace('"', '\\"').replace("\n", "\\n") 75 | ) 76 | 77 | batch_file = tempfile.NamedTemporaryFile(delete=False) 78 | # see https://github.com/microsoft/debugpy/blob/1de552631756b2d32010595c2e605aa491532250/src/debugpy/_vendored/pydevd/pydevd_attach_to_process/add_code_to_python_process.py#L307 79 | batch_file.write( 80 | textwrap.dedent( 81 | f"""\ 82 | set trace-commands on 83 | set logging on 84 | set scheduler-locking off 85 | call ((int (*)())PyGILState_Ensure)() 86 | call ((int (*)(const char *))PyRun_SimpleString)("{python_code}") 87 | call ((void (*) (int) )PyGILState_Release)($1) 88 | """ 89 | ).encode() 90 | ) 91 | batch_file.flush() 92 | batch_file.close() 93 | 94 | cmd = f"gdb -p {pid} --batch --command={batch_file.name}" 95 | if verbose: 96 | 
typer.echo(f"running gdb with cmd {cmd}") 97 | 98 | # we don't properly catch errors like trying to debug a non existent process 99 | # we just wait for them later... fix this by both creating a _start file and by checking stderr / error code 100 | output = subprocess.check_output( 101 | cmd, shell=True, stdin=subprocess.PIPE, stderr=subprocess.STDOUT 102 | ) 103 | 104 | if verbose: 105 | typer.echo(output.decode()) 106 | 107 | 108 | def do_trampoline_injection(pid, payload, verbose, timeout=120): 109 | output_filename = f"{secrets.token_hex(16)}_output.txt" 110 | done_filename = f"{secrets.token_hex(16)}_done.txt" 111 | abs_done_path = f"/proc/{pid}/cwd/{done_filename}" 112 | abs_output_path = f"/proc/{pid}/cwd/{output_filename}" 113 | 114 | trampoline = pkgutil.get_data(__package__, "trampolines/simple.py").decode() 115 | trampoline = trampoline.replace("OUTPUT_PATH_PLACEHOLDER", f"./{output_filename}") 116 | trampoline = trampoline.replace("DONE_PATH_PLACEHOLDER", f"./{done_filename}") 117 | payload = f"{payload}\n\n{trampoline}" 118 | 119 | if verbose: 120 | typer.echo("Code to inject is:") 121 | typer.secho(payload, fg="blue") 122 | 123 | do_injection(pid, payload, verbose) 124 | if verbose: 125 | typer.secho("Done injecting", fg="green") 126 | 127 | for i in range(timeout): 128 | if os.path.exists(abs_done_path): 129 | break 130 | if verbose: 131 | typer.echo(f"waiting for {abs_done_path} to exist") 132 | time.sleep(1) 133 | 134 | if not os.path.exists(abs_done_path): 135 | typer.secho(f"ERROR! Timed out after {timeout} seconds", fg="red") 136 | 137 | with open(abs_done_path, "r") as f: 138 | status = f.read().strip() 139 | if status != "SUCCESS" or verbose: 140 | typer.secho(f"done status is {status}") 141 | 142 | if os.path.exists(abs_output_path): 143 | with open(abs_output_path, "rb") as f: 144 | result = f.read() 145 | typer.echo(result.decode()) 146 | else: 147 | result = None 148 | 149 | if os.path.exists(abs_output_path): 150 | os.remove(abs_output_path) 151 | if os.path.exists(abs_done_path): 152 | os.remove(abs_done_path) 153 | return result 154 | 155 | 156 | @app.command() 157 | def inject_string( 158 | pid: int, 159 | payload: str, 160 | trampoline: bool = False, 161 | trampoline_timeout: int = 120, 162 | verbose: bool = False, 163 | ): 164 | if not trampoline: 165 | return do_injection(pid, payload, verbose) 166 | else: 167 | return do_trampoline_injection(pid, payload, verbose, trampoline_timeout) 168 | 169 | 170 | @app.command() 171 | def inject_file( 172 | pid: int, 173 | payload_path: Path = typer.Argument( 174 | ..., 175 | exists=True, 176 | file_okay=True, 177 | readable=True, 178 | resolve_path=True, 179 | ), 180 | trampoline: bool = False, 181 | trampoline_timeout: int = 120, 182 | verbose: bool = False, 183 | ): 184 | with open(payload_path) as f: 185 | return inject_string(pid, f.read(), trampoline, trampoline_timeout, verbose) 186 | 187 | 188 | @app.command() 189 | def memory(pid: int, seconds: int = 60, verbose: bool = False): 190 | payload = pkgutil.get_data(__package__, "payloads/memory.py").decode() 191 | payload = payload.replace("SECONDS_PLACEHOLDER", str(seconds)) 192 | timeout_seconds = int(seconds * 1.1 + 10) 193 | inject_string(pid, payload, trampoline=True, trampoline_timeout=timeout_seconds, verbose=verbose) 194 | 195 | 196 | @app.command() 197 | def stack_trace(pid: int, all_threads: bool = True, amount: int = 1, sleep_duration_s: int = 1, verbose: bool = False): 198 | if amount < 1 or sleep_duration_s < 0: 199 | typer.secho(f"ERROR! 
amount must be at least 1 and sleep_duration_s must be non-negative", fg="red") 200 | return 201 | payload = pkgutil.get_data(__package__, "payloads/stack_trace.py").decode() 202 | payload = payload.replace("ALL_THREADS_PLACEHOLDER", str(all_threads)) 203 | payload = payload.replace("AMOUNT_PLACEHOLDER", str(amount)) 204 | payload = payload.replace("SLEEP_DURATION_S_PLACEHOLDER", str(sleep_duration_s)) 205 | inject_string(pid, payload, trampoline=True, trampoline_timeout=amount*(sleep_duration_s+10), verbose=verbose) 206 | 207 | 208 | @app.command() 209 | def debugger(pid: int, port: int = 5678, verbose: bool = False): 210 | payload = pkgutil.get_data(__package__, "payloads/debugger.py").decode() 211 | payload = payload.replace("LISTENING_PORT_PLACEHOLDER", str(port)) 212 | timeout_seconds = 120 # might take time for payload to install debugpy 213 | inject_string(pid, payload, trampoline=True, trampoline_timeout=timeout_seconds, verbose=verbose) 214 | 215 | 216 | class LoggingLevel(Enum): 217 | DEBUG = "DEBUG" 218 | INFO = "INFO" 219 | WARNING = "WARNING" 220 | ERROR = "ERROR" 221 | CRITICAL = "CRITICAL" 222 | 223 | @app.command() 224 | def set_logging_level(pid: int, level: LoggingLevel, verbose: bool = False): 225 | payload = pkgutil.get_data(__package__, "payloads/set_logging_level.py").decode() 226 | payload = payload.replace("LOGGING_LEVEL_PLACEHOLDER", level.value) 227 | inject_string(pid, payload, trampoline=True, trampoline_timeout=10, verbose=verbose) 228 | 229 | 230 | if __name__ == "__main__": 231 | app() 232 | -------------------------------------------------------------------------------- /src/debug_toolkit/payloads/debugger.py: -------------------------------------------------------------------------------- 1 | def entrypoint(output_path: str): 2 | import subprocess 3 | import json 4 | import sys 5 | import inspect 6 | 7 | LISTENING_PORT = int(LISTENING_PORT_PLACEHOLDER) 8 | EXISTING_LISTENING_PORT = "__DEBUG_TOOLS_LISTENING_PORT" 9 | 10 | def install(package): 11 | subprocess.check_output(f"pip install {package}", shell=True) 12 | 13 | def debug(): 14 | install("debugpy") 15 | import debugpy 16 | 17 | if hasattr(debugpy, EXISTING_LISTENING_PORT): 18 | return f"Debugger already running on port {getattr(debugpy, EXISTING_LISTENING_PORT)}" 19 | 20 | try: 21 | debugpy.listen(("0.0.0.0", LISTENING_PORT)) 22 | setattr(debugpy, EXISTING_LISTENING_PORT, LISTENING_PORT) 23 | return "success" 24 | except Exception as e: 25 | return f"Error attaching a debugger. Did you already attach a debugger? If so, ignore this message. 
Error={e.args[0]}" 26 | 27 | def get_module_paths(): 28 | return {name: inspect.getabsfile(module) for (name, module) in sys.modules.items() 29 | if getattr(module, "__file__", None)} 30 | 31 | message = debug() 32 | 33 | with open(output_path, "w") as output_file: 34 | output_file.write(json.dumps({ 35 | "loaded_modules": get_module_paths(), 36 | "message": message, 37 | })) 38 | -------------------------------------------------------------------------------- /src/debug_toolkit/payloads/hello.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | 4 | def entrypoint(output_path: Path): 5 | with open(output_path, "w") as output_file: 6 | output_file.write("HELLO WORLD") 7 | -------------------------------------------------------------------------------- /src/debug_toolkit/payloads/memory.py: -------------------------------------------------------------------------------- 1 | def entrypoint(output_path: str): 2 | import time 3 | import json 4 | import tracemalloc 5 | 6 | SECONDS = int(SECONDS_PLACEHOLDER) 7 | 8 | limit = 10 9 | tracemalloc.start(15) 10 | base_snapshot = tracemalloc.take_snapshot() 11 | time.sleep(SECONDS) 12 | snapshot = tracemalloc.take_snapshot() 13 | overhead = tracemalloc.get_tracemalloc_memory() 14 | tracemalloc.stop() 15 | 16 | #snapshot = snapshot.filter_traces( 17 | # ( 18 | # tracemalloc.Filter(False, ""), 19 | # #tracemalloc.Filter(False, ""), 20 | # ) 21 | #) 22 | stats = snapshot.compare_to(base_snapshot, "traceback") 23 | top_stats = stats[:limit] 24 | other_stats = stats[limit:] 25 | 26 | data = [{"size": s.size, "count": s.count, "traceback": s.traceback.format()} for s in top_stats] 27 | other_data = {"size": sum(s.size for s in other_stats), "count": sum(s.count for s in other_stats)} 28 | total = sum(s.size for s in stats) 29 | 30 | with open(output_path, "w") as output_file: 31 | output_file.write(json.dumps({"data": data, "other_data": other_data, "total": total, "overhead": overhead})) 32 | -------------------------------------------------------------------------------- /src/debug_toolkit/payloads/set_logging_level.py: -------------------------------------------------------------------------------- 1 | def entrypoint(output_path: str): 2 | import logging 3 | level = "LOGGING_LEVEL_PLACEHOLDER" 4 | logging.getLogger().setLevel(level) 5 | -------------------------------------------------------------------------------- /src/debug_toolkit/payloads/stack_trace.py: -------------------------------------------------------------------------------- 1 | def entrypoint(output_path: str): 2 | import time 3 | import json 4 | import traceback 5 | 6 | ALL_THREADS = bool(ALL_THREADS_PLACEHOLDER) 7 | AMOUNT = int(AMOUNT_PLACEHOLDER) 8 | SLEEP_DURATION_S = int(SLEEP_DURATION_S_PLACEHOLDER) 9 | 10 | thread_stack_dumps = [] 11 | for _ in range(AMOUNT): 12 | try: 13 | stack_trace = get_traceback(ALL_THREADS) 14 | formatted = format_stack_trace(stack_trace) 15 | thread_stack_dumps.append({"time": time.time(), "status": "success", "trace": formatted}) 16 | time.sleep(SLEEP_DURATION_S) 17 | except Exception as e: 18 | thread_stack_dumps.append({"time": time.time(), "status": "error", "trace": traceback.format_exc(), "error": str(e)}) 19 | 20 | with open(output_path, "w+") as output_file: 21 | json_formatted_str = json.dumps(thread_stack_dumps, indent=4) 22 | output_file.write(json_formatted_str) 23 | 24 | 25 | def format_stack_trace(stack_trace: str): 26 | 27 | import sys 28 | import threading 29 | try: 30 | for thr 
in threading.enumerate(): 31 | thread_identity = thr.ident 32 | if not thread_identity: 33 | continue 34 | if sys.version_info >= (3, 8): 35 | # native_id is only available from python 3.8+ 36 | replacement_str = f"{thr.name} tid:{thr.native_id}" 37 | else: 38 | replacement_str = f"{thr.name} thread identity:{thread_identity}" 39 | 40 | thread_id_str = '0x{:016x}'.format(thread_identity) 41 | stack_trace = stack_trace.replace(thread_id_str, replacement_str) 42 | except Exception: 43 | # formatting sometimes can fail based on python version 44 | pass 45 | finally: 46 | return stack_trace 47 | 48 | 49 | 50 | def get_traceback(all_threads: bool): 51 | import tempfile 52 | import faulthandler 53 | with tempfile.TemporaryFile() as tmp: 54 | faulthandler.dump_traceback(file=tmp, all_threads=all_threads) 55 | tmp.seek(0) 56 | return tmp.read().decode("utf-8") 57 | 58 | -------------------------------------------------------------------------------- /src/debug_toolkit/trampolines/simple.py: -------------------------------------------------------------------------------- 1 | import threading 2 | 3 | output_path = "OUTPUT_PATH_PLACEHOLDER" 4 | done_path = "DONE_PATH_PLACEHOLDER" 5 | 6 | 7 | def wrapper(): 8 | try: 9 | entrypoint(output_path) 10 | exc = None 11 | except Exception as e: 12 | exc = e 13 | 14 | with open(done_path, "w") as done_file: 15 | if exc is None: 16 | done_file.write("SUCCESS") 17 | else: 18 | done_file.write(f"ERROR: {exc}") 19 | 20 | 21 | thread = threading.Thread(target=wrapper, daemon=True) 22 | thread.start() 23 | -------------------------------------------------------------------------------- /test.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: python-tools 5 | labels: 6 | app: python-tools 7 | spec: 8 | replicas: 1 9 | selector: 10 | matchLabels: 11 | app: python-tools 12 | template: 13 | metadata: 14 | labels: 15 | app: python-tools 16 | spec: 17 | hostPID: true 18 | containers: 19 | - name: python-tools 20 | image: us-central1-docker.pkg.dev/genuine-flight-317411/devel/debug-toolkit 21 | imagePullPolicy: Always 22 | securityContext: 23 | privileged: true 24 | capabilities: 25 | add: 26 | - SYS_PTRACE 27 | --------------------------------------------------------------------------------
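
Example payload (illustrative only, not a file in this repository): every payload above follows the same convention — all code, including imports, lives inside `entrypoint(output_path)`, and results are written to `output_path` so that `do_trampoline_injection` can read them back once the trampoline marks the run as done. A minimal sketch of a new payload in that style, assuming a hypothetical file `src/debug_toolkit/payloads/gc_stats.py` (the name and the collected fields are invented for illustration):

```
def entrypoint(output_path: str):
    # Per the README, ALL code (even imports) must live inside entrypoint,
    # because errors outside of it are neither handled nor reported.
    import gc
    import json
    import sys

    data = {
        "python_version": sys.version,
        "gc_counts": gc.get_count(),
        "gc_thresholds": gc.get_threshold(),
        "tracked_objects": len(gc.get_objects()),
        "loaded_module_count": len(sys.modules),
    }

    # Whatever is written to output_path is echoed back by do_trampoline_injection
    # after the trampoline reports SUCCESS in the done file.
    with open(output_path, "w") as output_file:
        output_file.write(json.dumps(data))
```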
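
A matching wrapper command would then be added to `src/debug_toolkit/main.py`, mirroring the existing `memory` and `debugger` commands (again, `gc_stats` is a hypothetical name used only for this sketch):

```
@app.command()
def gc_stats(pid: int, verbose: bool = False):
    # Load the payload (this one has no placeholders to substitute) and inject it
    # via the trampoline so the JSON result is read back and printed.
    payload = pkgutil.get_data(__package__, "payloads/gc_stats.py").decode()
    inject_string(pid, payload, trampoline=True, trampoline_timeout=30, verbose=verbose)
```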