├── .env.sample ├── .gitignore ├── .vscode ├── launch.json └── tasks.json ├── .vscodeignore ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── azdebugrelay ├── __init__.py ├── debug_relay.py ├── debugpyex.py └── threads.py ├── images ├── azdebugrelay-icon.png └── debug-relay-diagram.png ├── make.vsix.sh ├── package-lock.json ├── package.json ├── pyproject.toml ├── requirements.txt ├── samples ├── README.md ├── __init__.py ├── azure_ml_advanced │ ├── README.md │ ├── __init__.py │ ├── publish_pipeline.py │ ├── remote_pipeline_demo.py │ └── steps │ │ ├── amldebugutils │ │ ├── __init__.py │ │ └── debugutils.py │ │ ├── mpi │ │ ├── __init__.py │ │ ├── mpi_step.py │ │ └── mpi_step_starter.py │ │ ├── parallel_step.py │ │ └── single_step.py ├── azure_ml_simple │ ├── README.md │ ├── __init__.py │ ├── deploy_and_run.py │ └── steps │ │ └── train.py └── simple_demo │ ├── README.md │ └── remote_server_demo.py ├── tsconfig.json └── vscode-extension └── extension.ts /.env.sample: -------------------------------------------------------------------------------- 1 | # Environment example for the Advanced Azure ML sample (samples/azure_ml_advanced) 2 | 3 | WORKSPACE_NAME = "" 4 | RESOURCE_GROUP = "" 5 | SUBSCRIPTION_ID = "" 6 | TENANT_ID = "" 7 | APP_ID = "" 8 | APP_SECRET = "" 9 | REGION = "" 10 | COMPUTE_NAME = "debugclust" 11 | PIPELINE_NAME = "debug_pipeline" 12 | DEBUG_GLOBAL_AZRELAY_CONNECTION_STRING = "" 13 | DEBUG_GLOBAL_CONNECTION_SECRET_NAME = "debugarglobalsecret" 14 | 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # local AzDebugRelay configuration, with secrets! 2 | azrelay.json 3 | .azrelay.json 4 | 5 | # AML config 6 | config.json 7 | 8 | # TF environment 9 | env_tf/ 10 | 11 | # VS Code settings, secrets may be here 12 | .vscode/settings.json 13 | 14 | # Node modules 15 | node_modules/ 16 | 17 | # VSCode extension builds 18 | out/ 19 | 20 | # Byte-compiled / optimized / DLL files 21 | __pycache__/ 22 | *.py[cod] 23 | *$py.class 24 | 25 | # C extensions 26 | *.so 27 | 28 | # Distribution / packaging 29 | .Python 30 | build/ 31 | develop-eggs/ 32 | dist/ 33 | downloads/ 34 | eggs/ 35 | .eggs/ 36 | lib/ 37 | lib64/ 38 | parts/ 39 | sdist/ 40 | var/ 41 | wheels/ 42 | pip-wheel-metadata/ 43 | share/python-wheels/ 44 | *.egg-info/ 45 | .installed.cfg 46 | *.egg 47 | MANIFEST 48 | 49 | # PyInstaller 50 | # Usually these files are written by a python script from a template 51 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
52 | *.manifest 53 | *.spec 54 | 55 | # Installer logs 56 | pip-log.txt 57 | pip-delete-this-directory.txt 58 | 59 | # Unit test / coverage reports 60 | htmlcov/ 61 | .tox/ 62 | .nox/ 63 | .coverage 64 | .coverage.* 65 | .cache 66 | nosetests.xml 67 | coverage.xml 68 | *.cover 69 | *.py,cover 70 | .hypothesis/ 71 | .pytest_cache/ 72 | 73 | # Translations 74 | *.mo 75 | *.pot 76 | 77 | # Django stuff: 78 | *.log 79 | local_settings.py 80 | db.sqlite3 81 | db.sqlite3-journal 82 | 83 | # Flask stuff: 84 | instance/ 85 | .webassets-cache 86 | 87 | # Scrapy stuff: 88 | .scrapy 89 | 90 | # Sphinx documentation 91 | docs/_build/ 92 | 93 | # PyBuilder 94 | target/ 95 | 96 | # Jupyter Notebook 97 | .ipynb_checkpoints 98 | 99 | # IPython 100 | profile_default/ 101 | ipython_config.py 102 | 103 | # pyenv 104 | .python-version 105 | 106 | # pipenv 107 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 108 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 109 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 110 | # install all needed dependencies. 111 | #Pipfile.lock 112 | 113 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 114 | __pypackages__/ 115 | 116 | # Celery stuff 117 | celerybeat-schedule 118 | celerybeat.pid 119 | 120 | # SageMath parsed files 121 | *.sage.py 122 | 123 | # Environments 124 | .env 125 | .venv 126 | env/ 127 | venv/ 128 | ENV/ 129 | env.bak/ 130 | venv.bak/ 131 | 132 | # Spyder project settings 133 | .spyderproject 134 | .spyproject 135 | 136 | # Rope project settings 137 | .ropeproject 138 | 139 | # mkdocs documentation 140 | /site 141 | 142 | # mypy 143 | .mypy_cache/ 144 | .dmypy.json 145 | dmypy.json 146 | 147 | # Pyre type checker 148 | .pyre/ 149 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Run Extension", 9 | "type": "extensionHost", 10 | "request": "launch", 11 | "runtimeExecutable": "${execPath}", 12 | "args": ["--extensionDevelopmentPath=${workspaceFolder}"], 13 | "outFiles": ["${workspaceFolder}/out/**/*.js"], 14 | "preLaunchTask": "npm: watch" 15 | }, 16 | { 17 | "name": "Python: Simple Server Example", 18 | "type": "python", 19 | "request": "launch", 20 | "program": "${workspaceFolder}/samples/simple_demo/remote_server_demo.py", 21 | "args": ["--debug", "attach"], 22 | "cwd": "${workspaceFolder}", 23 | "console": "integratedTerminal" 24 | }, 25 | { 26 | "name": "Python: Listen 5678", 27 | "type": "python", 28 | "request": "attach", 29 | "listen": { 30 | "host": "127.0.0.1", 31 | "port": 5678 32 | }, 33 | "pathMappings": [ 34 | { 35 | "localRoot": "${workspaceFolder}", 36 | "remoteRoot": "." 37 | } 38 | ] 39 | }, 40 | { 41 | "name": "Python: Listen 5679", 42 | "type": "python", 43 | "request": "attach", 44 | "listen": { 45 | "host": "127.0.0.1", 46 | "port": 5679 47 | }, 48 | "pathMappings": [ 49 | { 50 | "localRoot": "${workspaceFolder}", 51 | "remoteRoot": "." 
52 | } 53 | ] 54 | }, 55 | { 56 | "name": "Python: Listen 5680", 57 | "type": "python", 58 | "request": "attach", 59 | "listen": { 60 | "host": "127.0.0.1", 61 | "port": 5680 62 | }, 63 | "pathMappings": [ 64 | { 65 | "localRoot": "${workspaceFolder}", 66 | "remoteRoot": "." 67 | } 68 | ] 69 | }, 70 | { 71 | "name": "Python: Deploy Azure ML Advanced", 72 | "type": "python", 73 | "request": "launch", 74 | "program": "${workspaceFolder}/samples/azure_ml_advanced/remote_pipeline_demo.py", 75 | "args": [ 76 | "--is-debug", "true", 77 | "--debug-relay-connection-name", "${input:hybridConnName}" 78 | ], 79 | "cwd": "${workspaceFolder}", 80 | "console": "integratedTerminal" 81 | }, 82 | { 83 | "name": "Python: Current File", 84 | "type": "python", 85 | "request": "launch", 86 | "program": "${file}", 87 | "console": "integratedTerminal" 88 | }], 89 | "inputs": [ 90 | { 91 | "id": "hybridConnName", 92 | "type": "promptString", 93 | "description": "Enter Hybrid Connection Name" 94 | } 95 | ], 96 | "compounds": [ 97 | { 98 | "name": "Python: AML Advanced 3 Listeners", 99 | "configurations": [ 100 | "Python: Listen 5678", 101 | "Python: Listen 5679", 102 | "Python: Listen 5680" 103 | ] 104 | } 105 | ] 106 | } 107 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "type": "npm", 6 | "script": "watch", 7 | "problemMatcher": "$tsc-watch", 8 | "isBackground": true, 9 | "presentation": { 10 | "reveal": "never" 11 | }, 12 | "group": { 13 | "kind": "build", 14 | "isDefault": true 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /.vscodeignore: -------------------------------------------------------------------------------- 1 | .git 2 | .gitignore 3 | .vscode 4 | azrelay.json 5 | .azrelay.json 6 | node_modules/ 7 | * 8 | */** 9 | !out/extension.js 10 | !azdebugrelay/*.py 11 | !README.md 12 | !LICENSE 13 | !package.json 14 | !package.nls.json 15 | !README.md 16 | !ThirdPartyNotices.txt 17 | !images/* -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Vlad Kolesnikov 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | init:
2 | 	pip install -r requirements.txt
3 | 
4 | #test:
5 | #	add tests
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Azure Debugging Relay for Python
2 | 
3 | Azure Debugging Relay is a [Visual Studio Code](https://code.visualstudio.com/) extension and a Python package for distributed remote debugging. It solves the problem of debugging code running in the cloud and on remote devices,
4 | simultaneously across multiple nodes and between different networks.
5 | 
6 | * [Azure Debugging Relay extension](https://marketplace.visualstudio.com/items?itemName=VladKolesnikov-vladkol.azure-debug-relay) on Visual Studio Marketplace
7 | * [azure-debug-relay](https://pypi.org/project/azure-debug-relay/) package on PyPI
8 | 
9 | Azure Debugging Relay uses [debugpy](https://github.com/microsoft/debugpy) and the [Azure Relay](https://docs.microsoft.com/en-us/azure/azure-relay/relay-what-is-it) service to create a debugging tunnel between 2 machines:
10 | 
11 | 1. Your local Visual Studio Code debugger in `listen` mode.
12 | 1. Your remote code in `attach` mode.
13 | 
14 | Both machines can be isolated behind NAT or virtual networks.
15 | Azure Relay maintains a secure tunnel, just as if VS Code and the remote process you debug were running on the same `localhost` network.
16 | Remote components can run in any cloud or local network, with or without public internet access -- all they need is to be able to connect to the Azure Relay resource.
17 | 
18 | ![Azure Relay Debugging Bridge](https://raw.githubusercontent.com/vladkol/azure-debug-relay/main/images/debug-relay-diagram.png)
19 | 
20 | The debugging tunnel is handled by the **[Azure Relay Bridge](https://github.com/vladkol/azure-relay-bridge)** utility, which is downloaded and installed automatically by Azure Debugging Relay. Azure Relay Bridge can maintain secure TCP and UDP tunnels for different purposes.
21 | 
22 | > We currently use a private fork of the [Azure Relay Bridge](https://github.com/Azure/azure-relay-bridge) repo.
23 | 
24 | ## Requirements
25 | 
26 | * Python 3.6+
27 | * debugpy 1.2.1+
28 | * Visual Studio Code 1.34+ (for using the VS Code extension)
29 | 
30 | The Azure Relay Bridge tool is a .NET Core application, so you may need to install `apt-transport-https` and other .NET Core 3.1 Runtime prerequisites on [Linux](https://docs.microsoft.com/en-us/dotnet/core/install/linux) and [Windows](https://docs.microsoft.com/en-us/dotnet/core/install/windows?tabs=netcore31).
31 | 
32 | > You don't have to install the .NET Runtime itself - Azure Relay Bridge builds are self-contained.
33 | 
34 | ### Supported Operating Systems
35 | 
36 | * Ubuntu 18+
37 | * Debian 10+
38 | * macOS 10+
39 | * Windows 10
40 | 
41 | ## Installation
42 | 
43 | **On the debugger side (usually your dev machine with Visual Studio Code)**:
44 | 
45 | > Install the [Azure Debugging Relay extension](https://marketplace.visualstudio.com/items?itemName=VladKolesnikov-vladkol.azure-debug-relay) from the Visual Studio Marketplace.
46 | 
47 | **On the server side**:
48 | 
49 | > `python3 -m pip install azure-debug-relay`
50 | 
51 | ## Usage
52 | 
53 | Before you start debugging with Azure Debugging Relay, there are 3 places you configure it:
54 | 
55 | 1. **Azure Portal or CLI** where you create an Azure Relay resource and an Azure Hybrid Connection in it.
56 | 1. **Local dev machine** where you run Visual Studio Code, its Python extension,
57 | and the Azure Debugging Relay extension with 2 configuration settings.
58 | 1. **Remote machine** where you run the same code files that open locally in VS Code,
59 | with 2 lines of code that initiate a debugging session at a certain point of the execution flow.
60 | 
61 | ### In Azure Portal
62 | 
63 | 1. [Create an Azure Relay resource](https://ms.portal.azure.com/#create/Microsoft.Relay). Preferably create it in the region closest to your location.
64 | 1. Once created, switch to the resource, and select the `Hybrid Connections` option in the vertical panel.
65 | 1. Add a hybrid connection (`+ Hybrid Connection` button), give it a memorable name (e.g. `test` 🙂) - this is your **Hybrid Connection Name**.
66 | 1. Switch to that new hybrid connection, then select `Shared Access Policies` in the vertical panel.
67 | 1. Add a new policy with `Send` and `Listen` permissions.
68 | 1. Once created, copy its `Primary Connection String` - this is your **Connection String**.
69 | 
70 | #### **Azure CLI version**
71 | 
72 | Choose your own name instead of `mydebugrelay1` for the Azure Relay resource, and your own Hybrid Connection name instead of `debugrelayhc1`. The same applies to `debugRelayResourceGroup` as the resource group name.
73 | 
74 | ```cmd
75 | az group create --name debugRelayResourceGroup --location westus2
76 | 
77 | az relay namespace create --resource-group debugRelayResourceGroup --name mydebugrelay1 --location westus2
78 | 
79 | az relay hyco create --resource-group debugRelayResourceGroup --namespace-name mydebugrelay1 --name debugrelayhc1
80 | 
81 | az relay hyco authorization-rule create --resource-group debugRelayResourceGroup --namespace-name mydebugrelay1 --hybrid-connection-name debugrelayhc1 --name sendlisten --rights Send Listen
82 | 
83 | az relay hyco authorization-rule keys list --resource-group debugRelayResourceGroup --namespace-name mydebugrelay1 --hybrid-connection-name debugrelayhc1 --name sendlisten
84 | ```
85 | 
86 | The last command will show you something like this:
87 | 
88 | ```json
89 | {
90 |   "keyName": "sendlisten",
91 |   "primaryConnectionString": "Endpoint=sb://mydebugrelay1.servicebus.windows.net/;SharedAccessKeyName=sendlisten;SharedAccessKey=REDACTED1;EntityPath=debugrelayhc1",
92 |   "primaryKey": "REDACTED1",
93 |   "secondaryConnectionString": "Endpoint=sb://mydebugrelay1.servicebus.windows.net/;SharedAccessKeyName=sendlisten;SharedAccessKey=REDACTED2;EntityPath=debugrelayhc1",
94 |   "secondaryKey": "REDACTED2"
95 | }
96 | ```
97 | 
98 | Use the `primaryConnectionString` or `secondaryConnectionString` value as your **Connection String**.
99 | 
100 | **Hybrid Connection Name** would be the one you chose instead of `debugrelayhc1`.
101 | 
102 | 
103 | ### Remotely with `remote_server_demo.py` or your code
104 | 
105 | The Remote Server example (in `samples/simple_demo/remote_server_demo.py`) assumes that Azure Relay credentials are passed via an `.azrelay.json` file in the current directory or via environment variables. Therefore, you have 2 options:
106 | 
107 | **Option 1**: Create an `.azrelay.json` file in your workspace directory root (or whatever directory will be "current"),
108 | and set 2 variables:
109 | 
110 | 1. `AZRELAY_CONNECTION_STRING` to your **Connection String**.
111 | 1. `AZRELAY_CONNECTION_NAME` to your **Hybrid Connection Name**.
112 | 
113 | For example:
114 | 
115 | ```json
116 | {
117 |   "AZRELAY_CONNECTION_STRING": "Endpoint=sb://mydebugrelay1.servicebus.windows.net/;SharedAccessKeyName=sendlisten;SharedAccessKey=REDACTED1;EntityPath=debugrelayhc1",
118 |   "AZRELAY_CONNECTION_NAME": "debugrelayhc1"
119 | }
120 | ```
121 | 
122 | Make sure you add `.azrelay.json` to `.gitignore` so it won't be committed.
123 | 
124 | **Option 2**: Assign the same 2 values as environment variables instead: `AZRELAY_CONNECTION_STRING` and `AZRELAY_CONNECTION_NAME`.
125 | 
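If your own remote code needs the same configuration logic, the `DebugRelay` class exposes `from_config()` and `from_environment()` helpers that cover both options. A minimal sketch (the file name `.azrelay.json` and port `5678` are just the defaults used by the samples):

```python
from azdebugrelay import DebugRelay, DebugMode

# Option 1 first: read .azrelay.json from the current directory (returns None if the file is missing),
# then fall back to Option 2: AZRELAY_CONNECTION_STRING / AZRELAY_CONNECTION_NAME environment variables.
debug_relay = DebugRelay.from_config(".azrelay.json", debug_mode=DebugMode.Connect, ports=5678)
if debug_relay is None:
    debug_relay = DebugRelay.from_environment(debug_mode=DebugMode.Connect, ports=5678)
```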
126 | ### Prepare local Visual Studio Code
127 | 
128 | Use an `.azrelay.json` file in the root of your workspace as above, or `.vscode/settings.json` with the following settings (with your actual values):
129 | 
130 | ```json
131 | {
132 |   "azure-debug-relay.azrelay-connection-string": "Endpoint=sb://your-relay.servicebus.windows.net/;SharedAccessKeyName=key_name;SharedAccessKey=REDACTED;EntityPath=test",
133 | 
134 |   "azure-debug-relay.azrelay-connection-name": "test"
135 | }
136 | ```
137 | 
138 | > Whenever the Azure Debugging Relay VS Code extension detects non-empty `azure-debug-relay.hybrid-connection-string` and `azure-debug-relay.hybrid-connection-name` settings (`.vscode/settings.json`) or `AZRELAY_CONNECTION_STRING` and `AZRELAY_CONNECTION_NAME` in the `.azrelay.json` file, it launches Azure Relay Bridge every time a debugging session with debugpy in `listen` mode is about to begin. If the extension settings are not empty and `.azrelay.json` is also present, the values from the extension settings (`.vscode/settings.json`) take precedence.
139 | 
140 | The Visual Studio Code extension ignores the `AZRELAY_CONNECTION_STRING` and `AZRELAY_CONNECTION_NAME` environment variables.
141 | 
142 | ### Start debugging in Visual Studio Code
143 | 
144 | This step must be done on your dev machine in Visual Studio Code before launching the remote code.
145 | 
146 | 1. Open `remote_server_demo.py` and put a breakpoint in the `do_work()` function.
147 | 1. Make sure your `.vscode/launch.json` has the `Python: Listen 5678` configuration as in this repo's `.vscode/launch.json`.
148 | 1. Start debugging in your local Visual Studio Code with the `Python: Listen 5678` configuration.
149 | 
150 | Notice how the debugger maps paths on the local and the remote machines.
151 | If your code has a different structure remotely, you may need to provide more sophisticated path mappings. Here is that piece in `.vscode/launch.json`:
152 | 
153 | ```json
154 | "pathMappings": [
155 |     {
156 |         "localRoot": "${workspaceFolder}",
157 |         "remoteRoot": "."
158 |     }
159 | ]
160 | ```
161 | 
162 | It tells VS Code that the workspace directory locally is mapped to the "current" directory remotely.
163 | 
164 | When the debugger steps through a file remotely, it needs to find the corresponding file in your local VS Code workspace.
165 | When debugging `remote_server_demo.py`, the debugger maps `./samples/simple_demo/remote_server_demo.py` remotely to `${workspaceFolder}/samples/simple_demo/remote_server_demo.py` locally.
166 | 
167 | ### Launch the example on the remote machine
168 | 
169 | 1. Clone the repo.
170 | 1. Start `python3 ./samples/simple_demo/remote_server_demo.py --debug=attach`.
Notice that the current directory must contain the `.azrelay.json` file unless the configuration is provided with environment variables.
171 | 
172 | > The terminal session where you run step 2 must have the repo's directory as its current directory - otherwise the local and remote directories won't map onto each other.
173 | 
174 | If everything works as it's supposed to, you will hit a breakpoint in your local Visual Studio Code.
175 | 
176 | ## Azure Debugging Relay Python API
177 | 
178 | `remote_server_demo.py` shows how you can use Azure Debugging Relay (the azure-debug-relay package) with your code.
179 | 
180 | The **azdebugrelay** module contains the DebugRelay class that installs and launches Azure Relay Bridge:
181 | 
182 | ```python
183 | from azdebugrelay import DebugRelay, DebugMode, debugpy_connect_with_timeout
184 | 
185 | access_key_or_connection_string = "AZURE RELAY HYBRID CONNECTION STRING OR ACCESS KEY"
186 | relay_connection_name = "HYBRID CONNECTION NAME" # your Hybrid Connection name
187 | debug_mode = DebugMode.Connect # or DebugMode.WaitForConnection if connecting from another end
188 | hybrid_connection_url = "HYBRID CONNECTION URL" # can be None if access_key_or_connection_string is a connection string
189 | host = "127.0.0.1" # local hostname or ip address the debugger starts on
190 | port = 5678 # any available port that you can use within your machine, may be a list of multiple ports
191 | debugpy_timeout = 15 # 15 seconds for debugpy to connect
192 | 
193 | debug_relay = DebugRelay(access_key_or_connection_string, relay_connection_name, debug_mode, hybrid_connection_url, host, port)
194 | debug_relay.open()
195 | 
196 | # attach to a remote debugger (usually from remote server code) with debug_mode = DebugMode.Connect
197 | debugpy_connect_with_timeout(host, port, debugpy_timeout) # use instead of debugpy.connect
198 | # if debug_mode = DebugMode.WaitForConnection, we are going to listen instead
199 | # debugpy.listen((host, port))
200 | # if debug_mode = DebugMode.WaitForConnection, you can start DebugRelay on multiple ports (ports parameter is a list)
201 | # debugpy.listen must be called with each of these ports
202 | 
203 | # Debug, debug, debug
204 | # ...
205 | # ...
206 | 
207 | debug_relay.close()
208 | ```
209 | 
210 | * `access_key_or_connection_string` - SAS Policy key or Connection String for the Azure Relay Hybrid Connection. Must have `Send` and `Listen` permissions.
211 | * `relay_connection_name` - name of the Hybrid Connection.
212 | * `debug_mode` - debug connection mode. `DebugMode.WaitForConnection` when starting in listening mode, `DebugMode.Connect` for attaching to a remote debugger.
213 | * `hybrid_connection_url` - Hybrid Connection URL. Required when `access_key_or_connection_string` is an access key; otherwise it is ignored and may be None.
214 | * `host` - Local hostname or ip address the debugger starts on, `127.0.0.1` by default.
215 | * `port` - debugging port, `5678` by default.
216 | 
217 | > We added the `debugpy_connect_with_timeout` function on top of **debugpy.connect()**.
218 | It accepts a `connect_timeout_seconds` parameter - how long it should wait for `debugpy.connect()` to connect.
219 | If the connection is not successfully made within the timeout,
220 | the debugging session aborts, and that can be handled in your code:
221 | `debugpy_connect_with_timeout()` returns `True` if the connection was successful, and `False` otherwise.
222 | 
223 | Notice that DebugRelay accepts multiple ports to work with (**the `ports` parameter is a list**).
224 | That's because Azure Relay Bridge supports forwarding on multiple ports.
225 | This feature is primarily used by DebugRelay internally
226 | for [Simultaneous distributed debugging](#simultaneous-distributed-debugging).
227 | 
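For example, here is a minimal sketch of attaching through the relay while tolerating a missed connection (the environment-variable configuration, port `5678` and the 15-second timeout are just assumptions matching the defaults above):

```python
from azdebugrelay import DebugRelay, DebugMode, debugpy_connect_with_timeout

# Build the relay from AZRELAY_CONNECTION_STRING / AZRELAY_CONNECTION_NAME.
debug_relay = DebugRelay.from_environment(debug_mode=DebugMode.Connect, ports=5678)
if debug_relay is not None:
    debug_relay.open()
    if not debugpy_connect_with_timeout("127.0.0.1", 5678, connect_timeout_seconds=15):
        # No debugger answered within 15 seconds - close the tunnel and keep running undebugged.
        debug_relay.close()
```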
228 | ### Azure Machine Learning samples
229 | 
230 | The **Simple Azure ML sample** is located in the `samples/azure_ml_simple` directory.
231 | 
232 | It has 2 components:
233 | 
234 | 1. `deploy_and_run.py` script that deploys and launches an Azure ML pipeline with a single step.
235 | 2. `steps/train.py` script which contains that simple step.
236 | 
237 | Look at the [sample's readme file](samples/azure_ml_simple/README.md).
238 | 
239 | The **Advanced Azure Machine Learning sample** is located in `samples/azure_ml_advanced`, and demonstrates a complex debugging scenario with parallel steps.
240 | 
241 | Look at the [advanced sample's readme file](samples/azure_ml_advanced/README.md).
242 | 
243 | ## Simultaneous distributed debugging
244 | 
245 | You can debug multiple simultaneously running remote nodes using different ports.
246 | Each execution flow you want to debug must use a separate port - both locally and remotely.
247 | 
248 | In Visual Studio Code, this is achievable via so-called "[compound launch configurations](https://code.visualstudio.com/docs/editor/debugging#_compound-launch-configurations)".
249 | Compound launch configurations combine multiple launch configurations and therefore start them at the same time.
250 | 
251 | Each launch configuration must be a Python `listen` configuration with a unique name and port:
252 | 
253 | ```json
254 | {
255 |     "name": "Python: Listen 5678",
256 |     "type": "python",
257 |     "request": "attach",
258 |     "listen": {
259 |         "host": "127.0.0.1",
260 |         "port": 5678
261 |     },
262 |     "pathMappings": [
263 |         {
264 |             "localRoot": "${workspaceFolder}",
265 |             "remoteRoot": "."
266 |         }
267 |     ]
268 | }
269 | ```
270 | 
271 | You need as many launch configurations as the number of simultaneous execution flows or nodes you'd like to debug.
272 | Then you combine them in `.vscode/launch.json` as a compound:
273 | 
274 | ```json
275 | "compounds": [
276 |     {
277 |         "name": "Python: AML Advanced 3 Listeners",
278 |         "configurations": [
279 |             "Python: Listen 5678",
280 |             "Python: Listen 5679",
281 |             "Python: Listen 5680"
282 |         ]
283 |     }
284 | ]
285 | ```
286 | 
287 | Remotely, each node you debug should be aware of the port number it should use, as in the sketch below.
288 | That port number must be passed to the `DebugRelay` object and to `debugpy_connect_with_timeout()`.
289 | 
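A minimal sketch of that per-node port selection - `NODE_INDEX` here is a hypothetical environment variable standing in for whatever rank or worker index your platform provides (an MPI rank, an Azure ML parallel-run worker id, and so on):

```python
import os

from azdebugrelay import DebugRelay, DebugMode, debugpy_connect_with_timeout

# NODE_INDEX is a placeholder - substitute your platform's rank/worker index.
node_index = int(os.environ.get("NODE_INDEX", "0"))
port = 5678 + node_index  # lines up with "Python: Listen 5678/5679/5680" locally

debug_relay = DebugRelay.from_environment(debug_mode=DebugMode.Connect, ports=port)
if debug_relay is not None:
    debug_relay.open()
    debugpy_connect_with_timeout("127.0.0.1", port, connect_timeout_seconds=15)
```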
290 | ## Troubleshooting
291 | 
292 | Why use [Azure Relay Bridge](https://github.com/Azure/azure-relay-bridge), a .NET Core application that we have to install and use via `subprocess` calls?
293 | 
294 | Reasons:
295 | 
296 | 1. Azure Relay has SDKs for .NET, Java, and Node. [No Python SDK or examples](https://github.com/Azure/azure-relay/issues/28#issuecomment-390778193).
297 | 1. Azure Relay Bridge does a lot of things we would have to implement otherwise. It is a great tool that can help you connect different networks for many purposes: RDP, SSH and other protocols over TCP or UDP.
298 | 
299 | The [private fork](https://github.com/vladkol/azure-relay-bridge) we are currently using exists only to provide .NET Core 3.1 builds of the most recent code. There are pending pull requests: [one](https://github.com/Azure/azure-relay-bridge/pull/22) and [two](https://github.com/Azure/azure-relay-bridge/pull/19).
300 | 
301 | ### Known issues
302 | 
303 | > **On macOS, there may be a situation when Azure Relay Bridge (`azbridge`) cannot connect when creating a local forwarder** (`-L` option).
304 | 
305 | **Reason**: .NET Core wants your Computer Name to be added to the `/etc/hosts` file, and the hostname to be configured.
306 | 
307 | **Workaround**: Make the necessary edits to the `/etc/hosts` file, and configure the hostname:
308 | 
309 | 1. Look for your computer's name in `Settings → Sharing`.
310 | 1. Run the following command: `scutil --set HostName "your-computer-name"`. (**replace `your-computer-name` with your computer's name**)
311 | 1. Open `/etc/hosts` in a text editor in *sudo* mode (VS Code can save it later in *sudo* mode).
312 | 1. Add the following line (**replace `your-computer-name` with your computer's name**). Save the file.
313 | 
314 | ```text
315 | 127.0.0.1 your-computer-name
316 | ```
317 | 
318 | > **I launched the debugger as described and nothing happened**
319 | 
320 | **Reason**: you *probably* didn't put a breakpoint in your VS Code locally. Make sure that breakpoint is in a place that your server process actually runs through.
321 | 
322 | > **I do everything right, but nothing works**
323 | 
324 | **Solution**: Stop all debugging sessions (if any). Kill all `azbridge` processes locally and remotely. Try again.
325 | 
326 | Doesn't help? [File an issue](https://github.com/vladkol/azure-debug-relay/issues/new)! Thank you!
327 | 
--------------------------------------------------------------------------------
/azdebugrelay/__init__.py:
--------------------------------------------------------------------------------
1 | from .debug_relay import DebugRelay, DebugMode
2 | from .debugpyex import DebugPyEx
3 | 
4 | __all__ = [
5 |     "DebugRelay",
6 |     "DebugMode",
7 |     "debugpy_connect_with_timeout"
8 | ]
9 | 
10 | 
11 | def debugpy_connect_with_timeout(host, port, connect_timeout_seconds):
12 |     return DebugPyEx.connect(str(host), int(port), float(connect_timeout_seconds))
--------------------------------------------------------------------------------
/azdebugrelay/debug_relay.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 | import os
3 | import signal
4 | import argparse
5 | import sys
6 | import logging
7 | import subprocess
8 | import stat
9 | import threading
10 | import urllib.request
11 | from pathlib import Path
12 | import ssl
13 | import platform
14 | import tarfile
15 | import time
16 | import typing
17 | import json
18 | import zipfile
19 | 
20 | class DebugMode(Enum):
21 |     """Debugging mode enum:
22 |     waiting for another machine to connect, or connecting to another machine
23 |     """
24 |     # Start a remote forwarder with Azure Relay Bridge, another side is attaching
25 |     WaitForConnection = 1
26 |     Listen = WaitForConnection
27 |     # Start a local forwarder with Azure Relay Bridge, another side is listening
28 |     Connect = 2
29 |     Attach = Connect
30 | 
31 | 
32 | class DebugRelay(object):
33 |     """Initializes and controls Azure Relay Bridge process.
34 | 
35 |     Raises:
36 |         ValueError: Invalid arguments.
37 |         TimeoutError: Azure Relay Bridge took too long to connect.
38 |     """
39 |     # Azure Relay Bridge executable name
40 |     relay_app_name = "azbridge"
41 |     # `~/.azdebugrelay` installation directory
42 |     relay_dir_name = ".azdebugrelay"
43 |     # current Azure Debugging Relay build
44 |     relay_version_name = "0.2.9"
45 |     # are we running on Windows?
46 | is_windows = platform.platform().lower().startswith("windows") 47 | 48 | DEFAULT_AZ_RELAY_BRIDGE_UBUNTU_DOWLOAD =\ 49 | "https://github.com/vladkol/azure-relay-bridge/releases/download/v0.2.9/azbridge.azrelay_folder-rel.ubuntu.18.04-x64.tar.gz" 50 | DEFAULT_AZ_RELAY_BRIDGE_MACOS_DOWLOAD =\ 51 | "https://github.com/vladkol/azure-relay-bridge/releases/download/v0.2.9/azbridge.0.2.9-rel.osx-x64.tar.gz" 52 | DEFAULT_AZ_RELAY_BRIDGE_DEBIAN_DOWLOAD =\ 53 | "https://github.com/vladkol/azure-relay-bridge/releases/download/v0.2.9/azbridge.0.2.9-rel.debian.10-x64.tar.gz" 54 | DEFAULT_AZ_RELAY_BRIDGE_WINDOWS_DOWLOAD =\ 55 | "https://github.com/vladkol/azure-relay-bridge/releases/download/v0.2.9/azbridge.0.2.9-rel.win10-x64.zip" 56 | 57 | _installed_az_relay = False 58 | _relay_config_file = None 59 | 60 | 61 | def __init__(self, 62 | access_key_or_connection_string: str, 63 | relay_connection_name: str, 64 | debug_mode: DebugMode = DebugMode.WaitForConnection, 65 | hybrid_connection_url: str = None, 66 | host: str ="127.0.0.1", 67 | ports: typing.Union[str, int, typing.List[str], typing.List[int]] = "5678", 68 | az_relay_connection_wait_time: float = 60, 69 | logger: logging.Logger = logging.root): 70 | """Initializes DebugRelay object. 71 | 72 | Args: 73 | access_key_or_connection_string (str): access key or connection string for Azure Relay Hybrid Connection 74 | relay_connection_name (str): name of Azure Relay Hybrid Connection 75 | debug_mode (DebugMode, optional): Connect or Listen (WaitForConnection). Defaults to DebugMode.WaitForConnection. 76 | hybrid_connection_url (str, optional): optional URL of Hybrid Connection. Defaults to None. 77 | Required when access_key_or_connection_string is an access key. 78 | host (str, optional): Local hostname/address the debugging starts on. Defaults to "127.0.0.1". 79 | ports (typing.List[str], optional): Any available ports that you can use within your machine. 80 | This port will be connected to or exposed by Azure Relay Bridge. Defaults to ["5678"]. 81 | az_relay_connection_wait_time (float, optional): Maximum time to wait for Azure Relay Bridge 82 | to initialize and connect when open() is called with wait_for_connection == True. Defaults to 60. 83 | 84 | Raises: 85 | ValueError: hybrid_connection_url is None while access_key_or_connection_string is not a connection string. 
86 | """ 87 | self.logger = logger 88 | 89 | self.relay_subprocess = None 90 | if access_key_or_connection_string.startswith("Endpoint="): 91 | have_connection_string = True 92 | else: 93 | have_connection_string = False 94 | if hybrid_connection_url is None or hybrid_connection_url == "": 95 | if not have_connection_string: 96 | raise ValueError( 97 | "hybrid_connection_url must be specified when "\ 98 | "access_key_or_connection_string is not a connection string.") 99 | 100 | if isinstance(ports, typing.List): 101 | converted_ports = ports 102 | elif isinstance(ports, str): 103 | converted_ports = ports.strip().replace(",", " ").split() 104 | else: 105 | converted_ports = [str(ports)] 106 | 107 | if have_connection_string: 108 | self.auth_option = f"-x \"{access_key_or_connection_string}\"" 109 | else: 110 | self.auth_option = f"-E \"{hybrid_connection_url}\" -k \"{access_key_or_connection_string}\"" 111 | 112 | if debug_mode == DebugMode.WaitForConnection: 113 | self.connection_option = f"-R \"{relay_connection_name}:{host}:{';'.join(converted_ports)}\"" 114 | else: 115 | self.connection_option = f"-L \"{host}:{';'.join(converted_ports)}:{relay_connection_name}\"" 116 | 117 | self.az_relay_connection_wait_time = az_relay_connection_wait_time 118 | 119 | 120 | def __del__(self): 121 | """destructor 122 | """ 123 | self.close() 124 | 125 | 126 | def az_relay_bridge_subprocess(self) -> subprocess.Popen: 127 | """Returns Azure Relay Bridge process subprocess.Popen object. 128 | None if one was not launched 129 | 130 | Returns: 131 | subprocess.Popen: Azure Relay Bridge process 132 | """ 133 | return self.relay_subprocess 134 | 135 | 136 | def open(self, wait_for_connection: bool = True): 137 | """Launches Azure Relay Bridge tool with configured parameters 138 | (as initialized when creating DebugRelay object). 139 | If Azure Relay Bridge is not installed, installs it. 140 | 141 | Args: 142 | wait_for_connection (bool, optional): Wait for Azure Relay Bridge to initialize and connect. Defaults to True. 143 | 144 | Raises: 145 | TimeoutError: Raised when it takes longer than az_relay_connection_wait_time secods 146 | for Azure Relay Bridge to initialize and connect. 147 | """ 148 | # close existing Azure Relay Bridge process (if running) 149 | self.close() 150 | # install Azure Relay Bridge (if not yet) 151 | DebugRelay._install_azure_relay_bridge() 152 | 153 | command = f"{DebugRelay.relay_app_name} {self.connection_option} {self.auth_option}" 154 | if DebugRelay._relay_config_file is not None: 155 | command += f" -f \"{DebugRelay._relay_config_file}\"" 156 | # start Azure Relay Bridge 157 | if not DebugRelay.is_windows: 158 | self.relay_subprocess = subprocess.Popen( 159 | command, 160 | preexec_fn=os.setpgrp, 161 | stdin=None, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, 162 | shell=True, universal_newlines=True, close_fds=True) 163 | else: 164 | self.relay_subprocess = subprocess.Popen( 165 | command, 166 | creationflags = subprocess.CREATE_NEW_PROCESS_GROUP, 167 | stdin=None, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, 168 | shell=True, universal_newlines=True, close_fds=True) 169 | 170 | start = time.perf_counter() 171 | 172 | remote_forward_ready = False 173 | local_forward_ready = False 174 | over_timeout = False 175 | connected = False 176 | 177 | if self.relay_subprocess.poll() is not None: 178 | msg = "Azure Relay Bridge stopped too soon!" 
179 | self.logger.critical(msg) 180 | raise RuntimeError(msg) 181 | 182 | # If recognizing Azure Relay Bridge connection status, parse its output. 183 | if wait_for_connection: 184 | # Iterate over Azure Relay Bridge output lines, 185 | # looking for lines with "LocalForwardHostStart," and "RemoteForwardHostStart," to appear. 186 | for line in iter(self.relay_subprocess.stdout.readline, ''): 187 | self.logger.info(line) 188 | print(line) 189 | if self.relay_subprocess.poll() is not None: 190 | self.logger.critical("Azure Relay Bridge stopped.") 191 | break 192 | if wait_for_connection and not connected: 193 | if line.find("LocalForwardHostStart,") != -1: 194 | local_forward_ready = True 195 | elif line.find("RemoteForwardHostStart,") != -1: 196 | remote_forward_ready = True 197 | if remote_forward_ready and local_forward_ready: 198 | connected = True 199 | msg = "Azure Relay Bridge is connected!" 200 | self.logger.info(msg) 201 | break 202 | 203 | time_delta = time.perf_counter() - start 204 | # did take too long to initialize and connect? 205 | if time_delta > self.az_relay_connection_wait_time: 206 | over_timeout = True 207 | break 208 | else: 209 | msg = "Azure Relay Bridge is running!" 210 | self.logger.info(msg) 211 | 212 | # Handle over-timeout status 213 | if over_timeout: 214 | msg = f"Azure Relay Bridge took too long to connect." 215 | self.logger.critical(msg) 216 | self.close() 217 | raise TimeoutError(msg) 218 | elif self.relay_subprocess.poll() is None: 219 | threading.Thread(target=self._handle_output, daemon=True).start() 220 | else: 221 | msg = "Azure Relay Bridge stopped too soon!" 222 | self.logger.critical(msg) 223 | raise RuntimeError(msg) 224 | 225 | 226 | def close(self): 227 | """Stops Azure Relay Bridge process launched by this object 228 | """ 229 | if self.relay_subprocess is not None: 230 | if self.is_running(): 231 | self.logger.info("Closing Debugging Relay...") 232 | if not DebugRelay.is_windows: 233 | os.killpg(os.getpgid(self.relay_subprocess.pid), signal.SIGTERM) 234 | else: 235 | os.kill(self.relay_subprocess.pid, signal.CTRL_C_EVENT) 236 | try: 237 | self.relay_subprocess.wait(timeout=3) 238 | except subprocess.TimeoutExpired: 239 | self.relay_subprocess.kill() 240 | self.relay_subprocess = None 241 | 242 | 243 | def background_launch(self) -> subprocess.Popen: 244 | """Launches Azure Relay Bridge process in detached mode 245 | Doesn't assign self.relay_subprocess, az_relay_bridge_subprocess() will return None. 246 | """ 247 | # close existing Azure Relay Bridge process (if running) 248 | self.close() 249 | # install Azure Relay Bridge (if not yet) 250 | DebugRelay._install_azure_relay_bridge() 251 | 252 | command = f"{DebugRelay.relay_app_name} {self.connection_option} {self.auth_option}" 253 | if DebugRelay._relay_config_file is not None: 254 | command += f" -f \"{DebugRelay._relay_config_file}\"" 255 | 256 | # start Azure Relay Bridge 257 | detached_relay_subprocess = subprocess.Popen( 258 | command, 259 | stdin=None, stderr=None, stdout=None, 260 | shell=True, close_fds=True) 261 | # wait a second 262 | time.sleep(1) 263 | if detached_relay_subprocess.poll() is not None: 264 | msg = f"Azure Relay Bridge failed to launch." 265 | self.logger.critical(msg) 266 | self.close() 267 | else: 268 | msg = "Azure Relay Bridge is running!" 
269 | self.logger.info(msg) 270 | 271 | return detached_relay_subprocess 272 | 273 | 274 | def wait(self): 275 | if self.relay_subprocess is not None: 276 | self.relay_subprocess.wait() 277 | self.relay_subprocess = None 278 | 279 | 280 | def is_running(self) -> bool: 281 | if self.relay_subprocess is not None: 282 | if self.relay_subprocess.poll() is None: 283 | return True 284 | else: 285 | self.relay_subprocess = None 286 | return False 287 | 288 | 289 | def _handle_output(self): 290 | for line in iter(self.relay_subprocess.stdout.readline, ''): 291 | if line.find("Microsoft.Azure.Relay.Bridge.EventTraceActivity, exception = ") != -1: 292 | msg = f"[Azure Relay Bridge FAILURE]: {line}" 293 | self.logger.critical(msg) 294 | self.close() 295 | break 296 | else: 297 | self.logger.info(line) 298 | 299 | 300 | @staticmethod 301 | def from_config(config_file: str, 302 | debug_mode: DebugMode = DebugMode.WaitForConnection, 303 | host: str = "127.0.0.1", 304 | ports: typing.Union[str, int, typing.List[str], typing.List[int]] = "5678") -> any: 305 | if os.path.exists(config_file): 306 | with open(config_file) as cfg_file: 307 | config = json.load(cfg_file) 308 | relay_connection_name = config["AZRELAY_CONNECTION_NAME"] 309 | conn_str = config["AZRELAY_CONNECTION_STRING"] 310 | return DebugRelay( 311 | access_key_or_connection_string=conn_str, 312 | relay_connection_name=relay_connection_name, 313 | debug_mode=debug_mode, 314 | host=host, 315 | ports=ports) 316 | else: 317 | return None 318 | 319 | 320 | @staticmethod 321 | def from_environment(debug_mode: DebugMode = DebugMode.WaitForConnection, 322 | host: str = "127.0.0.1", 323 | ports: typing.Union[str, int, typing.List[str], typing.List[int]] = "5678") -> any: 324 | relay_connection_name = os.environ.get("AZRELAY_CONNECTION_NAME") 325 | conn_str = os.environ.get("AZRELAY_CONNECTION_STRING") 326 | if not relay_connection_name or not conn_str: 327 | print("AZRELAY_CONNECTION_STRING and AZRELAY_CONNECTION_NAME variables must be assigned.") 328 | return None 329 | else: 330 | return DebugRelay( 331 | access_key_or_connection_string=conn_str, 332 | relay_connection_name=relay_connection_name, 333 | debug_mode=debug_mode, 334 | host=host, 335 | ports=ports) 336 | 337 | 338 | @staticmethod 339 | def kill_relays(): 340 | """Kills all Azure Relay Bridge processes (azrelay) - no matter who and how launched them 341 | """ 342 | if DebugRelay.is_windows: 343 | subprocess.run( 344 | f"taskkill /IM \"{DebugRelay.relay_app_name}.exe\" /F", shell=True) 345 | else: 346 | subprocess.run(f"pkill -9 {DebugRelay.relay_app_name}", shell=True) 347 | 348 | 349 | @staticmethod 350 | def _install_azure_relay_bridge(): 351 | """Installs or updates Azure Relay Bridge 352 | """ 353 | if DebugRelay._installed_az_relay: 354 | return 355 | DebugRelay._installed_az_relay = True 356 | 357 | azrelay_folder = os.path.join( 358 | Path.home(), DebugRelay.relay_dir_name, DebugRelay.relay_version_name) 359 | relay_file = os.path.join( 360 | azrelay_folder, DebugRelay.relay_app_name) 361 | DebugRelay._relay_config_file = os.path.join( 362 | azrelay_folder, DebugRelay.relay_app_name) + ".yml" 363 | 364 | if DebugRelay.is_windows: 365 | relay_file += ".exe" 366 | 367 | exists = os.path.exists(azrelay_folder) 368 | if not exists: 369 | if DebugRelay.is_windows: 370 | download = DebugRelay.DEFAULT_AZ_RELAY_BRIDGE_WINDOWS_DOWLOAD 371 | else: 372 | plat = platform.platform().lower() 373 | if plat.startswith("macos"): 374 | download = DebugRelay.DEFAULT_AZ_RELAY_BRIDGE_MACOS_DOWLOAD 375 | 
elif "-ubuntu" in plat or plat.startswith("ubuntu"): 376 | download = DebugRelay.DEFAULT_AZ_RELAY_BRIDGE_UBUNTU_DOWLOAD 377 | else: # assume Debian 378 | download = DebugRelay.DEFAULT_AZ_RELAY_BRIDGE_DEBIAN_DOWLOAD 379 | if "debian" not in plat: 380 | logging.warning(f"You are running an unsupported OS: {plat}. "\ 381 | "Using Debian build of Azure Relay Bridge.") 382 | 383 | ctx = ssl.create_default_context() 384 | ctx.check_hostname = False 385 | ctx.verify_mode = ssl.CERT_NONE 386 | 387 | if download.lower().endswith(".zip"): 388 | zip_file, _ = urllib.request.urlretrieve(download) 389 | with zipfile.ZipFile(zip_file, 'r') as zip_ref: 390 | zip_ref.extractall(azrelay_folder) 391 | os.remove(zip_file) 392 | else: 393 | filestream = urllib.request.urlopen(download, context=ctx) 394 | with tarfile.open(fileobj=filestream, mode="r|gz") as thetarfile: 395 | thetarfile.extractall(azrelay_folder) 396 | 397 | if not DebugRelay.is_windows: 398 | st = os.stat(relay_file) 399 | os.chmod(relay_file, st.st_mode | stat.S_IEXEC) 400 | 401 | if not os.path.exists(DebugRelay._relay_config_file): 402 | with open(DebugRelay._relay_config_file, "a") as yml: 403 | yml.write("ExitOnForwardFailure: true") 404 | 405 | existing_path_var = os.environ["PATH"] 406 | paths = existing_path_var.split(os.pathsep) 407 | if azrelay_folder not in paths: 408 | os.environ["PATH"] = azrelay_folder + os.pathsep + os.environ["PATH"] 409 | 410 | 411 | def _main(connect: bool, host: str, ports: typing.List[str] = ["5678"], connection_string: str = None, relay_connection_name: str = None, config_file: str = None): 412 | """CLI main function 413 | 414 | Args: 415 | connect (bool): Connect (if True) or listen for incoming connections 416 | host (string): local hostname/address the debugging starts on (127.0.0.1) 417 | port (int): Azure Relay Bridge port 418 | connection_string (str): Optional connection string of an Azure Relay Hybrid Connection 419 | relay_connection_name (str): Optional hybrid connection name 420 | config_file (str): Optional configuration file path. Only used if connection_string is None. 421 | 422 | Raises: 423 | ValueError: Invalid arguments 424 | Exception: Cannot load configuration 425 | """ 426 | print("Debugging Relay Initialization...") 427 | 428 | mode = DebugMode.Connect if connect else DebugMode.WaitForConnection 429 | 430 | if connection_string is not None: 431 | if relay_connection_name is None: 432 | msg = "Both connection string and connection name must be provided." 433 | print(msg) 434 | raise ValueError(msg) 435 | debug_relay = DebugRelay( 436 | connection_string, relay_connection_name, mode, None, host, ports=ports) 437 | elif config_file is not None: 438 | if os.path.exists(config_file): 439 | debug_relay = DebugRelay.from_config(config_file, debug_mode=mode, host=host, ports=ports) 440 | else: 441 | config_file = os.path.normpath(config_file) 442 | logging.warning(f"Cannot load configuration file {config_file}. Trying with environment variables.") 443 | debug_relay = None 444 | else: 445 | debug_relay = None 446 | 447 | if debug_relay is None: 448 | debug_relay = DebugRelay.from_environment( 449 | debug_mode=mode, host=host, ports=ports) 450 | 451 | if debug_relay is None: 452 | raise Exception("Cannot create a Debugging Relay object. 
Configuration may be missing.") 453 | 454 | print(f"Starting Debugging Relay...") 455 | relay = debug_relay.background_launch() 456 | relay.wait() 457 | 458 | 459 | def _cli_main(argv): 460 | """CLI entry function 461 | 462 | Args: 463 | argv: Command Line arguments 464 | 465 | --no-kill - optional, 466 | If presented, prevents existing Azure Relay Bridge processes from being nuked. 467 | If omitted, all existing Azure Azure Relay Bridge processes will be killed. 468 | --mode - required, 469 | Debugging mode: listen, connect or none (default). 470 | --host - optional, defaults to 127.0.0.1, 471 | Local hostname/address the debugging starts on (127.0.0.1) 472 | --ports - optional, defaults to 5678 473 | Azure Relay Bridge port 474 | --connection-string - optional, defaults to None 475 | Connection string of an Azure Relay Hybrid Connection 476 | --connection-name - optional, defaults to None 477 | Hybrid connection name. Required if --connection-string is specified. 478 | --config_file - optional, defaults to None 479 | Configuration file path. Only used if connection_string is not specified. 480 | """ 481 | parser = argparse.ArgumentParser() 482 | parser.add_argument('--no-kill', action='store_true', 483 | default=False, required=False, help="Don't terminate existing azrelay processes.") 484 | parser.add_argument('--mode', action='store', 485 | default="none", choices=['connect', 'listen', "none"], required=False, 486 | help="Debugging mode: listen, connect or none") 487 | parser.add_argument('--ports', type=str, 488 | default="5678", required=False, help="One or more Azure Relay Bridge ports.") 489 | parser.add_argument('--host', action='store', 490 | default="127.0.0.1", required=False, help="Local hostname/address the debugging starts on") 491 | parser.add_argument('--connection-string', action='store', 492 | default=None, required=False, help="Connection string of an Azure Relay Hybrid Connection") 493 | parser.add_argument('--connection-name', action='store', 494 | default=None, required=False, help="Azure Relay Hybrid Connection name") 495 | parser.add_argument('--config-file', action='store', 496 | default=None, required=False, help="Path to the configuration file. Defaults to None.") 497 | options = parser.parse_args(args=argv) 498 | 499 | logging.root.setLevel(logging.INFO) 500 | if not options.no_kill: 501 | print("Closing existing Azure Debugging Relay processes.") 502 | DebugRelay.kill_relays() 503 | 504 | if options.mode != "none": 505 | connect = True if options.mode == "connect" else False 506 | ports = options.ports.strip() 507 | ports = ports.replace(", ", ",").replace(" ,", "").replace(" ", ",") 508 | ports_list = ports.split(",") 509 | _main(connect, options.host, ports_list, options.connection_string, 510 | options.connection_name, options.config_file) 511 | 512 | 513 | # DebugRelays can work as a CLI tool. 514 | if __name__ == '__main__': 515 | _cli_main(sys.argv[1:]) 516 | -------------------------------------------------------------------------------- /azdebugrelay/debugpyex.py: -------------------------------------------------------------------------------- 1 | import debugpy 2 | import logging 3 | import threading 4 | from .threads import StoppableThread 5 | 6 | 7 | class DebugPyEx(): 8 | """Use this class instead of debugpy. 9 | It provides an additional manageability layer on top of debugpy calls. 
10 | """ 11 | _debugpy_connected = False 12 | _connect_lock = threading.Lock() 13 | 14 | def _thread_connect_proc(host, port): 15 | try: 16 | debugpy.connect((str(host), int(port))) 17 | DebugPyEx._debugpy_connected = True 18 | except SystemExit: 19 | # SystemExit is a "legal" way to terminate this thread. 20 | logging.warn("Debugpy thread has been terminated.") 21 | 22 | 23 | @staticmethod 24 | def connect(host, port, connect_timeout_seconds) -> bool: 25 | with DebugPyEx._connect_lock: 26 | DebugPyEx._debugpy_connected = False 27 | thread = StoppableThread(target=DebugPyEx._thread_connect_proc, args=( 28 | host, port,), daemon=True) 29 | thread.start() 30 | thread.join(connect_timeout_seconds) 31 | if(thread.is_alive()): 32 | # kill the thread "gracefully"! 33 | thread.stop() 34 | return False 35 | elif DebugPyEx._debugpy_connected: 36 | debugpy.debug_this_thread() 37 | return True 38 | else: 39 | return False 40 | 41 | -------------------------------------------------------------------------------- /azdebugrelay/threads.py: -------------------------------------------------------------------------------- 1 | import ctypes 2 | import inspect 3 | import threading 4 | 5 | 6 | def _async_raise(tid, exctype): 7 | '''Raises an exception in the threads with id tid''' 8 | if not inspect.isclass(exctype): 9 | raise TypeError("Only types can be raised (not instances)") 10 | res = ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(tid), 11 | ctypes.py_object(exctype)) 12 | if res == 0: 13 | raise ValueError("Invalid thread id") 14 | elif res != 1: 15 | # "if it returns a number greater than one, you're in trouble, 16 | # and you should call it again with exc=NULL to revert the effect" 17 | ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(tid), None) 18 | raise SystemError("PyThreadState_SetAsyncExc failed") 19 | 20 | 21 | class StoppableThread(threading.Thread): 22 | '''A thread class that supports raising an exception in the thread from 23 | another thread. 24 | ''' 25 | def _get_my_tid(self): 26 | """determines this (self's) thread id 27 | 28 | CAREFUL: this function is executed in the context of the caller 29 | thread, to get the identity of the thread represented by this 30 | instance. 31 | """ 32 | if not self.isAlive(): 33 | raise threading.ThreadError("the thread is not active") 34 | 35 | # do we have it cached? 36 | if hasattr(self, "_thread_id"): 37 | return self._thread_id 38 | 39 | # no, look for it in the _active dict 40 | for tid, tobj in threading._active.items(): 41 | if tobj is self: 42 | self._thread_id = tid 43 | return tid 44 | 45 | raise AssertionError("could not determine the thread's id") 46 | 47 | 48 | def raise_exception(self, exctype): 49 | """Raises the given exception type in the context of this thread. 50 | 51 | If the thread is busy in a system call (time.sleep(), 52 | socket.accept(), ...), the exception is simply ignored. 53 | 54 | If you are sure that your exception should terminate the thread, 55 | one way to ensure that it works is: 56 | 57 | t = ThreadWithExc( ... ) 58 | ... 59 | t.raiseExc( SomeException ) 60 | while t.isAlive(): 61 | time.sleep( 0.1 ) 62 | t.raiseExc( SomeException ) 63 | 64 | If the exception is to be caught by the thread, you need a way to 65 | check that your thread has caught it. 66 | 67 | CAREFUL: this function is executed in the context of the 68 | caller thread, to raise an exception in the context of the 69 | thread represented by this instance. 
70 | """ 71 | _async_raise(self._get_my_tid(), exctype) 72 | 73 | 74 | def stop(self): 75 | self.raise_exception(SystemExit) 76 | -------------------------------------------------------------------------------- /images/azdebugrelay-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tarockey/azure-debug-relay/aab0dd147b8416d20cbd5d15eb717c964d1954b9/images/azdebugrelay-icon.png -------------------------------------------------------------------------------- /images/debug-relay-diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tarockey/azure-debug-relay/aab0dd147b8416d20cbd5d15eb717c964d1954b9/images/debug-relay-diagram.png -------------------------------------------------------------------------------- /make.vsix.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" 4 | 5 | rm -rf "${DIR}/build/vscode" 6 | mkdir -p "${DIR}/build/vscode" 7 | vsce package --out "${DIR}/build/vscode" 8 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "azure-debug-relay", 3 | "version": "0.5.1", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "@babel/code-frame": { 8 | "version": "7.12.11", 9 | "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", 10 | "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", 11 | "dev": true, 12 | "requires": { 13 | "@babel/highlight": "^7.10.4" 14 | } 15 | }, 16 | "@babel/helper-validator-identifier": { 17 | "version": "7.12.11", 18 | "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz", 19 | "integrity": "sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==", 20 | "dev": true 21 | }, 22 | "@babel/highlight": { 23 | "version": "7.12.13", 24 | "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.12.13.tgz", 25 | "integrity": "sha512-kocDQvIbgMKlWxXe9fof3TQ+gkIPOUSEYhJjqUjvKMez3krV7vbzYCDq39Oj11UAVK7JqPVGQPlgE85dPNlQww==", 26 | "dev": true, 27 | "requires": { 28 | "@babel/helper-validator-identifier": "^7.12.11", 29 | "chalk": "^2.0.0", 30 | "js-tokens": "^4.0.0" 31 | }, 32 | "dependencies": { 33 | "ansi-styles": { 34 | "version": "3.2.1", 35 | "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", 36 | "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", 37 | "dev": true, 38 | "requires": { 39 | "color-convert": "^1.9.0" 40 | } 41 | }, 42 | "chalk": { 43 | "version": "2.4.2", 44 | "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", 45 | "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", 46 | "dev": true, 47 | "requires": { 48 | "ansi-styles": "^3.2.1", 49 | "escape-string-regexp": "^1.0.5", 50 | "supports-color": "^5.3.0" 51 | } 52 | }, 53 | "color-convert": { 54 | "version": "1.9.3", 55 | "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", 56 | "integrity": 
"sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", 57 | "dev": true, 58 | "requires": { 59 | "color-name": "1.1.3" 60 | } 61 | }, 62 | "color-name": { 63 | "version": "1.1.3", 64 | "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", 65 | "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", 66 | "dev": true 67 | }, 68 | "has-flag": { 69 | "version": "3.0.0", 70 | "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", 71 | "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", 72 | "dev": true 73 | }, 74 | "supports-color": { 75 | "version": "5.5.0", 76 | "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", 77 | "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", 78 | "dev": true, 79 | "requires": { 80 | "has-flag": "^3.0.0" 81 | } 82 | } 83 | } 84 | }, 85 | "@eslint/eslintrc": { 86 | "version": "0.3.0", 87 | "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.3.0.tgz", 88 | "integrity": "sha512-1JTKgrOKAHVivSvOYw+sJOunkBjUOvjqWk1DPja7ZFhIS2mX/4EgTT8M7eTK9jrKhL/FvXXEbQwIs3pg1xp3dg==", 89 | "dev": true, 90 | "requires": { 91 | "ajv": "^6.12.4", 92 | "debug": "^4.1.1", 93 | "espree": "^7.3.0", 94 | "globals": "^12.1.0", 95 | "ignore": "^4.0.6", 96 | "import-fresh": "^3.2.1", 97 | "js-yaml": "^3.13.1", 98 | "lodash": "^4.17.20", 99 | "minimatch": "^3.0.4", 100 | "strip-json-comments": "^3.1.1" 101 | } 102 | }, 103 | "@types/eslint-visitor-keys": { 104 | "version": "1.0.0", 105 | "resolved": "https://registry.npmjs.org/@types/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz", 106 | "integrity": "sha512-OCutwjDZ4aFS6PB1UZ988C4YgwlBHJd6wCeQqaLdmadZ/7e+w79+hbMUFC1QXDNCmdyoRfAFdm0RypzwR+Qpag==", 107 | "dev": true 108 | }, 109 | "@types/json-schema": { 110 | "version": "7.0.7", 111 | "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.7.tgz", 112 | "integrity": "sha512-cxWFQVseBm6O9Gbw1IWb8r6OS4OhSt3hPZLkFApLjM8TEXROBuQGLAH2i2gZpcXdLBIrpXuTDhH7Vbm1iXmNGA==", 113 | "dev": true 114 | }, 115 | "@types/node": { 116 | "version": "12.20.4", 117 | "resolved": "https://registry.npmjs.org/@types/node/-/node-12.20.4.tgz", 118 | "integrity": "sha512-xRCgeE0Q4pT5UZ189TJ3SpYuX/QGl6QIAOAIeDSbAVAd2gX1NxSZup4jNVK7cxIeP8KDSbJgcckun495isP1jQ==", 119 | "dev": true 120 | }, 121 | "@types/vscode": { 122 | "version": "1.53.0", 123 | "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.53.0.tgz", 124 | "integrity": "sha512-XjFWbSPOM0EKIT2XhhYm3D3cx3nn3lshMUcWNy1eqefk+oqRuBq8unVb6BYIZqXy9lQZyeUl7eaBCOZWv+LcXQ==", 125 | "dev": true 126 | }, 127 | "@typescript-eslint/eslint-plugin": { 128 | "version": "3.10.1", 129 | "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-3.10.1.tgz", 130 | "integrity": "sha512-PQg0emRtzZFWq6PxBcdxRH3QIQiyFO3WCVpRL3fgj5oQS3CDs3AeAKfv4DxNhzn8ITdNJGJ4D3Qw8eAJf3lXeQ==", 131 | "dev": true, 132 | "requires": { 133 | "@typescript-eslint/experimental-utils": "3.10.1", 134 | "debug": "^4.1.1", 135 | "functional-red-black-tree": "^1.0.1", 136 | "regexpp": "^3.0.0", 137 | "semver": "^7.3.2", 138 | "tsutils": "^3.17.1" 139 | } 140 | }, 141 | "@typescript-eslint/experimental-utils": { 142 | "version": "3.10.1", 143 | "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-3.10.1.tgz", 144 | "integrity": "sha512-DewqIgscDzmAfd5nOGe4zm6Bl7PKtMG2Ad0KG8CUZAHlXfAKTF9Ol5PXhiMh39yRL2ChRH1cuuUGOcVyyrhQIw==", 145 
| "dev": true, 146 | "requires": { 147 | "@types/json-schema": "^7.0.3", 148 | "@typescript-eslint/types": "3.10.1", 149 | "@typescript-eslint/typescript-estree": "3.10.1", 150 | "eslint-scope": "^5.0.0", 151 | "eslint-utils": "^2.0.0" 152 | } 153 | }, 154 | "@typescript-eslint/parser": { 155 | "version": "3.10.1", 156 | "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-3.10.1.tgz", 157 | "integrity": "sha512-Ug1RcWcrJP02hmtaXVS3axPPTTPnZjupqhgj+NnZ6BCkwSImWk/283347+x9wN+lqOdK9Eo3vsyiyDHgsmiEJw==", 158 | "dev": true, 159 | "requires": { 160 | "@types/eslint-visitor-keys": "^1.0.0", 161 | "@typescript-eslint/experimental-utils": "3.10.1", 162 | "@typescript-eslint/types": "3.10.1", 163 | "@typescript-eslint/typescript-estree": "3.10.1", 164 | "eslint-visitor-keys": "^1.1.0" 165 | } 166 | }, 167 | "@typescript-eslint/types": { 168 | "version": "3.10.1", 169 | "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-3.10.1.tgz", 170 | "integrity": "sha512-+3+FCUJIahE9q0lDi1WleYzjCwJs5hIsbugIgnbB+dSCYUxl8L6PwmsyOPFZde2hc1DlTo/xnkOgiTLSyAbHiQ==", 171 | "dev": true 172 | }, 173 | "@typescript-eslint/typescript-estree": { 174 | "version": "3.10.1", 175 | "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-3.10.1.tgz", 176 | "integrity": "sha512-QbcXOuq6WYvnB3XPsZpIwztBoquEYLXh2MtwVU+kO8jgYCiv4G5xrSP/1wg4tkvrEE+esZVquIPX/dxPlePk1w==", 177 | "dev": true, 178 | "requires": { 179 | "@typescript-eslint/types": "3.10.1", 180 | "@typescript-eslint/visitor-keys": "3.10.1", 181 | "debug": "^4.1.1", 182 | "glob": "^7.1.6", 183 | "is-glob": "^4.0.1", 184 | "lodash": "^4.17.15", 185 | "semver": "^7.3.2", 186 | "tsutils": "^3.17.1" 187 | } 188 | }, 189 | "@typescript-eslint/visitor-keys": { 190 | "version": "3.10.1", 191 | "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-3.10.1.tgz", 192 | "integrity": "sha512-9JgC82AaQeglebjZMgYR5wgmfUdUc+EitGUUMW8u2nDckaeimzW+VsoLV6FoimPv2id3VQzfjwBxEMVz08ameQ==", 193 | "dev": true, 194 | "requires": { 195 | "eslint-visitor-keys": "^1.1.0" 196 | } 197 | }, 198 | "acorn": { 199 | "version": "7.4.1", 200 | "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", 201 | "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", 202 | "dev": true 203 | }, 204 | "acorn-jsx": { 205 | "version": "5.3.1", 206 | "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.1.tgz", 207 | "integrity": "sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng==", 208 | "dev": true 209 | }, 210 | "ajv": { 211 | "version": "6.12.6", 212 | "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", 213 | "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", 214 | "dev": true, 215 | "requires": { 216 | "fast-deep-equal": "^3.1.1", 217 | "fast-json-stable-stringify": "^2.0.0", 218 | "json-schema-traverse": "^0.4.1", 219 | "uri-js": "^4.2.2" 220 | } 221 | }, 222 | "ansi-colors": { 223 | "version": "4.1.1", 224 | "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", 225 | "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", 226 | "dev": true 227 | }, 228 | "ansi-regex": { 229 | "version": "5.0.0", 230 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", 231 | "integrity": 
"sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", 232 | "dev": true 233 | }, 234 | "ansi-styles": { 235 | "version": "4.3.0", 236 | "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", 237 | "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", 238 | "dev": true, 239 | "requires": { 240 | "color-convert": "^2.0.1" 241 | } 242 | }, 243 | "argparse": { 244 | "version": "1.0.10", 245 | "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", 246 | "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", 247 | "dev": true, 248 | "requires": { 249 | "sprintf-js": "~1.0.2" 250 | } 251 | }, 252 | "astral-regex": { 253 | "version": "2.0.0", 254 | "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", 255 | "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", 256 | "dev": true 257 | }, 258 | "balanced-match": { 259 | "version": "1.0.0", 260 | "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", 261 | "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", 262 | "dev": true 263 | }, 264 | "brace-expansion": { 265 | "version": "1.1.11", 266 | "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", 267 | "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", 268 | "dev": true, 269 | "requires": { 270 | "balanced-match": "^1.0.0", 271 | "concat-map": "0.0.1" 272 | } 273 | }, 274 | "callsites": { 275 | "version": "3.1.0", 276 | "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", 277 | "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", 278 | "dev": true 279 | }, 280 | "chalk": { 281 | "version": "4.1.0", 282 | "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", 283 | "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", 284 | "dev": true, 285 | "requires": { 286 | "ansi-styles": "^4.1.0", 287 | "supports-color": "^7.1.0" 288 | } 289 | }, 290 | "color-convert": { 291 | "version": "2.0.1", 292 | "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", 293 | "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", 294 | "dev": true, 295 | "requires": { 296 | "color-name": "~1.1.4" 297 | } 298 | }, 299 | "color-name": { 300 | "version": "1.1.4", 301 | "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", 302 | "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", 303 | "dev": true 304 | }, 305 | "concat-map": { 306 | "version": "0.0.1", 307 | "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", 308 | "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", 309 | "dev": true 310 | }, 311 | "cross-spawn": { 312 | "version": "7.0.3", 313 | "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", 314 | "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", 315 | "dev": true, 316 | "requires": { 317 | "path-key": "^3.1.0", 318 | "shebang-command": "^2.0.0", 319 | "which": "^2.0.1" 320 | } 321 | }, 322 | "debug": { 323 | 
"version": "4.3.1", 324 | "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", 325 | "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", 326 | "dev": true, 327 | "requires": { 328 | "ms": "2.1.2" 329 | } 330 | }, 331 | "deep-is": { 332 | "version": "0.1.3", 333 | "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", 334 | "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", 335 | "dev": true 336 | }, 337 | "doctrine": { 338 | "version": "3.0.0", 339 | "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", 340 | "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", 341 | "dev": true, 342 | "requires": { 343 | "esutils": "^2.0.2" 344 | } 345 | }, 346 | "dotenv": { 347 | "version": "8.2.0", 348 | "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz", 349 | "integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw==" 350 | }, 351 | "emoji-regex": { 352 | "version": "8.0.0", 353 | "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", 354 | "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", 355 | "dev": true 356 | }, 357 | "enquirer": { 358 | "version": "2.3.6", 359 | "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", 360 | "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", 361 | "dev": true, 362 | "requires": { 363 | "ansi-colors": "^4.1.1" 364 | } 365 | }, 366 | "escape-string-regexp": { 367 | "version": "1.0.5", 368 | "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", 369 | "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", 370 | "dev": true 371 | }, 372 | "eslint": { 373 | "version": "7.20.0", 374 | "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.20.0.tgz", 375 | "integrity": "sha512-qGi0CTcOGP2OtCQBgWZlQjcTuP0XkIpYFj25XtRTQSHC+umNnp7UMshr2G8SLsRFYDdAPFeHOsiteadmMH02Yw==", 376 | "dev": true, 377 | "requires": { 378 | "@babel/code-frame": "7.12.11", 379 | "@eslint/eslintrc": "^0.3.0", 380 | "ajv": "^6.10.0", 381 | "chalk": "^4.0.0", 382 | "cross-spawn": "^7.0.2", 383 | "debug": "^4.0.1", 384 | "doctrine": "^3.0.0", 385 | "enquirer": "^2.3.5", 386 | "eslint-scope": "^5.1.1", 387 | "eslint-utils": "^2.1.0", 388 | "eslint-visitor-keys": "^2.0.0", 389 | "espree": "^7.3.1", 390 | "esquery": "^1.4.0", 391 | "esutils": "^2.0.2", 392 | "file-entry-cache": "^6.0.0", 393 | "functional-red-black-tree": "^1.0.1", 394 | "glob-parent": "^5.0.0", 395 | "globals": "^12.1.0", 396 | "ignore": "^4.0.6", 397 | "import-fresh": "^3.0.0", 398 | "imurmurhash": "^0.1.4", 399 | "is-glob": "^4.0.0", 400 | "js-yaml": "^3.13.1", 401 | "json-stable-stringify-without-jsonify": "^1.0.1", 402 | "levn": "^0.4.1", 403 | "lodash": "^4.17.20", 404 | "minimatch": "^3.0.4", 405 | "natural-compare": "^1.4.0", 406 | "optionator": "^0.9.1", 407 | "progress": "^2.0.0", 408 | "regexpp": "^3.1.0", 409 | "semver": "^7.2.1", 410 | "strip-ansi": "^6.0.0", 411 | "strip-json-comments": "^3.1.0", 412 | "table": "^6.0.4", 413 | "text-table": "^0.2.0", 414 | "v8-compile-cache": "^2.0.3" 415 | }, 416 | "dependencies": { 417 | "eslint-visitor-keys": { 418 | "version": "2.0.0", 419 | "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.0.0.tgz", 420 | "integrity": 
"sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ==", 421 | "dev": true 422 | } 423 | } 424 | }, 425 | "eslint-scope": { 426 | "version": "5.1.1", 427 | "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", 428 | "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", 429 | "dev": true, 430 | "requires": { 431 | "esrecurse": "^4.3.0", 432 | "estraverse": "^4.1.1" 433 | } 434 | }, 435 | "eslint-utils": { 436 | "version": "2.1.0", 437 | "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", 438 | "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", 439 | "dev": true, 440 | "requires": { 441 | "eslint-visitor-keys": "^1.1.0" 442 | } 443 | }, 444 | "eslint-visitor-keys": { 445 | "version": "1.3.0", 446 | "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", 447 | "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", 448 | "dev": true 449 | }, 450 | "espree": { 451 | "version": "7.3.1", 452 | "resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz", 453 | "integrity": "sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==", 454 | "dev": true, 455 | "requires": { 456 | "acorn": "^7.4.0", 457 | "acorn-jsx": "^5.3.1", 458 | "eslint-visitor-keys": "^1.3.0" 459 | } 460 | }, 461 | "esprima": { 462 | "version": "4.0.1", 463 | "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", 464 | "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", 465 | "dev": true 466 | }, 467 | "esquery": { 468 | "version": "1.4.0", 469 | "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", 470 | "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", 471 | "dev": true, 472 | "requires": { 473 | "estraverse": "^5.1.0" 474 | }, 475 | "dependencies": { 476 | "estraverse": { 477 | "version": "5.2.0", 478 | "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", 479 | "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", 480 | "dev": true 481 | } 482 | } 483 | }, 484 | "esrecurse": { 485 | "version": "4.3.0", 486 | "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", 487 | "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", 488 | "dev": true, 489 | "requires": { 490 | "estraverse": "^5.2.0" 491 | }, 492 | "dependencies": { 493 | "estraverse": { 494 | "version": "5.2.0", 495 | "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", 496 | "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", 497 | "dev": true 498 | } 499 | } 500 | }, 501 | "estraverse": { 502 | "version": "4.3.0", 503 | "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", 504 | "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", 505 | "dev": true 506 | }, 507 | "esutils": { 508 | "version": "2.0.3", 509 | "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", 510 | "integrity": 
"sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", 511 | "dev": true 512 | }, 513 | "fast-deep-equal": { 514 | "version": "3.1.3", 515 | "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", 516 | "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", 517 | "dev": true 518 | }, 519 | "fast-json-stable-stringify": { 520 | "version": "2.1.0", 521 | "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", 522 | "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", 523 | "dev": true 524 | }, 525 | "fast-levenshtein": { 526 | "version": "2.0.6", 527 | "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", 528 | "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", 529 | "dev": true 530 | }, 531 | "file-entry-cache": { 532 | "version": "6.0.1", 533 | "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", 534 | "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", 535 | "dev": true, 536 | "requires": { 537 | "flat-cache": "^3.0.4" 538 | } 539 | }, 540 | "flat-cache": { 541 | "version": "3.0.4", 542 | "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", 543 | "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", 544 | "dev": true, 545 | "requires": { 546 | "flatted": "^3.1.0", 547 | "rimraf": "^3.0.2" 548 | } 549 | }, 550 | "flatted": { 551 | "version": "3.1.1", 552 | "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.1.1.tgz", 553 | "integrity": "sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA==", 554 | "dev": true 555 | }, 556 | "fs.realpath": { 557 | "version": "1.0.0", 558 | "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", 559 | "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", 560 | "dev": true 561 | }, 562 | "functional-red-black-tree": { 563 | "version": "1.0.1", 564 | "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", 565 | "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", 566 | "dev": true 567 | }, 568 | "glob": { 569 | "version": "7.1.6", 570 | "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", 571 | "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", 572 | "dev": true, 573 | "requires": { 574 | "fs.realpath": "^1.0.0", 575 | "inflight": "^1.0.4", 576 | "inherits": "2", 577 | "minimatch": "^3.0.4", 578 | "once": "^1.3.0", 579 | "path-is-absolute": "^1.0.0" 580 | } 581 | }, 582 | "glob-parent": { 583 | "version": "5.1.1", 584 | "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz", 585 | "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==", 586 | "dev": true, 587 | "requires": { 588 | "is-glob": "^4.0.1" 589 | } 590 | }, 591 | "globals": { 592 | "version": "12.4.0", 593 | "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", 594 | "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", 595 | "dev": true, 596 | "requires": { 597 | "type-fest": "^0.8.1" 598 | } 599 | }, 600 | "has-flag": { 601 
| "version": "4.0.0", 602 | "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", 603 | "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", 604 | "dev": true 605 | }, 606 | "ignore": { 607 | "version": "4.0.6", 608 | "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", 609 | "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", 610 | "dev": true 611 | }, 612 | "import-fresh": { 613 | "version": "3.3.0", 614 | "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", 615 | "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", 616 | "dev": true, 617 | "requires": { 618 | "parent-module": "^1.0.0", 619 | "resolve-from": "^4.0.0" 620 | } 621 | }, 622 | "imurmurhash": { 623 | "version": "0.1.4", 624 | "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", 625 | "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", 626 | "dev": true 627 | }, 628 | "inflight": { 629 | "version": "1.0.6", 630 | "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", 631 | "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", 632 | "dev": true, 633 | "requires": { 634 | "once": "^1.3.0", 635 | "wrappy": "1" 636 | } 637 | }, 638 | "inherits": { 639 | "version": "2.0.4", 640 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", 641 | "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", 642 | "dev": true 643 | }, 644 | "is-extglob": { 645 | "version": "2.1.1", 646 | "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", 647 | "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", 648 | "dev": true 649 | }, 650 | "is-fullwidth-code-point": { 651 | "version": "3.0.0", 652 | "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", 653 | "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", 654 | "dev": true 655 | }, 656 | "is-glob": { 657 | "version": "4.0.1", 658 | "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", 659 | "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", 660 | "dev": true, 661 | "requires": { 662 | "is-extglob": "^2.1.1" 663 | } 664 | }, 665 | "isexe": { 666 | "version": "2.0.0", 667 | "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", 668 | "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", 669 | "dev": true 670 | }, 671 | "js-tokens": { 672 | "version": "4.0.0", 673 | "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", 674 | "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", 675 | "dev": true 676 | }, 677 | "js-yaml": { 678 | "version": "3.14.1", 679 | "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", 680 | "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", 681 | "dev": true, 682 | "requires": { 683 | "argparse": "^1.0.7", 684 | "esprima": "^4.0.0" 685 | } 686 | }, 687 | "json-schema-traverse": { 688 | "version": "0.4.1", 689 | "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", 690 | "integrity": 
"sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", 691 | "dev": true 692 | }, 693 | "json-stable-stringify-without-jsonify": { 694 | "version": "1.0.1", 695 | "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", 696 | "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", 697 | "dev": true 698 | }, 699 | "levn": { 700 | "version": "0.4.1", 701 | "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", 702 | "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", 703 | "dev": true, 704 | "requires": { 705 | "prelude-ls": "^1.2.1", 706 | "type-check": "~0.4.0" 707 | } 708 | }, 709 | "lodash": { 710 | "version": "4.17.21", 711 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", 712 | "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", 713 | "dev": true 714 | }, 715 | "lru-cache": { 716 | "version": "6.0.0", 717 | "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", 718 | "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", 719 | "dev": true, 720 | "requires": { 721 | "yallist": "^4.0.0" 722 | } 723 | }, 724 | "minimatch": { 725 | "version": "3.0.4", 726 | "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", 727 | "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", 728 | "dev": true, 729 | "requires": { 730 | "brace-expansion": "^1.1.7" 731 | } 732 | }, 733 | "ms": { 734 | "version": "2.1.2", 735 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", 736 | "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", 737 | "dev": true 738 | }, 739 | "natural-compare": { 740 | "version": "1.4.0", 741 | "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", 742 | "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", 743 | "dev": true 744 | }, 745 | "once": { 746 | "version": "1.4.0", 747 | "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", 748 | "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", 749 | "dev": true, 750 | "requires": { 751 | "wrappy": "1" 752 | } 753 | }, 754 | "optionator": { 755 | "version": "0.9.1", 756 | "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", 757 | "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", 758 | "dev": true, 759 | "requires": { 760 | "deep-is": "^0.1.3", 761 | "fast-levenshtein": "^2.0.6", 762 | "levn": "^0.4.1", 763 | "prelude-ls": "^1.2.1", 764 | "type-check": "^0.4.0", 765 | "word-wrap": "^1.2.3" 766 | } 767 | }, 768 | "parent-module": { 769 | "version": "1.0.1", 770 | "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", 771 | "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", 772 | "dev": true, 773 | "requires": { 774 | "callsites": "^3.0.0" 775 | } 776 | }, 777 | "path-is-absolute": { 778 | "version": "1.0.1", 779 | "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", 780 | "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", 781 | "dev": true 782 | }, 783 | "path-key": { 784 | "version": "3.1.1", 785 | "resolved": 
"https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", 786 | "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", 787 | "dev": true 788 | }, 789 | "prelude-ls": { 790 | "version": "1.2.1", 791 | "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", 792 | "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", 793 | "dev": true 794 | }, 795 | "progress": { 796 | "version": "2.0.3", 797 | "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", 798 | "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", 799 | "dev": true 800 | }, 801 | "punycode": { 802 | "version": "2.1.1", 803 | "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", 804 | "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", 805 | "dev": true 806 | }, 807 | "regexpp": { 808 | "version": "3.1.0", 809 | "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.1.0.tgz", 810 | "integrity": "sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q==", 811 | "dev": true 812 | }, 813 | "require-from-string": { 814 | "version": "2.0.2", 815 | "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", 816 | "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", 817 | "dev": true 818 | }, 819 | "resolve-from": { 820 | "version": "4.0.0", 821 | "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", 822 | "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", 823 | "dev": true 824 | }, 825 | "rimraf": { 826 | "version": "3.0.2", 827 | "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", 828 | "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", 829 | "dev": true, 830 | "requires": { 831 | "glob": "^7.1.3" 832 | } 833 | }, 834 | "semver": { 835 | "version": "7.3.4", 836 | "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz", 837 | "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==", 838 | "dev": true, 839 | "requires": { 840 | "lru-cache": "^6.0.0" 841 | } 842 | }, 843 | "shebang-command": { 844 | "version": "2.0.0", 845 | "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", 846 | "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", 847 | "dev": true, 848 | "requires": { 849 | "shebang-regex": "^3.0.0" 850 | } 851 | }, 852 | "shebang-regex": { 853 | "version": "3.0.0", 854 | "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", 855 | "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", 856 | "dev": true 857 | }, 858 | "slice-ansi": { 859 | "version": "4.0.0", 860 | "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", 861 | "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", 862 | "dev": true, 863 | "requires": { 864 | "ansi-styles": "^4.0.0", 865 | "astral-regex": "^2.0.0", 866 | "is-fullwidth-code-point": "^3.0.0" 867 | } 868 | }, 869 | 
"sprintf-js": { 870 | "version": "1.0.3", 871 | "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", 872 | "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", 873 | "dev": true 874 | }, 875 | "string-width": { 876 | "version": "4.2.0", 877 | "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", 878 | "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", 879 | "dev": true, 880 | "requires": { 881 | "emoji-regex": "^8.0.0", 882 | "is-fullwidth-code-point": "^3.0.0", 883 | "strip-ansi": "^6.0.0" 884 | } 885 | }, 886 | "strip-ansi": { 887 | "version": "6.0.0", 888 | "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", 889 | "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", 890 | "dev": true, 891 | "requires": { 892 | "ansi-regex": "^5.0.0" 893 | } 894 | }, 895 | "strip-json-comments": { 896 | "version": "3.1.1", 897 | "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", 898 | "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", 899 | "dev": true 900 | }, 901 | "supports-color": { 902 | "version": "7.2.0", 903 | "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", 904 | "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", 905 | "dev": true, 906 | "requires": { 907 | "has-flag": "^4.0.0" 908 | } 909 | }, 910 | "table": { 911 | "version": "6.0.7", 912 | "resolved": "https://registry.npmjs.org/table/-/table-6.0.7.tgz", 913 | "integrity": "sha512-rxZevLGTUzWna/qBLObOe16kB2RTnnbhciwgPbMMlazz1yZGVEgnZK762xyVdVznhqxrfCeBMmMkgOOaPwjH7g==", 914 | "dev": true, 915 | "requires": { 916 | "ajv": "^7.0.2", 917 | "lodash": "^4.17.20", 918 | "slice-ansi": "^4.0.0", 919 | "string-width": "^4.2.0" 920 | }, 921 | "dependencies": { 922 | "ajv": { 923 | "version": "7.1.1", 924 | "resolved": "https://registry.npmjs.org/ajv/-/ajv-7.1.1.tgz", 925 | "integrity": "sha512-ga/aqDYnUy/o7vbsRTFhhTsNeXiYb5JWDIcRIeZfwRNCefwjNTVYCGdGSUrEmiu3yDK3vFvNbgJxvrQW4JXrYQ==", 926 | "dev": true, 927 | "requires": { 928 | "fast-deep-equal": "^3.1.1", 929 | "json-schema-traverse": "^1.0.0", 930 | "require-from-string": "^2.0.2", 931 | "uri-js": "^4.2.2" 932 | } 933 | }, 934 | "json-schema-traverse": { 935 | "version": "1.0.0", 936 | "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", 937 | "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", 938 | "dev": true 939 | } 940 | } 941 | }, 942 | "text-table": { 943 | "version": "0.2.0", 944 | "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", 945 | "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", 946 | "dev": true 947 | }, 948 | "tslib": { 949 | "version": "1.14.1", 950 | "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", 951 | "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", 952 | "dev": true 953 | }, 954 | "tsutils": { 955 | "version": "3.20.0", 956 | "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.20.0.tgz", 957 | "integrity": "sha512-RYbuQuvkhuqVeXweWT3tJLKOEJ/UUw9GjNEZGWdrLLlM+611o1gwLHBpxoFJKKl25fLprp2eVthtKs5JOrNeXg==", 958 | "dev": true, 959 | "requires": { 960 | "tslib": "^1.8.1" 961 | } 962 | }, 
963 | "type-check": { 964 | "version": "0.4.0", 965 | "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", 966 | "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", 967 | "dev": true, 968 | "requires": { 969 | "prelude-ls": "^1.2.1" 970 | } 971 | }, 972 | "type-fest": { 973 | "version": "0.8.1", 974 | "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", 975 | "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", 976 | "dev": true 977 | }, 978 | "typescript": { 979 | "version": "4.1.5", 980 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.1.5.tgz", 981 | "integrity": "sha512-6OSu9PTIzmn9TCDiovULTnET6BgXtDYL4Gg4szY+cGsc3JP1dQL8qvE8kShTRx1NIw4Q9IBHlwODjkjWEtMUyA==", 982 | "dev": true 983 | }, 984 | "uri-js": { 985 | "version": "4.4.1", 986 | "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", 987 | "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", 988 | "dev": true, 989 | "requires": { 990 | "punycode": "^2.1.0" 991 | } 992 | }, 993 | "v8-compile-cache": { 994 | "version": "2.2.0", 995 | "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.2.0.tgz", 996 | "integrity": "sha512-gTpR5XQNKFwOd4clxfnhaqvfqMpqEwr4tOtCyz4MtYZX2JYhfr1JvBFKdS+7K/9rfpZR3VLX+YWBbKoxCgS43Q==", 997 | "dev": true 998 | }, 999 | "which": { 1000 | "version": "2.0.2", 1001 | "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", 1002 | "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", 1003 | "dev": true, 1004 | "requires": { 1005 | "isexe": "^2.0.0" 1006 | } 1007 | }, 1008 | "word-wrap": { 1009 | "version": "1.2.3", 1010 | "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", 1011 | "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", 1012 | "dev": true 1013 | }, 1014 | "wrappy": { 1015 | "version": "1.0.2", 1016 | "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", 1017 | "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", 1018 | "dev": true 1019 | }, 1020 | "yallist": { 1021 | "version": "4.0.0", 1022 | "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", 1023 | "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", 1024 | "dev": true 1025 | } 1026 | } 1027 | } 1028 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "azure-debug-relay", 3 | "displayName": "Azure Debugging Relay", 4 | "description": "Distributed Debugging Extension for Python", 5 | "version": "0.5.5", 6 | "publisher": "VladKolesnikov-vladkol", 7 | "repository": "https://github.com/vladkol/azure-debug-relay", 8 | "engines": { 9 | "vscode": "^1.34.0" 10 | }, 11 | "icon": "images/azdebugrelay-icon.png", 12 | "categories": [ 13 | "Debuggers" 14 | ], 15 | "keywords": [ 16 | "debug", 17 | "debugging", 18 | "debugger", 19 | "python", 20 | "remote" 21 | ], 22 | "activationEvents": [ 23 | "onStartupFinished", 24 | "workspaceContains:.azrelay.json", 25 | "onDebug" 26 | ], 27 | "main": "./out/extension.js", 28 | "contributes": { 29 | "commands": [ 30 | { 31 | "command": "azdebugrelay.listen", 32 | "title": 
"Start listening" 33 | }, 34 | { 35 | "command": "azdebugrelay.stop", 36 | "title": "Stop" 37 | } 38 | ], 39 | "configuration": { 40 | "type": "object", 41 | "title": "Azure Debugging Relay", 42 | "properties": { 43 | "azure-debug-relay.azrelay-connection-string": { 44 | "type": [ 45 | "string" 46 | ], 47 | "default": "", 48 | "description": "Connection String of an Azure Relay (Hybrid Connection)", 49 | "scope": "machine-overridable" 50 | }, 51 | "azure-debug-relay.azrelay-connection-name": { 52 | "type": [ 53 | "string" 54 | ], 55 | "default": "", 56 | "description": "Azure Relay Hybrid Connection Name", 57 | "scope": "window" 58 | } 59 | } 60 | } 61 | }, 62 | "scripts": { 63 | "vscode:prepublish": "npm run compile", 64 | "compile": "tsc -p ./", 65 | "lint": "eslint . --ext .ts,.tsx", 66 | "watch": "tsc -watch -p ./" 67 | }, 68 | "devDependencies": { 69 | "@types/node": "^12.12.0", 70 | "@types/vscode": "^1.34.0", 71 | "@typescript-eslint/eslint-plugin": "^3.0.2", 72 | "@typescript-eslint/parser": "^3.0.2", 73 | "eslint": "^7.1.0", 74 | "typescript": "^4.0.2" 75 | }, 76 | "dependencies": { 77 | "dotenv": "^8.2.0" 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "azure-debug-relay" 3 | version = "0.5.1" 4 | description = "Azure Debugging Relay: distributed cross-network remote debugging for Python" 5 | authors = ["Vlad Kolesnikov"] 6 | readme = "README.md" 7 | license = "MIT" 8 | keywords = ["debug", "remote", "azure"] 9 | packages = [{include = "azdebugrelay"}] 10 | include = [ 11 | "LICENSE", 12 | ] 13 | 14 | [tool.poetry.dependencies] 15 | python = "^3.6" 16 | debugpy = { version = "^1.2.1" } 17 | 18 | [tool.poetry.dev-dependencies] 19 | pytest = "^6.1" 20 | 21 | #[tool.poetry.scripts] 22 | #azdebugrelay = "azdebugrelay.debug_relay:main" 23 | 24 | [build-system] 25 | requires = ["poetry-core>=1.0.0"] 26 | build-backend = "poetry.core.masonry.api" -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | debugpy 2 | -------------------------------------------------------------------------------- /samples/README.md: -------------------------------------------------------------------------------- 1 | # Azure Debugging Relay for Python: Samples 2 | 3 | 1. [Simple example of remote debugging](simple_demo). 4 | You will need a remote machine, such as a [VM is Azure](https://ms.portal.azure.com/#create/Canonical.UbuntuServer1804LTS-ARM). 5 | 1. [Azure Machine Learning example](azure_ml_simple). 6 | You will need an [Azure Machine Learning](https://ml.azure.com/) workspace. 7 | 1. [Advanced Azure Machine Learning example](azure_ml_advanced). 
8 | -------------------------------------------------------------------------------- /samples/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tarockey/azure-debug-relay/aab0dd147b8416d20cbd5d15eb717c964d1954b9/samples/__init__.py -------------------------------------------------------------------------------- /samples/azure_ml_advanced/README.md: -------------------------------------------------------------------------------- 1 | # Debugging Advanced Azure Machine Learning Pipelines 2 | 3 | This sample demonstrates how to debug a pipeline with [ParallelRunStep](https://docs.microsoft.com/en-us/python/api/azureml-pipeline-steps/azureml.pipeline.steps.parallelrunstep?view=azure-ml-py) and with distributed Tensorflow steps using [MPI](https://docs.microsoft.com/en-us/python/api/azureml-core/azureml.core.runconfig.mpiconfiguration?view=azure-ml-py). 4 | 5 | With ParallelRunStep, to make sure we only debug it on a single node, we check that the `AZ_BATCH_IS_CURRENT_NODE_MASTER` environment variable equals `true`. 6 | 7 | With MPIConfiguration, it depends on the distributed training framework. Ultimately, you need to identify an instance with **rank equal to zero**. 8 | [Look into this guide](https://azure.github.io/azureml-web/docs/cheatsheet/distributed-training/) to understand how to detect the rank. 9 | 10 | For example, in Horovod MPI Tensorflow steps it would be `horovod.tensorflow.rank()`, which must be zero. 11 | 12 | We debug each step using a separate port (5678, 5679, 5680). 13 | VS Code *compound* configuration `Python: AML Advanced 3 Listeners` starts 3 listeners. 14 | With that, we can even debug 3 simultaneously running nodes, 15 | even though in this sample it is only a matter of convenience. 16 | 17 | If you need to debug a distributed step across multiple nodes or processes per node, 18 | you may need to add your own code for picking individual debugging ports for every instance of your training steps. Instead of passing the port number as a parameter, steps can choose ports and "reserve" them by [adding an Azure ML Run property](https://docs.microsoft.com/en-us/azure/machine-learning/how-to-manage-runs?tabs=python#tag-and-find-runs) - if a property with a certain name was already added, the port has been utilized and therefore cannot be used. 19 | Multiple processes per node require the first starting process to initialize a `DebugRelay` object with a list of ports to connect to. 20 | You may need to employ a shared data structure and a locking mechanism to make sure processes know when DebugRelay 21 | has already been initialized (checking for the azrelay process also works).
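For illustration, the node-selection checks described above could look like the following minimal sketch. This is a hypothetical helper, not part of this sample; it assumes `AZ_BATCH_IS_CURRENT_NODE_MASTER` is literally the string `true` on the master node and that `hvd.init()` has already been called:

```python
import os


def should_attach_debugger_on_parallel_run_step() -> bool:
    # ParallelRunStep: attach the debugger only on the Batch pool's master node.
    return os.environ.get("AZ_BATCH_IS_CURRENT_NODE_MASTER", "").lower() == "true"


def should_attach_debugger_on_mpi_step() -> bool:
    # MPI/Horovod: attach the debugger only in the rank-zero process.
    import horovod.tensorflow as hvd
    return hvd.rank() == 0
```

The checks this sample actually uses are in `steps/parallel_step.py` and `steps/mpi/mpi_step.py` below.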
22 | 23 | ## Configuration 24 | 25 | Create an environment (see `.env.sample`) or set the following environment variables: 26 | 27 | * `WORKSPACE_NAME` - Azure Machine Learning Workspace name 28 | (will be created if it doesn't exist) 29 | * `TENANT_ID` - Azure Tenant Id 30 | * `SUBSCRIPTION_ID` - Existing Azure Subscription Id (in Azure Tenant above) 31 | * `RESOURCE_GROUP` - Existing Azure Resource Group (in the subscription above) 32 | * `APP_ID` - Azure Active Directory Registered Application Id (Service Principal) 33 | * `APP_SECRET` - Service Principal Password for the App Id above 34 | * `REGION` - An Azure region to create a workspace in (if one doesn't exist) 35 | * `COMPUTE_NAME` - name of Azure Machine Learning Compute Cluster (will be created if it doesn't exist) 36 | * `PIPELINE_NAME` - name of an Azure Machine Learning Pipeline to publish 37 | * `DEBUG_GLOBAL_AZRELAY_CONNECTION_STRING` - Azure Relay Shared Access Policy connection string 38 | (must have `Listen` and `Send` permissions) 39 | * `DEBUG_GLOBAL_CONNECTION_SECRET_NAME` - AML Key Vault secret name to store the connection string in. 40 | 41 | ## How to run 42 | 43 | 1. Start debugging with the `Python: AML Advanced 3 Listeners` configuration. 44 | 1. Run `python3 samples/azure_ml_advanced/remote_pipeline_demo.py --is-debug true --debug-relay-connection-name <hybrid-connection-name>` 45 | in a terminal **on the same machine**. Here **hybrid-connection-name** is the name of the Azure Relay Hybrid Connection on which the Azure Relay Shared Access Policy above has `Listen` and `Send` permissions. 46 | -------------------------------------------------------------------------------- /samples/azure_ml_advanced/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tarockey/azure-debug-relay/aab0dd147b8416d20cbd5d15eb717c964d1954b9/samples/azure_ml_advanced/__init__.py -------------------------------------------------------------------------------- /samples/azure_ml_advanced/publish_pipeline.py: -------------------------------------------------------------------------------- 1 | # The goal of this script is to create and publish an Azure ML pipeline 2 | 3 | import sys 4 | import os 5 | from azureml.core import Workspace 6 | from azureml.core.authentication import ServicePrincipalAuthentication, InteractiveLoginAuthentication 7 | from azureml.exceptions import WorkspaceException 8 | from azureml.core.compute import AmlCompute 9 | from azureml.core.compute import ComputeTarget 10 | from azureml.exceptions import ComputeTargetException 11 | from msrest.exceptions import HttpOperationError 12 | from azureml.core.datastore import Datastore 13 | from azureml.core.runconfig import Environment, CondaDependencies 14 | from azureml.pipeline.core import Pipeline, PublishedPipeline, PipelineData 15 | from azureml.core import RunConfiguration 16 | from azureml.pipeline.core import PipelineParameter 17 | from azureml.pipeline.steps import PythonScriptStep 18 | from azureml.pipeline.steps import ParallelRunStep, ParallelRunConfig 19 | from dotenv import load_dotenv 20 | from azureml.core import ScriptRunConfig 21 | from azureml.core.runconfig import MpiConfiguration 22 | 23 | load_dotenv() 24 | 25 | # A set of variables that you are required to provide is below.
26 | workspace_name = os.environ.get("WORKSPACE_NAME") 27 | resource_group = os.environ.get("RESOURCE_GROUP") 28 | subscription_id = os.environ.get("SUBSCRIPTION_ID") 29 | tenant_id = os.environ.get("TENANT_ID") 30 | app_id = os.environ.get("APP_ID") 31 | app_secret = os.environ.get("APP_SECRET") 32 | region = os.environ.get("REGION") 33 | compute_name = os.environ.get("COMPUTE_NAME") 34 | pipeline_name = os.environ.get("PIPELINE_NAME") 35 | debug_connection_string = os.environ.get("DEBUG_GLOBAL_AZRELAY_CONNECTION_STRING") 36 | debug_connection_string_secret_name = os.environ.get("DEBUG_GLOBAL_CONNECTION_SECRET_NAME") 37 | 38 | def create_and_publish_pipeline() -> tuple: 39 | """ 40 | Creates and publishes a pipeline 41 | Returns: 42 | PublishedPipeline: a reference to the just-published pipeline 43 | Workspace: a reference to the Azure ML Workspace 44 | """ 45 | print("Getting base pipeline objects") 46 | aml_workspace = get_workspace(workspace_name, 47 | resource_group, 48 | subscription_id, 49 | tenant_id, 50 | app_id, 51 | app_secret, 52 | region, 53 | create_if_not_exist=False) 54 | print(aml_workspace) 55 | 56 | # putting secrets into the workspace Key Vault 57 | aml_workspace.get_default_keyvault().set_secret( 58 | debug_connection_string_secret_name, debug_connection_string) 59 | 60 | # Get Azure machine learning cluster 61 | aml_compute = get_compute(aml_workspace, compute_name) 62 | 63 | print(aml_compute) 64 | 65 | batch_conda_deps = CondaDependencies.create( 66 | conda_packages=[], 67 | pip_packages=[ 68 | 'argparse==1.4.0', 69 | 'azureml-core==1.22.0', 70 | 'azureml-dataset-runtime==1.22.0', 71 | 'debugpy==1.4.0', 72 | 'azure-debug-relay==0.5.1', 73 | ]) 74 | batch_env = Environment(name="train-env") 75 | batch_env.docker.enabled = True 76 | batch_env.python.conda_dependencies = batch_conda_deps 77 | 78 | curated_env_name = 'AzureML-TensorFlow-2.2-CPU' 79 | tf_env = Environment.get(workspace=aml_workspace, name=curated_env_name) 80 | tf_env.save_to_directory("env_tf", overwrite=True) 81 | 82 | tf_env = Environment.load_from_directory("env_tf") 83 | tf_env.name = "traintf" 84 | tf_env.python.conda_dependencies.add_pip_package('argparse==1.4.0') 85 | tf_env.python.conda_dependencies.add_pip_package('debugpy==1.2.1') 86 | tf_env.python.conda_dependencies.add_pip_package( 87 | 'azure-debug-relay==0.5.1') 88 | 89 | print("Create pipeline steps") 90 | steps = get_pipeline( 91 | aml_compute, aml_workspace.get_default_datastore(), batch_env, tf_env) 92 | 93 | print("Publishing pipeline") 94 | published_pipeline = publish_pipeline(aml_workspace, steps, pipeline_name) 95 | 96 | print(f"Pipeline ID: {published_pipeline.id}") 97 | 98 | return published_pipeline, aml_workspace 99 | 100 | 101 | def get_pipeline(aml_compute: ComputeTarget, blob_ds: Datastore, batch_env: Environment, tf_env: Environment) -> list: 102 | """ 103 | Creates pipeline steps 104 | Parameters: 105 | aml_compute (ComputeTarget): a reference to a compute 106 | blob_ds (DataStore): a reference to a datastore 107 | batch_env (Environment): a reference to environment object 108 | tf_env (Environment): a horovod/tf environment 109 | Returns: 110 | list: a list of pipeline steps 111 | """ 112 | 113 | # We need something to generate data by the way 114 | pipeline_files = PipelineData( 115 | "pipeline_files", datastore=blob_ds).as_dataset() 116 | 117 | # Pipeline parameters to use with every run 118 | is_debug = PipelineParameter("is_debug", default_value=False) 119 | relay_connection_name = PipelineParameter( 120 | "debug_relay_connection_name",
default_value="none") 121 | 122 | single_step_config = RunConfiguration() 123 | single_step_config.environment = batch_env 124 | single_step = PythonScriptStep( 125 | name=f"single-step", 126 | script_name="samples/azure_ml_advanced/steps/single_step.py", 127 | source_directory=".", 128 | runconfig=single_step_config, 129 | arguments=[ 130 | "--pipeline-files", pipeline_files, 131 | "--is-debug", is_debug, 132 | "--debug-relay-connection-name", relay_connection_name, 133 | "--debug-port", 5678, 134 | "--debug-relay-connection-string-secret", debug_connection_string_secret_name 135 | ], 136 | inputs=[], 137 | outputs=[pipeline_files], 138 | compute_target=aml_compute, 139 | allow_reuse=False 140 | ) 141 | 142 | output_dir = PipelineData("output_dir") 143 | 144 | parallel_run_config = ParallelRunConfig( 145 | entry_script="samples/azure_ml_advanced/steps/parallel_step.py", 146 | source_directory=".", 147 | mini_batch_size="5", 148 | output_action="summary_only", 149 | environment=batch_env, 150 | compute_target=aml_compute, 151 | error_threshold=10, 152 | run_invocation_timeout=600, # very important for debugging 153 | node_count=2, 154 | process_count_per_node=1) 155 | 156 | parallelrun_step = ParallelRunStep( 157 | name="parallel-run-step", 158 | parallel_run_config=parallel_run_config, 159 | inputs=[pipeline_files], 160 | output=output_dir, 161 | arguments=[ 162 | "--is-debug", is_debug, 163 | "--debug-relay-connection-name", relay_connection_name, 164 | "--debug-port", 5679, 165 | "--debug-relay-connection-string-secret", debug_connection_string_secret_name 166 | ], 167 | allow_reuse=False 168 | ) 169 | 170 | parallelrun_step.run_after(single_step) 171 | 172 | distr_config = MpiConfiguration(process_count_per_node=1, node_count=2) 173 | 174 | src = ScriptRunConfig( 175 | source_directory=".", 176 | script="samples/azure_ml_advanced/steps/mpi/mpi_step_starter.py", 177 | arguments=[ 178 | "--input-ds", pipeline_files, 179 | "--is-debug", is_debug, 180 | "--debug-relay-connection-name", relay_connection_name, 181 | "--debug-port", 5680, 182 | "--debug-relay-connection-string-secret", debug_connection_string_secret_name 183 | ], 184 | compute_target=compute_name, 185 | environment=tf_env, 186 | distributed_job_config=distr_config, 187 | ) 188 | 189 | mpi_step = PythonScriptStep( 190 | name="mpi-step", 191 | script_name="samples/azure_ml_advanced/steps/mpi/mpi_step_starter.py", 192 | arguments=[ 193 | "--input-ds", pipeline_files, 194 | "--is-debug", is_debug, 195 | "--debug-relay-connection-name", relay_connection_name, 196 | "--debug-port", 5680, 197 | "--debug-relay-connection-string-secret", debug_connection_string_secret_name 198 | ], 199 | compute_target=aml_compute, 200 | inputs=[pipeline_files], 201 | outputs=[], 202 | runconfig=src.run_config, 203 | source_directory="." 
204 | ) 205 | 206 | mpi_step.run_after(parallelrun_step) 207 | 208 | print("Pipeline Steps Created") 209 | 210 | steps = [ 211 | single_step, 212 | parallelrun_step, 213 | mpi_step 214 | ] 215 | 216 | print(f"Returning {len(steps)} steps") 217 | return steps 218 | 219 | 220 | def get_workspace( 221 | name: str, 222 | resource_group: str, 223 | subscription_id: str, 224 | tenant_id: str, 225 | app_id: str, 226 | app_secret: str, 227 | region: str, 228 | create_if_not_exist=False, 229 | ): 230 | """ 231 | Returns a reference to a desired workspace 232 | Parameters: 233 | name (str): name of the workspace 234 | resource_group (str): resource group name 235 | subscription_id (str): subscription id 236 | tenant_id (str): tenant id (aad id) 237 | app_id (str): service principal id 238 | app_secret (str): service principal password 239 | region (str): location of the workspace 240 | create_if_not_exist (bool): Default value is False 241 | Returns: 242 | Workspace: a reference to a workspace 243 | """ 244 | 245 | if tenant_id and app_id and app_secret: 246 | auth = ServicePrincipalAuthentication( 247 | tenant_id=tenant_id, 248 | service_principal_id=app_id, 249 | service_principal_password=app_secret, 250 | ) 251 | else: 252 | auth = InteractiveLoginAuthentication() 253 | 254 | try: 255 | aml_workspace = Workspace.get( 256 | name=name, 257 | subscription_id=subscription_id, 258 | resource_group=resource_group, 259 | auth=auth, 260 | ) 261 | 262 | except WorkspaceException as exp_var: 263 | print("Error while retrieving Workspace...: %s", exp_var) 264 | if create_if_not_exist: 265 | print("Creating AzureML Workspace: %s", name) 266 | aml_workspace = Workspace.create( 267 | name=name, 268 | subscription_id=subscription_id, 269 | resource_group=resource_group, 270 | create_resource_group=True, 271 | location=region, 272 | auth=auth, 273 | ) 274 | print("Workspace %s created.", aml_workspace.name) 275 | else: 276 | sys.exit(-1) 277 | 278 | return aml_workspace 279 | 280 | 281 | def get_compute(workspace: Workspace, compute_name: str, vm_size: str = "Standard_DS3_v2", vm_priority: str = "dedicated", min_nodes: int = 0, max_nodes: int = 4, 282 | scale_down: int = 600): 283 | """ 284 | Returns an existing compute or creates a new one. 
285 | Args: 286 | workspace: Workspace: AzureML workspace 287 | compute_name: str: name of the compute 288 | vm_size: str: VM size 289 | vm_priority: str: low priority or dedicated cluster 290 | min_nodes: int: minimum number of nodes 291 | max_nodes: int: maximum number of nodes in the cluster 292 | scale_down: int: number of seconds to wait before scaling down the cluster 293 | Returns: 294 | ComputeTarget: a reference to compute 295 | """ 296 | 297 | try: 298 | if compute_name in workspace.compute_targets: 299 | compute_target = workspace.compute_targets[compute_name] 300 | if compute_target and isinstance(compute_target, AmlCompute): 301 | print("Found existing compute target %s so using it.", compute_name) 302 | else: 303 | compute_config = AmlCompute.provisioning_configuration(vm_size=vm_size, 304 | vm_priority=vm_priority, 305 | min_nodes=min_nodes, 306 | max_nodes=max_nodes, 307 | idle_seconds_before_scaledown=scale_down) 308 | 309 | compute_target = ComputeTarget.create( 310 | workspace, compute_name, compute_config) 311 | compute_target.wait_for_completion(show_output=True) 312 | return compute_target 313 | except ComputeTargetException as ex_var: 314 | print('An error occurred trying to provision compute: %s', str(ex_var)) 315 | sys.exit(-1) 316 | 317 | 318 | def get_blob_datastore(workspace: Workspace, data_store_name: str, storage_name: str, storage_key: str, 319 | container_name: str): 320 | """ 321 | Returns a reference to a datastore 322 | Parameters: 323 | workspace (Workspace): existing AzureML Workspace object 324 | data_store_name (string): data store name 325 | storage_name (string): blob storage account name 326 | storage_key (string): blob storage account key 327 | container_name (string): container name 328 | Returns: 329 | Datastore: a reference to datastore 330 | """ 331 | try: 332 | blob_datastore = Datastore.get(workspace, data_store_name) 333 | print("Found Blob Datastore with name: %s", data_store_name) 334 | except HttpOperationError: 335 | blob_datastore = Datastore.register_azure_blob_container( 336 | workspace=workspace, 337 | datastore_name=data_store_name, 338 | account_name=storage_name, # Storage account name 339 | container_name=container_name, # Name of Azure blob container 340 | account_key=storage_key) # Storage account key 341 | print("Registered blob datastore with name: %s", data_store_name) 342 | return blob_datastore 343 | 344 | 345 | def publish_pipeline(aml_workspace, steps, pipeline_name) -> PublishedPipeline: 346 | """ 347 | Publishes a pipeline to the AzureML Workspace 348 | Parameters: 349 | aml_workspace (Workspace): existing AzureML Workspace object 350 | steps (list): list of PipelineSteps 351 | pipeline_name (string): name of the pipeline to be published 352 | build_id (string): DevOps Pipeline Build Id 353 | Returns: 354 | PublishedPipeline 355 | """ 356 | train_pipeline = Pipeline(workspace=aml_workspace, steps=steps) 357 | train_pipeline.validate() 358 | published_pipeline = train_pipeline.publish( 359 | name=pipeline_name, 360 | description="Model training/retraining pipeline") 361 | print( 362 | f'Published pipeline: {published_pipeline.name}') 363 | 364 | return published_pipeline 365 | -------------------------------------------------------------------------------- /samples/azure_ml_advanced/remote_pipeline_demo.py: -------------------------------------------------------------------------------- 1 | # a file to test pipeline debugging things 2 | import argparse 3 | from publish_pipeline import create_and_publish_pipeline 4 | 5 
| 6 | def main(): 7 | """ 8 | CLI entry point 9 | """ 10 | published_pipeline, aml_workspace = create_and_publish_pipeline() 11 | 12 | experiment_name = "debug_experiment" 13 | 14 | parser = argparse.ArgumentParser() 15 | parser.add_argument("--is-debug", type=bool, required=False, default=False) 16 | parser.add_argument("--debug-relay-connection-name", 17 | type=str, required=False, default="") 18 | options, _ = parser.parse_known_args() 19 | 20 | pipeline_parameters = { 21 | "is_debug": options.is_debug 22 | } 23 | if options.is_debug: 24 | if options.debug_relay_connection_name == "": 25 | raise ValueError("Hybrid connection name cannot be empty!") 26 | 27 | pipeline_parameters.update({ 28 | "debug_relay_connection_name": options.debug_relay_connection_name 29 | }) 30 | 31 | published_pipeline.submit(workspace=aml_workspace, experiment_name=experiment_name, 32 | pipeline_parameters=pipeline_parameters, 33 | continue_on_step_failure=True) 34 | 35 | 36 | if __name__ == '__main__': 37 | main() 38 | -------------------------------------------------------------------------------- /samples/azure_ml_advanced/steps/amldebugutils/__init__.py: -------------------------------------------------------------------------------- 1 | from .debugutils import start_remote_debugging, start_remote_debugging_from_args 2 | 3 | __all__ = [ 4 | "start_remote_debugging", 5 | "start_remote_debugging_from_args" 6 | ] 7 | 8 | -------------------------------------------------------------------------------- /samples/azure_ml_advanced/steps/amldebugutils/debugutils.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from copy import Error 3 | import logging 4 | from azureml.core import Run 5 | from azdebugrelay import DebugRelay, DebugMode, debugpy_connect_with_timeout 6 | 7 | 8 | def start_remote_debugging( 9 | debug_relay_connection_string_secret: str, 10 | debug_relay_connection_name:str, 11 | debug_port: int, 12 | debugpy_connect_timeout: float = 15 13 | ): 14 | # get connection string from the workspace Key Vault 15 | run = Run.get_context() 16 | connection_string = run.get_secret( 17 | debug_relay_connection_string_secret) 18 | if connection_string is None or connection_string == "": 19 | err_msg = "Connection string for Azure Relay Hybrid Connection is missing in Key Vault." 20 | logging.fatal(err_msg) 21 | raise ValueError(err_msg) 22 | 23 | print("Remote debugging has been activated. 
24 | # your Hybrid Connection name 25 | relay_connection_name = debug_relay_connection_name 26 | debug_mode = DebugMode.Connect 27 | hybrid_connection_url = None # can stay None because a connection string is used 28 | host = "127.0.0.1" # local hostname or ip address the debugger starts on 29 | port = debug_port 30 | 31 | debug_relay = DebugRelay( 32 | connection_string, relay_connection_name, debug_mode, hybrid_connection_url, host, port) 33 | debug_relay.open() 34 | if debug_relay.is_running(): 35 | print(f"Starting debugpy session on {host}:{port} with timeout {debugpy_connect_timeout} seconds.") 36 | if debugpy_connect_with_timeout(host, port, connect_timeout_seconds=debugpy_connect_timeout): 37 | print("Debugpy is connected!") 38 | return True 39 | else: 40 | print("Could not connect to the debugger!") 41 | return False 42 | else: 43 | err_msg = "Azure Relay Bridge is not running. Cannot connect to a remote debugger." 44 | print(err_msg) 45 | logging.fatal(err_msg) 46 | raise RuntimeError(err_msg) 47 | 48 | 49 | def start_remote_debugging_from_args(ignore_debug_flag: bool = False) -> bool: 50 | parser = argparse.ArgumentParser() 51 | parser.add_argument("--is-debug", type=str, required=True) 52 | parser.add_argument("--debug-relay-connection-name", 53 | type=str, required=True) 54 | parser.add_argument('--debug-port', action='store', type=int, 55 | default=5678, required=False) 56 | parser.add_argument("--debug-relay-connection-string-secret", 57 | type=str, required=True) 58 | options, _ = parser.parse_known_args() 59 | 60 | if options.is_debug.lower() != "true" and not ignore_debug_flag: 61 | return False 62 | 63 | if options.debug_relay_connection_string_secret == ""\ 64 | or options.debug_relay_connection_name == ""\ 65 | or options.debug_relay_connection_name.lower() == "none": 66 | err_msg = "Azure Relay connection string secret name or hybrid connection name is empty."
67 | logging.fatal(err_msg) 68 | raise ValueError(err_msg) 69 | 70 | return start_remote_debugging( 71 | options.debug_relay_connection_string_secret, 72 | options.debug_relay_connection_name, 73 | options.debug_port) 74 | -------------------------------------------------------------------------------- /samples/azure_ml_advanced/steps/mpi/__init__.py: -------------------------------------------------------------------------------- 1 | from .mpi_step import * 2 | -------------------------------------------------------------------------------- /samples/azure_ml_advanced/steps/mpi/mpi_step.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import horovod.tensorflow as hvd 3 | import tensorflow as tf 4 | import debugpy 5 | from samples.azure_ml_advanced.steps.amldebugutils import start_remote_debugging_from_args 6 | 7 | 8 | hvd.init() 9 | # config = tf.ConfigProto() 10 | 11 | # config.gpu_options.allow_growth = True 12 | # config.gpu_options.visible_device_list = str(hvd.local_rank()) 13 | 14 | # K.set_session(tf.Session(config=config)) 15 | 16 | 17 | def train(): 18 | print("Here I train!") 19 | 20 | 21 | def main(): 22 | 23 | print("Parsing parameters") 24 | parser = argparse.ArgumentParser() 25 | parser.add_argument("--input-ds", type=str, required=True) 26 | parser.add_argument('--is-debug', required=True, type=str) 27 | args, _ = parser.parse_known_args() 28 | 29 | print(f"Input folder {args.input_ds}") 30 | 31 | print("Horovod size:", hvd.size()) 32 | print("Horovod rank:", hvd.rank()) 33 | 34 | if args.is_debug.lower() == 'true' and hvd.rank() == 0: 35 | print("Let's start debugging") 36 | if start_remote_debugging_from_args(): 37 | debugpy.breakpoint() 38 | # the breakpoint will hit on the train() call below 39 | 40 | train() 41 | 42 | 43 | if __name__ == "__main__": 44 | main() 45 | -------------------------------------------------------------------------------- /samples/azure_ml_advanced/steps/mpi/mpi_step_starter.py: -------------------------------------------------------------------------------- 1 | from samples.azure_ml_advanced.steps.mpi import mpi_step 2 | 3 | 4 | def main(): 5 | mpi_step.main() 6 | 7 | 8 | if __name__ == "__main__": 9 | main() 10 | -------------------------------------------------------------------------------- /samples/azure_ml_advanced/steps/parallel_step.py: -------------------------------------------------------------------------------- 1 | import os 2 | import argparse 3 | import debugpy 4 | from samples.azure_ml_advanced.steps.amldebugutils import start_remote_debugging_from_args 5 | 6 | 7 | def init(): 8 | global is_debug 9 | 10 | parser = argparse.ArgumentParser(description="Parallel Step parameters") 11 | parser.add_argument('--is-debug', required=True, type=str) 12 | args, _ = parser.parse_known_args() 13 | 14 | is_debug = False 15 | 16 | # debug mode and on the master node 17 | if args.is_debug.lower() == 'true' and bool(os.environ.get('AZ_BATCH_IS_CURRENT_NODE_MASTER')): 18 | is_debug = True 19 | print("This is a master node. Starting a debugging session.")
20 | start_remote_debugging_from_args() 21 | 22 | 23 | def run(input_rows): 24 | """ 25 | Work with files 26 | """ 27 | if is_debug: 28 | print("Debugging a parallel step") 29 | debugpy.breakpoint() 30 | 31 | lines = [] 32 | 33 | for file_item in input_rows: 34 | print(f"Working with file {file_item}") 35 | 36 | lines.append(file_item) 37 | 38 | return lines 39 | -------------------------------------------------------------------------------- /samples/azure_ml_advanced/steps/single_step.py: -------------------------------------------------------------------------------- 1 | # This is a basic step that runs on an Azure ML compute target 2 | import argparse 3 | import os 4 | import debugpy 5 | from samples.azure_ml_advanced.steps.amldebugutils import start_remote_debugging_from_args 6 | 7 | 8 | def main(): 9 | 10 | print("Parsing parameters") 11 | parser = argparse.ArgumentParser() 12 | parser.add_argument("--pipeline-files", type=str, required=True) 13 | parser.add_argument('--is-debug', type=str, required=True) 14 | args, _ = parser.parse_known_args() 15 | 16 | print(f"Output folder {args.pipeline_files}") 17 | 18 | if args.is_debug.lower() == 'true': 19 | print("Let's start debugging") 20 | if start_remote_debugging_from_args(): 21 | debugpy.breakpoint() 22 | print("We are debugging!") 23 | else: 24 | print("Could not connect to a debugger!") 25 | 26 | os.makedirs(args.pipeline_files, exist_ok=True) 27 | 28 | # Generate 100 files to use in the parallel run step later 29 | for i in range(0, 100): 30 | file_path = os.path.join(args.pipeline_files, f"{i}.txt") 31 | with open(file_path, "w") as f_handler: 32 | f_handler.write(f"Here is the content of the file #{i}") 33 | print("Step has been completed") 34 | 35 | 36 | if __name__ == "__main__": 37 | main() 38 | -------------------------------------------------------------------------------- /samples/azure_ml_simple/README.md: -------------------------------------------------------------------------------- 1 | # Simple Azure Machine Learning debugging example 2 | 3 | 1. Set `AZRELAY_CONNECTION_STRING` and `AZRELAY_CONNECTION_NAME` environment variables 4 | or create a `.azrelay.json` configuration file in the workspace/repo directory (see the example below). 5 | 1. Create a `config.json` [Azure ML Workspace configuration file](https://docs.microsoft.com/en-us/azure/machine-learning/how-to-configure-environment#workspace) 6 | in this sample's directory or in the workspace/repo directory. 7 | 1. Start debugging with the `Python: Listen 5678` configuration. 8 | 1. Run `python3 samples/azure_ml_simple/deploy_and_run.py` in a terminal **on the same machine**. 9 | It will deploy an AML pipeline and run it on a remote AML Compute Target.
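The `.azrelay.json` file mentioned in step 1 only needs the two keys that `deploy_and_run.py` reads; here is a minimal sketch with placeholder values (substitute your own Hybrid Connection settings):

```json
{
  "AZRELAY_CONNECTION_STRING": "<your Azure Relay Hybrid Connection connection string>",
  "AZRELAY_CONNECTION_NAME": "<your Hybrid Connection name>"
}
```

Since the connection string is a secret, do not commit this file to version control.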
10 | -------------------------------------------------------------------------------- /samples/azure_ml_simple/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tarockey/azure-debug-relay/aab0dd147b8416d20cbd5d15eb717c964d1954b9/samples/azure_ml_simple/__init__.py -------------------------------------------------------------------------------- /samples/azure_ml_simple/deploy_and_run.py: -------------------------------------------------------------------------------- 1 | #pylint: disable=abstract-class-instantiated 2 | import os 3 | import json 4 | import hashlib # for MD5 5 | import azureml.core as amlcore 6 | from azureml.core import Workspace, ComputeTarget, Experiment 7 | from azureml.core.authentication import InteractiveLoginAuthentication 8 | from azureml.core.compute import ComputeTarget, AmlCompute 9 | from azureml.core.compute_target import ComputeTargetException 10 | from azureml.core.runconfig import RunConfiguration 11 | from azureml.pipeline.steps import PythonScriptStep 12 | from azureml.pipeline.core import Pipeline, StepSequence 13 | 14 | 15 | # Connection string for Azure Relay Hybrid Connection 16 | azrelay_connection_string = None 17 | # Hybrid Connection name 18 | azrelay_connection_name = None 19 | # Debugging port 20 | debug_port = 5678 21 | 22 | # AML compute cluster or instance name. 23 | cluster_name = "Debug-Std-DS3v2" 24 | # Experiment name 25 | experiment_name = "Debug-Experiment-1" 26 | 27 | # If azrelay_connection_string or azrelay_connection_name is None, 28 | # trying to get it from .azrelay.json or environment variables 29 | config_file_name = "./.azrelay.json" 30 | if azrelay_connection_string is None or azrelay_connection_name is None: 31 | if os.path.exists(config_file_name): 32 | with open(config_file_name) as cfg_file: 33 | config = json.load(cfg_file) 34 | azrelay_connection_name = config["AZRELAY_CONNECTION_NAME"] 35 | azrelay_connection_string = config["AZRELAY_CONNECTION_STRING"] 36 | else: 37 | azrelay_connection_name = os.environ.get("AZRELAY_CONNECTION_NAME") 38 | azrelay_connection_string = os.environ.get("AZRELAY_CONNECTION_STRING") 39 | 40 | if azrelay_connection_string is None or azrelay_connection_name is None: 41 | print("Azure Relay Hybrid Connection is not configured") 42 | exit(1) 43 | 44 | # load workspace from config.json file 45 | this_script_dir = os.path.dirname(os.path.abspath(__file__)) 46 | interactive_auth = InteractiveLoginAuthentication() 47 | try: 48 | workspace = Workspace.from_config(auth=interactive_auth) 49 | except: 50 | try: 51 | config_path = os.path.join(this_script_dir, "config.json") 52 | workspace = Workspace.from_config(config_path, auth=interactive_auth) 53 | except Exception as ex: 54 | print(f"Cannot get a workspace: {ex}") 55 | exit() 56 | 57 | print('Workspace name: ' + workspace.name, 58 | 'Azure region: ' + workspace.location, 59 | 'Subscription id: ' + workspace.subscription_id, 60 | 'Resource group: ' + workspace.resource_group, sep='\n') 61 | 62 | # Getting an Azure ML Compute Target 63 | try: 64 | compute_target = ComputeTarget(workspace=workspace, name=cluster_name) 65 | print('Found existing compute target') 66 | except ComputeTargetException: 67 | print('Creating a new compute target...') 68 | compute_config = AmlCompute.provisioning_configuration(vm_size='STANDARD_D3_V2', 69 | max_nodes=1) 70 | 71 | # create the cluster 72 | compute_target = ComputeTarget.create( 73 | workspace, cluster_name, compute_config) 74 | 75 | # 
can poll for a minimum number of nodes and for a specific timeout. 76 | # if no min node count is provided it uses the scale settings for the cluster 77 | compute_target.wait_for_completion( 78 | show_output=True, min_node_count=None, timeout_in_minutes=20) 79 | 80 | # store the connection string in AML workspace Key Vault 81 | # (secret name is 'debugrelay-' + MD5(azrelay_connection_string) ) 82 | hybrid_connection_string_secret =\ 83 | f"debugrelay-{hashlib.md5(azrelay_connection_string.encode('utf-8')).hexdigest()}" 84 | workspace.get_default_keyvault().set_secret(hybrid_connection_string_secret, azrelay_connection_string) 85 | 86 | # Configuring a PythonScriptStep with a RunConfiguration 87 | # that includes debugpy and azure-debug-relay 88 | run_config = RunConfiguration() 89 | conda_dependencies = run_config.environment.python.conda_dependencies 90 | conda_dependencies.add_conda_package("pip") 91 | conda_dependencies.add_pip_package("azureml-sdk==" + amlcore.__version__) 92 | conda_dependencies.add_pip_package("debugpy==1.2.1") 93 | conda_dependencies.add_pip_package("azure-debug-relay==0.5.1") 94 | 95 | train_step = PythonScriptStep(name='Train Step with Debugging', 96 | script_name="samples/azure_ml_simple/steps/train.py", 97 | arguments=[ 98 | "--debug", "attach", 99 | # passing connection string secret's name, not the connection string itself 100 | "--debug-relay-connection-string-secret", hybrid_connection_string_secret, 101 | "--debug-relay-connection-name", azrelay_connection_name, 102 | "--debug-port", debug_port 103 | ], 104 | source_directory=".", 105 | compute_target=compute_target, 106 | runconfig=run_config, 107 | allow_reuse=False) 108 | 109 | # Submitting an Azure ML Pipeline Run 110 | step_sequence = StepSequence(steps=[train_step]) 111 | pipeline = Pipeline(workspace, steps=step_sequence) 112 | experiment = Experiment(workspace=workspace, name=experiment_name) 113 | run = experiment.submit(pipeline) 114 | -------------------------------------------------------------------------------- /samples/azure_ml_simple/steps/train.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | import debugpy 4 | from azureml.core import Run 5 | from azdebugrelay import DebugRelay, DebugMode, debugpy_connect_with_timeout 6 | 7 | 8 | def _main(): 9 | parser = argparse.ArgumentParser() 10 | parser.add_argument('--debug', action='store', 11 | default="", choices=['attach', 'none'], required=False) 12 | parser.add_argument('--debug-relay-connection-string-secret', action='store', 13 | default="", required=False) 14 | parser.add_argument('--debug-relay-connection-name', action='store', 15 | default="", required=False) 16 | parser.add_argument('--debug-port', action='store', type=int, 17 | default=5678, required=False) 18 | options, _ = parser.parse_known_args() 19 | 20 | run = Run.get_context() 21 | debug_relay = None 22 | debug = False 23 | 24 | if options.debug == "attach": 25 | if options.debug_relay_connection_string_secret == "" or options.debug_relay_connection_name == "": 26 | err_msg = "Azure Relay connection string secret name or connection name is empty." 
27 | logging.fatal(err_msg) 28 | raise ValueError(err_msg) 29 | # get connection string from the workspace Key Vault 30 | connection_string = run.get_secret( 31 | options.debug_relay_connection_string_secret) 32 | if connection_string is None or connection_string == "": 33 | err_msg = "Connection string for Azure Relay Hybrid Connection is missing in Key Vault." 34 | logging.fatal(err_msg) 35 | raise ValueError(err_msg) 36 | debug = True 37 | relay_connection_name = options.debug_relay_connection_name # your Hybrid Connection name 38 | debug_mode = DebugMode.Connect 39 | hybrid_connection_url = None # can stay None because a connection string is used 40 | host = "127.0.0.1" # local hostname or ip address the debugger starts on 41 | port = options.debug_port 42 | debugpy_timeout = 15 43 | 44 | debug_relay = DebugRelay( 45 | connection_string, relay_connection_name, debug_mode, hybrid_connection_url, host, port) 46 | debug_relay.open() 47 | print(f"Starting debugpy session on {host}:{port}") 48 | if debugpy_connect_with_timeout(host, port, debugpy_timeout): 49 | print("Debugpy is connected!") 50 | else: 51 | print("Debugpy could not connect!") 52 | 53 | train_job(debug=debug) 54 | 55 | if debug_relay is not None: 56 | debug_relay.close() 57 | 58 | 59 | def train_job(debug: bool = False): 60 | """This is supposed to be a function with training code. 61 | We have a breakpoint here! 62 | 63 | Args: 64 | debug (bool, optional): Debugging mode. Defaults to False. 65 | """ 66 | if debug: 67 | debugpy.breakpoint() 68 | print(f"Doing my work. Debug mode is {debug}.") 69 | 70 | 71 | ########################## 72 | if __name__ == '__main__': 73 | _main() 74 | -------------------------------------------------------------------------------- /samples/simple_demo/README.md: -------------------------------------------------------------------------------- 1 | # Simple remote debugging example 2 | 3 | 1. Set `AZRELAY_CONNECTION_STRING` and `AZRELAY_CONNECTION_NAME` environment variables 4 | or create a `.azrelay.json` configuration file in the workspace/repo directory. 5 | 1. Start debugging with the `Python: Listen 5678` configuration. 6 | 1. Repeat step 1 **on a remote machine**. 7 | 1. **On that remote machine**, clone this repo and run `python3 samples/simple_demo/remote_server_demo.py --debug attach`. 8 | -------------------------------------------------------------------------------- /samples/simple_demo/remote_server_demo.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import argparse 4 | import platform 5 | import pathlib 6 | from signal import signal, SIGINT 7 | import debugpy 8 | 9 | ### This block is only needed when debugging from the samples/simple_demo directory. 10 | ### You don't need it when the azdebugrelay module is installed. 11 | import pkg_resources 12 | _AZDEBUGRELAY_NAME = "azdebugrelay" 13 | _required_azdebugrelay = {_AZDEBUGRELAY_NAME} 14 | _installed_azdebugrelay = {pkg.key for pkg in pkg_resources.working_set} 15 | _missing_azdebugrelay = _required_azdebugrelay - _installed_azdebugrelay 16 | 17 | if _missing_azdebugrelay: 18 | _workspace_dir = pathlib.Path(__file__).parent.parent.parent.absolute() 19 | _azdebugrelay_dir = os.path.dirname( 20 | os.path.join(_workspace_dir, "azdebugrelay")) 21 | sys.path.insert(0, _azdebugrelay_dir) 22 | ############### 23 | 24 | from azdebugrelay import DebugRelay, DebugMode, debugpy_connect_with_timeout 25 | g_debug_relay = None 26 | 27 | def do_work(): 28 | """Just a demo function. We debug it.
29 | """ 30 | print("Hello world!") 31 | plat = platform.platform() 32 | debugpy.breakpoint() # you can put a real VSCode breakpoint 33 | print(plat) # the debugger will stop here because debugpy.breakpoint() call above 34 | 35 | 36 | def _signal_handler(signal_received, frame): 37 | global g_debug_relay 38 | if g_debug_relay is not None: 39 | g_debug_relay.close() 40 | g_debug_relay = None 41 | exit(0) 42 | 43 | 44 | def _check_for_debugging(args) -> DebugRelay: 45 | """An over-engineered debugger initialization function. 46 | Parses command-line arguments looking for `--debug` option. 47 | If found option's value defines debugging behaviour: 48 | * `attach` - connects to a remote debugger (your VS Code in `listen` mode) 49 | * `listen` - starts listening for a remote debugger to connect 50 | * `none` (default) - do not start a DebugRelay 51 | 52 | Args: 53 | args: Command line arguments 54 | 55 | Returns: 56 | DebugRelay: running DebugRelay object 57 | """ 58 | debug_relay = None 59 | parser = argparse.ArgumentParser() 60 | parser.add_argument('--debug', action='store', 61 | default="none", choices=['attach', 'listen', 'none'], required=False) 62 | options, _ = parser.parse_known_args(args=args) 63 | if options.debug != "none": 64 | print(f"Starting DebugRelay in `{options.debug}` mode.") 65 | 66 | config_file = "./.azrelay.json" 67 | 68 | mode = DebugMode.Connect if options.debug == "attach" else DebugMode.WaitForConnection 69 | if os.path.exists(config_file): 70 | debug_relay = DebugRelay.from_config(config_file, debug_mode=mode) 71 | else: 72 | debug_relay = DebugRelay.from_environment(debug_mode=mode) 73 | 74 | # you can also create DebugRelay directly by providing connection string and the rest of its configuration: 75 | # debug_relay = DebugRelay(access_key_or_connection_string, relay_connection_name, debug_mode, hybrid_connection_url, host, ports) 76 | 77 | if debug_relay is None: 78 | print("Cannot create Debugging Relay due to missing configuration.") 79 | return None 80 | 81 | DebugRelay.kill_relays() 82 | debug_relay.open() 83 | 84 | if debug_relay.is_running(): 85 | print("Connecting to the remote host...") 86 | if options.debug == "attach": 87 | debugpy_connect_with_timeout("127.0.0.1", 5678, 15) 88 | else: 89 | debugpy.listen(("127.0.0.1", 5678)) 90 | debugpy.wait_for_client() 91 | print("Connected!!!") 92 | return debug_relay 93 | 94 | 95 | def _main(args): 96 | """CLI entry point 97 | 98 | Args: 99 | args: Command Line arguments 100 | """ 101 | global g_debug_relay 102 | g_debug_relay = _check_for_debugging(args) 103 | signal(SIGINT, _signal_handler) 104 | 105 | do_work() 106 | 107 | if g_debug_relay is not None: 108 | g_debug_relay.close() 109 | 110 | 111 | if __name__ == '__main__': 112 | _main(sys.argv[1:]) 113 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "target": "es2019", 5 | "lib": [ 6 | "ES2019", "dom" 7 | ], 8 | "outDir": "out", 9 | "sourceMap": true, 10 | "strict": true, 11 | "rootDir": "vscode-extension" 12 | }, 13 | "exclude": [ 14 | "node_modules", 15 | "build", 16 | ".vscode-test" 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /vscode-extension/extension.ts: -------------------------------------------------------------------------------- 1 | import * as vscode from 'vscode'; 2 | var path = require('path') 3 | 4 | 
interface Listener { 5 | host: string; 6 | port: string; 7 | } 8 | 9 | var taskNamePrefix = "AzureRelayBridge_" 10 | var listeners: Array<Listener> = new Array<Listener>() 11 | var initialized_listeners = 0 12 | var azDebugRelayTaskExecution: any 13 | var hybridConnectionName = "" 14 | var hybridConnectionConnectionString = "" 15 | var hasCredentialsFile = false 16 | 17 | function readConfig(){ 18 | var config = vscode.workspace.getConfiguration("azure-debug-relay") 19 | if (config) { 20 | hybridConnectionConnectionString = config.get("azrelay-connection-string") as string 21 | hybridConnectionName = config.get("azrelay-connection-name") as string 22 | } 23 | } 24 | 25 | 26 | function getConfigOption(): string { 27 | var option = "" 28 | 29 | if (hybridConnectionName && hybridConnectionName.length > 0 && 30 | hybridConnectionConnectionString && hybridConnectionConnectionString.length > 0) { 31 | option = `--connection-string \"${hybridConnectionConnectionString}\" --connection-name \"${hybridConnectionName}\"` 32 | } 33 | else if (hasCredentialsFile) { 34 | option = "--config-file .azrelay.json" 35 | } 36 | 37 | return option 38 | } 39 | 40 | function getPythonPath(): string { 41 | var pythonPath = "python" 42 | var pythonConfig = vscode.workspace.getConfiguration("python") 43 | if (pythonConfig !== undefined) { 44 | var pythonPathResult = pythonConfig.get("pythonPath") 45 | if (pythonPathResult !== undefined) { 46 | pythonPath = pythonPathResult as string 47 | } 48 | } 49 | 50 | return pythonPath 51 | } 52 | 53 | function queueRelay(context: vscode.ExtensionContext, host: string, port: any) { 54 | listeners.push({host: host, port: String(port)}) 55 | } 56 | 57 | function startRelayIfCan(context: vscode.ExtensionContext) { 58 | initialized_listeners = 0 59 | if (listeners.length > 0) { 60 | hasCredentialsFile = false 61 | vscode.workspace.findFiles(".azrelay.json").then((files: any) => { 62 | hasCredentialsFile = (files != null && files.length > 0) 63 | }).then(async () => { 64 | var options = getConfigOption() 65 | if (options && options.length > 0) { 66 | var host = listeners[0].host // single host IP, only taken from the first listener 67 | var ports = listeners.map(l => l.port) 68 | await startRelay(context, options, host, ports) 69 | } 70 | else { 71 | listeners = new Array<Listener>() 72 | } 73 | }); 74 | } 75 | } 76 | 77 | function startRelay(context: vscode.ExtensionContext, credentialOptions: string, host: string, ports: string[]): Thenable<void> | null { 78 | var portsString = ports.join("_") 79 | var portsArgString = ports.join(",") 80 | var taskType = `azdebugrelay_${host}_${portsString}`; 81 | 82 | var pythonScriptPath = path.join(context.extensionPath, "azdebugrelay", "debug_relay.py") 83 | var pythonPath = getPythonPath() 84 | var cmdLine = `"${pythonPath}" "${pythonScriptPath}" --no-kill --mode listen ` + 85 | `${credentialOptions} ` + 86 | `--ports ${portsArgString} --host ${host}` 87 | var isWindows = process.platform === "win32"; 88 | if (isWindows == true) { 89 | cmdLine = `"${cmdLine}"` 90 | } 91 | var execution = 92 | new vscode.ShellExecution(`${cmdLine}`); 93 | var task_name = `${taskNamePrefix}${host}_${portsString}` 94 | var task = new vscode.Task({ type: taskType }, vscode.TaskScope.Workspace, 95 | task_name, "Azure Relay Bridge", execution) 96 | 97 | if(azDebugRelayTaskExecution == null) 98 | { 99 | azDebugRelayTaskExecution = "starting..."
100 | return vscode.tasks.executeTask(task).then((exec: vscode.TaskExecution) => { 101 | azDebugRelayTaskExecution = exec 102 | }); 103 | } 104 | else 105 | { 106 | return null; 107 | } 108 | } 109 | 110 | function stopRelay(_: vscode.ExtensionContext){ 111 | // We always terminate all debugging tasks 112 | try { 113 | if(azDebugRelayTaskExecution != null){ 114 | var execution = azDebugRelayTaskExecution as vscode.TaskExecution 115 | azDebugRelayTaskExecution = null 116 | if(execution != null){ 117 | execution.terminate(); 118 | } 119 | } 120 | } 121 | catch { } 122 | 123 | } 124 | 125 | export function activate(context: vscode.ExtensionContext) { 126 | console.log('Azure Relay Bridge extension activated.'); 127 | 128 | readConfig() 129 | vscode.workspace.onDidChangeConfiguration((_: any) => { 130 | readConfig() 131 | }) 132 | 133 | vscode.tasks.onDidEndTask((taskEnd: vscode.TaskEndEvent) => { 134 | if(taskEnd.execution.task.name.startsWith(taskNamePrefix)) 135 | azDebugRelayTaskExecution = null 136 | }); 137 | 138 | 139 | vscode.debug.onDidTerminateDebugSession((_: vscode.DebugSession) => { 140 | //stopRelay(context); 141 | }); 142 | 143 | vscode.debug.onDidReceiveDebugSessionCustomEvent(async (event: vscode.DebugSessionCustomEvent) => { 144 | //if (event.event == "debugpyWaitingForServer") { 145 | // startRelayIfCan(context); 146 | //} 147 | }); 148 | 149 | vscode.debug.registerDebugAdapterTrackerFactory('python', { 150 | createDebugAdapterTracker(_: vscode.DebugSession) { 151 | return { 152 | onWillReceiveMessage: (message: any) => { 153 | if (message.type !== undefined && message.command !== undefined) 154 | { 155 | if (message.type == "request") { 156 | if (message.command == "initialize") { 157 | initialized_listeners++ 158 | } 159 | else if (message.command == "attach") { 160 | if (message.arguments !== undefined && message.arguments.listen !== undefined) { 161 | queueRelay(context, message.arguments.listen.host, message.arguments.listen.port); 162 | } 163 | else { 164 | initialized_listeners-- 165 | } 166 | } 167 | else if (message.command == "disconnect") { 168 | if (listeners.length > 0) { 169 | listeners.pop() 170 | } 171 | if (listeners.length == 0) { 172 | initialized_listeners = 0 173 | stopRelay(context); 174 | } 175 | } 176 | else if (message.command == "launch") { 177 | initialized_listeners-- 178 | } 179 | if (initialized_listeners > 0 && initialized_listeners == listeners.length) { 180 | startRelayIfCan(context) 181 | } 182 | } 183 | } 184 | } 185 | }; 186 | } 187 | }); 188 | 189 | } --------------------------------------------------------------------------------