├── .git-blame-ignore-revs ├── .github ├── dependabot.yml └── workflows │ ├── ci.yml │ ├── downstream.yml │ ├── enforce-label.yml │ ├── nightly.yml │ ├── prep-release.yml │ ├── publish-changelog.yml │ └── publish-release.yml ├── .gitignore ├── .mailmap ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── RELEASE.md ├── SECURITY.md ├── docs ├── Makefile ├── api │ ├── ipykernel.comm.rst │ ├── ipykernel.inprocess.rst │ ├── ipykernel.rst │ └── modules.rst ├── conf.py ├── index.rst └── make.bat ├── examples └── embedding │ ├── inprocess_qtconsole.py │ ├── inprocess_terminal.py │ ├── internal_ipkernel.py │ ├── ipkernel_qtapp.py │ └── ipkernel_wxapp.py ├── hatch_build.py ├── ipykernel ├── __init__.py ├── __main__.py ├── _eventloop_macos.py ├── _version.py ├── comm │ ├── __init__.py │ ├── comm.py │ └── manager.py ├── compiler.py ├── connect.py ├── control.py ├── debugger.py ├── displayhook.py ├── embed.py ├── eventloops.py ├── gui │ ├── __init__.py │ ├── gtk3embed.py │ └── gtkembed.py ├── heartbeat.py ├── inprocess │ ├── __init__.py │ ├── blocking.py │ ├── channels.py │ ├── client.py │ ├── constants.py │ ├── ipkernel.py │ ├── manager.py │ ├── session.py │ └── socket.py ├── iostream.py ├── ipkernel.py ├── jsonutil.py ├── kernelapp.py ├── kernelbase.py ├── kernelspec.py ├── log.py ├── parentpoller.py ├── py.typed ├── pylab │ ├── __init__.py │ ├── backend_inline.py │ └── config.py ├── resources │ ├── logo-32x32.png │ ├── logo-64x64.png │ └── logo-svg.svg ├── shellchannel.py ├── subshell.py ├── subshell_manager.py ├── thread.py └── zmqshell.py ├── ipykernel_launcher.py ├── pyproject.toml └── tests ├── __init__.py ├── conftest.py ├── inprocess ├── __init__.py ├── test_kernel.py └── test_kernelmanager.py ├── test_async.py ├── test_comm.py ├── test_connect.py ├── test_debugger.py ├── test_embed_kernel.py ├── test_eventloop.py ├── test_heartbeat.py ├── test_io.py ├── test_ipkernel_direct.py ├── test_jsonutil.py ├── 
test_kernel.py ├── test_kernel_direct.py ├── test_kernelapp.py ├── test_kernelspec.py ├── test_message_spec.py ├── test_parentpoller.py ├── test_start_kernel.py ├── test_subshells.py ├── test_zmq_shell.py └── utils.py /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Black formatting: https://github.com/ipython/ipykernel/pull/892 2 | c5bca730f82bbdfb005ab93969ff5a1d028c2341 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | groups: 8 | actions: 9 | patterns: 10 | - "*" 11 | - package-ecosystem: "pip" 12 | directory: "/" 13 | schedule: 14 | interval: "weekly" 15 | groups: 16 | actions: 17 | patterns: 18 | - "*" 19 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ipykernel tests 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | pull_request: 7 | schedule: 8 | - cron: "0 0 * * *" 9 | 10 | concurrency: 11 | group: ci-${{ github.ref }} 12 | cancel-in-progress: true 13 | 14 | defaults: 15 | run: 16 | shell: bash -eux {0} 17 | 18 | jobs: 19 | build: 20 | runs-on: ${{ matrix.os }} 21 | strategy: 22 | fail-fast: false 23 | matrix: 24 | os: [ubuntu-latest, windows-latest, macos-latest] 25 | python-version: 26 | - "3.9" 27 | - "3.10" 28 | - "3.11" 29 | - "3.12" 30 | - "3.13" 31 | - "pypy-3.9" 32 | - "pypy-3.10" 33 | steps: 34 | - name: Checkout 35 | uses: actions/checkout@v4 36 | 37 | - uses: actions/setup-python@v5 38 | with: 39 | python-version: ${{ matrix.python-version }} 40 | 41 | - name: Install hatch 42 | run: | 43 | python --version 44 | python -m pip install hatch 45 | 46 | - name: Run the 
tests 47 | timeout-minutes: 15 48 | if: ${{ !startsWith( matrix.python-version, 'pypy' ) && !startsWith(matrix.os, 'windows') }} 49 | run: | 50 | hatch run cov:test --cov-fail-under 50 51 | 52 | - name: Run the tests on pypy 53 | timeout-minutes: 15 54 | if: ${{ startsWith( matrix.python-version, 'pypy' ) }} 55 | run: | 56 | hatch run test:nowarn 57 | 58 | - name: Run the tests on Windows 59 | timeout-minutes: 15 60 | if: ${{ startsWith(matrix.os, 'windows') }} 61 | run: | 62 | hatch run cov:nowarn 63 | 64 | - name: Check Launcher 65 | run: | 66 | pip install . 67 | cd $HOME 68 | python -m ipykernel_launcher --help 69 | 70 | - uses: jupyterlab/maintainer-tools/.github/actions/upload-coverage@v1 71 | 72 | coverage: 73 | runs-on: ubuntu-latest 74 | needs: 75 | - build 76 | steps: 77 | - uses: actions/checkout@v4 78 | - uses: jupyterlab/maintainer-tools/.github/actions/report-coverage@v1 79 | with: 80 | fail_under: 80 81 | 82 | test_lint: 83 | name: Test Lint 84 | runs-on: ubuntu-latest 85 | steps: 86 | - uses: actions/checkout@v4 87 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 88 | - name: Run Linters 89 | run: | 90 | hatch run typing:test 91 | pipx run interrogate -vv . --fail-under 90 92 | hatch run lint:build 93 | pipx run doc8 --max-line-length=200 94 | 95 | check_release: 96 | runs-on: ubuntu-latest 97 | steps: 98 | - uses: actions/checkout@v4 99 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 100 | - uses: jupyter-server/jupyter_releaser/.github/actions/check-release@v2 101 | with: 102 | token: ${{ secrets.GITHUB_TOKEN }} 103 | 104 | test_docs: 105 | runs-on: ubuntu-latest 106 | steps: 107 | - uses: actions/checkout@v4 108 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 109 | - name: Build API docs 110 | run: | 111 | hatch run docs:api 112 | # If this fails run `hatch run docs:api` locally 113 | # and commit. 
114 | git status --porcelain 115 | git status -s | grep "A" && exit 1 116 | git status -s | grep "M" && exit 1 117 | echo "API docs done" 118 | - run: hatch run docs:build 119 | 120 | test_without_debugpy: 121 | runs-on: ${{ matrix.os }} 122 | strategy: 123 | fail-fast: false 124 | matrix: 125 | os: [ubuntu-latest] 126 | python-version: ["3.9"] 127 | steps: 128 | - name: Checkout 129 | uses: actions/checkout@v4 130 | 131 | - name: Base Setup 132 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 133 | 134 | - name: Install the Python dependencies without debugpy 135 | run: | 136 | pip install .[test] 137 | pip uninstall --yes debugpy 138 | 139 | - name: List installed packages 140 | run: | 141 | pip freeze 142 | 143 | - name: Run the tests 144 | timeout-minutes: 15 145 | run: pytest -W default -vv 146 | 147 | test_minimum_versions: 148 | name: Test Minimum Versions 149 | timeout-minutes: 20 150 | runs-on: ubuntu-latest 151 | steps: 152 | - uses: actions/checkout@v4 153 | - name: Base Setup 154 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 155 | with: 156 | dependency_type: minimum 157 | python_version: "3.9" 158 | 159 | - name: List installed packages 160 | run: | 161 | hatch -v run test:list 162 | 163 | - name: Run the unit tests 164 | run: | 165 | hatch -v run test:nowarn 166 | 167 | test_prereleases: 168 | name: Test Prereleases 169 | runs-on: ubuntu-latest 170 | timeout-minutes: 20 171 | steps: 172 | - name: Checkout 173 | uses: actions/checkout@v4 174 | - name: Base Setup 175 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 176 | with: 177 | dependency_type: pre 178 | - name: Run the tests 179 | run: | 180 | hatch run test:nowarn 181 | 182 | make_sdist: 183 | name: Make SDist 184 | runs-on: ubuntu-latest 185 | timeout-minutes: 20 186 | steps: 187 | - uses: actions/checkout@v4 188 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 189 | - uses:
jupyterlab/maintainer-tools/.github/actions/make-sdist@v1 190 | 191 | test_sdist: 192 | runs-on: ubuntu-latest 193 | needs: [make_sdist] 194 | name: Install from SDist and Test 195 | timeout-minutes: 20 196 | steps: 197 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 198 | - uses: jupyterlab/maintainer-tools/.github/actions/test-sdist@v1 199 | 200 | link_check: 201 | runs-on: ubuntu-latest 202 | steps: 203 | - uses: actions/checkout@v4 204 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 205 | - uses: jupyterlab/maintainer-tools/.github/actions/check-links@v1 206 | -------------------------------------------------------------------------------- /.github/workflows/downstream.yml: -------------------------------------------------------------------------------- 1 | name: Test downstream projects 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | pull_request: 7 | 8 | concurrency: 9 | group: downstream-${{ github.ref }} 10 | cancel-in-progress: true 11 | 12 | jobs: 13 | nbclient: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - name: Checkout 17 | uses: actions/checkout@v4 18 | 19 | - name: Base Setup 20 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 21 | 22 | - name: Run Test 23 | uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 24 | with: 25 | package_name: nbclient 26 | env_values: IPYKERNEL_CELL_NAME=\ 27 | 28 | ipywidgets: 29 | runs-on: ubuntu-latest 30 | steps: 31 | - name: Checkout 32 | uses: actions/checkout@v4 33 | 34 | - name: Base Setup 35 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 36 | 37 | - name: Run Test 38 | uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 39 | with: 40 | package_name: ipywidgets 41 | test_command: pytest -vv -raXxs -W default --durations 10 --color=yes 42 | 43 | jupyter_client: 44 | runs-on: ubuntu-latest 45 | steps: 46 | - name: Checkout 47 | uses: actions/checkout@v4 48 | 49 | - name: Base Setup 50 | uses: 
jupyterlab/maintainer-tools/.github/actions/base-setup@v1 51 | 52 | - name: Run Test 53 | uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 54 | with: 55 | package_name: jupyter_client 56 | 57 | ipyparallel: 58 | runs-on: ubuntu-latest 59 | timeout-minutes: 20 60 | steps: 61 | - name: Checkout 62 | uses: actions/checkout@v4 63 | 64 | - name: Base Setup 65 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 66 | 67 | - name: Run Test 68 | uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 69 | with: 70 | package_name: ipyparallel 71 | package_spec: '-e ".[test]"' 72 | 73 | jupyter_kernel_test: 74 | runs-on: ubuntu-latest 75 | steps: 76 | - name: Checkout 77 | uses: actions/checkout@v4 78 | 79 | - name: Base Setup 80 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 81 | 82 | - name: Run Test 83 | run: | 84 | git clone https://github.com/jupyter/jupyter_kernel_test.git 85 | cd jupyter_kernel_test 86 | pip install -e ".[test]" 87 | python test_ipykernel.py 88 | 89 | qtconsole: 90 | runs-on: ubuntu-latest 91 | timeout-minutes: 20 92 | steps: 93 | - name: Checkout 94 | uses: actions/checkout@v4 95 | - name: Setup Python 96 | uses: actions/setup-python@v5 97 | with: 98 | python-version: "3.9" 99 | architecture: "x64" 100 | - name: Install System Packages 101 | run: | 102 | sudo apt-get update 103 | sudo apt-get install -y --no-install-recommends '^libxcb.*-dev' libx11-xcb-dev libglu1-mesa-dev libxrender-dev libxi-dev libxkbcommon-dev libxkbcommon-x11-dev 104 | - name: Install qtconsole dependencies 105 | shell: bash -l {0} 106 | run: | 107 | cd ${GITHUB_WORKSPACE}/.. 108 | git clone https://github.com/jupyter/qtconsole.git 109 | cd qtconsole 110 | ${pythonLocation}/bin/python -m pip install -e ".[test]" 111 | ${pythonLocation}/bin/python -m pip install pyqt5 112 | - name: Install Ipykernel changes 113 | shell: bash -l {0} 114 | run: ${pythonLocation}/bin/python -m pip install -e . 
115 | - name: Test qtconsole 116 | shell: bash -l {0} 117 | run: | 118 | cd ${GITHUB_WORKSPACE}/../qtconsole 119 | xvfb-run --auto-servernum ${pythonLocation}/bin/python -m pytest -vv -s --full-trace --color=yes -k "not test_execute" qtconsole 120 | 121 | spyder_kernels: 122 | runs-on: ubuntu-latest 123 | timeout-minutes: 20 124 | steps: 125 | - name: Checkout 126 | uses: actions/checkout@v4 127 | - name: Setup Python 128 | uses: actions/setup-python@v5 129 | with: 130 | python-version: "3.9" 131 | architecture: "x64" 132 | - name: Install System Packages 133 | run: | 134 | sudo apt-get update 135 | sudo apt-get install -y --no-install-recommends libgl1 libglx-mesa0 136 | - name: Install spyder-kernels dependencies 137 | shell: bash -l {0} 138 | run: | 139 | cd ${GITHUB_WORKSPACE}/.. 140 | git clone https://github.com/spyder-ide/spyder-kernels.git 141 | cd spyder-kernels 142 | ${pythonLocation}/bin/python -m pip install -e ".[test]" 143 | - name: Install IPykernel changes 144 | shell: bash -l {0} 145 | run: ${pythonLocation}/bin/python -m pip install -e . 
146 | - name: Test spyder-kernels 147 | shell: bash -l {0} 148 | run: | 149 | cd ${GITHUB_WORKSPACE}/../spyder-kernels 150 | xvfb-run --auto-servernum ${pythonLocation}/bin/python -m pytest -vv -s --full-trace --color=yes -k 'not test_interrupt and not test_enter_debug_after_interruption' spyder_kernels 151 | -------------------------------------------------------------------------------- /.github/workflows/enforce-label.yml: -------------------------------------------------------------------------------- 1 | name: Enforce PR label 2 | 3 | concurrency: 4 | group: label-${{ github.ref }} 5 | cancel-in-progress: true 6 | 7 | on: 8 | pull_request: 9 | types: [labeled, unlabeled, opened, edited, synchronize] 10 | jobs: 11 | enforce-label: 12 | runs-on: ubuntu-latest 13 | permissions: 14 | pull-requests: write 15 | steps: 16 | - name: enforce-triage-label 17 | uses: jupyterlab/maintainer-tools/.github/actions/enforce-label@v1 18 | -------------------------------------------------------------------------------- /.github/workflows/nightly.yml: -------------------------------------------------------------------------------- 1 | name: nightly build and upload 2 | on: 3 | workflow_dispatch: 4 | schedule: 5 | - cron: "0 0 * * *" 6 | 7 | defaults: 8 | run: 9 | shell: bash -eux {0} 10 | 11 | jobs: 12 | build: 13 | runs-on: "ubuntu-latest" 14 | strategy: 15 | fail-fast: false 16 | matrix: 17 | python-version: ["3.12"] 18 | steps: 19 | - name: Checkout 20 | uses: actions/checkout@v4 21 | 22 | - name: Base Setup 23 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 24 | 25 | - name: Build 26 | run: | 27 | python -m pip install build 28 | python -m build 29 | - name: Upload wheel 30 | uses: scientific-python/upload-nightly-action@82396a2ed4269ba06c6b2988bb4fd568ef3c3d6b # 0.6.1 31 | with: 32 | artifacts_path: dist 33 | anaconda_nightly_upload_token: ${{secrets.UPLOAD_TOKEN}} 34 | -------------------------------------------------------------------------------- 
/.github/workflows/prep-release.yml: -------------------------------------------------------------------------------- 1 | name: "Step 1: Prep Release" 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | version_spec: 6 | description: "New Version Specifier" 7 | default: "next" 8 | required: false 9 | branch: 10 | description: "The branch to target" 11 | required: false 12 | post_version_spec: 13 | description: "Post Version Specifier" 14 | required: false 15 | silent: 16 | description: "Set a placeholder in the changelog and don't publish the release." 17 | required: false 18 | type: boolean 19 | since: 20 | description: "Use PRs with activity since this date or git reference" 21 | required: false 22 | since_last_stable: 23 | description: "Use PRs with activity since the last stable git tag" 24 | required: false 25 | type: boolean 26 | jobs: 27 | prep_release: 28 | runs-on: ubuntu-latest 29 | permissions: 30 | contents: write 31 | steps: 32 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 33 | 34 | - name: Prep Release 35 | id: prep-release 36 | uses: jupyter-server/jupyter_releaser/.github/actions/prep-release@v2 37 | with: 38 | token: ${{ secrets.GITHUB_TOKEN }} 39 | version_spec: ${{ github.event.inputs.version_spec }} 40 | silent: ${{ github.event.inputs.silent }} 41 | post_version_spec: ${{ github.event.inputs.post_version_spec }} 42 | target: ${{ github.event.inputs.target }} 43 | branch: ${{ github.event.inputs.branch }} 44 | since: ${{ github.event.inputs.since }} 45 | since_last_stable: ${{ github.event.inputs.since_last_stable }} 46 | 47 | - name: "** Next Step **" 48 | run: | 49 | echo "(Optional): Review Draft Release: ${{ steps.prep-release.outputs.release_url }}" 50 | -------------------------------------------------------------------------------- /.github/workflows/publish-changelog.yml: -------------------------------------------------------------------------------- 1 | name: "Publish Changelog" 2 | on: 3 | release: 4 | types:
[published] 5 | 6 | workflow_dispatch: 7 | inputs: 8 | branch: 9 | description: "The branch to target" 10 | required: false 11 | 12 | jobs: 13 | publish_changelog: 14 | runs-on: ubuntu-latest 15 | environment: release 16 | steps: 17 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 18 | 19 | - uses: actions/create-github-app-token@v1 20 | id: app-token 21 | with: 22 | app-id: ${{ vars.APP_ID }} 23 | private-key: ${{ secrets.APP_PRIVATE_KEY }} 24 | 25 | - name: Publish changelog 26 | id: publish-changelog 27 | uses: jupyter-server/jupyter_releaser/.github/actions/publish-changelog@v2 28 | with: 29 | token: ${{ steps.app-token.outputs.token }} 30 | branch: ${{ github.event.inputs.branch }} 31 | 32 | - name: "** Next Step **" 33 | run: | 34 | echo "Merge the changelog update PR: ${{ steps.publish-changelog.outputs.pr_url }}" 35 | -------------------------------------------------------------------------------- /.github/workflows/publish-release.yml: -------------------------------------------------------------------------------- 1 | name: "Step 2: Publish Release" 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | branch: 6 | description: "The target branch" 7 | required: false 8 | release_url: 9 | description: "The URL of the draft GitHub release" 10 | required: false 11 | steps_to_skip: 12 | description: "Comma separated list of steps to skip" 13 | required: false 14 | 15 | jobs: 16 | publish_release: 17 | runs-on: ubuntu-latest 18 | environment: release 19 | permissions: 20 | id-token: write 21 | steps: 22 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 23 | 24 | - uses: actions/create-github-app-token@v1 25 | id: app-token 26 | with: 27 | app-id: ${{ vars.APP_ID }} 28 | private-key: ${{ secrets.APP_PRIVATE_KEY }} 29 | 30 | - name: Populate Release 31 | id: populate-release 32 | uses: jupyter-server/jupyter_releaser/.github/actions/populate-release@v2 33 | with: 34 | token: ${{ steps.app-token.outputs.token }} 35 | branch: ${{ 
github.event.inputs.branch }} 36 | release_url: ${{ github.event.inputs.release_url }} 37 | steps_to_skip: ${{ github.event.inputs.steps_to_skip }} 38 | 39 | - name: Finalize Release 40 | id: finalize-release 41 | uses: jupyter-server/jupyter_releaser/.github/actions/finalize-release@v2 42 | with: 43 | token: ${{ steps.app-token.outputs.token }} 44 | release_url: ${{ steps.populate-release.outputs.release_url }} 45 | 46 | - name: "** Next Step **" 47 | if: ${{ success() }} 48 | run: | 49 | echo "Verify the final release" 50 | echo ${{ steps.finalize-release.outputs.release_url }} 51 | 52 | - name: "** Failure Message **" 53 | if: ${{ failure() }} 54 | run: | 55 | echo "Failed to Publish the Draft Release Url:" 56 | echo ${{ steps.populate-release.outputs.release_url }} 57 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | MANIFEST 2 | build 3 | cover 4 | dist 5 | _build 6 | docs/man/*.gz 7 | docs/source/api/generated 8 | docs/source/config/options 9 | docs/source/interactive/magics-generated.txt 10 | docs/gh-pages 11 | IPython/html/notebook/static/mathjax 12 | IPython/html/static/style/*.map 13 | *.py[co] 14 | __pycache__ 15 | *.egg-info 16 | *~ 17 | *.bak 18 | .ipynb_checkpoints 19 | .tox 20 | .DS_Store 21 | \#*# 22 | .#* 23 | .coverage 24 | .cache 25 | 26 | data_kernelspec 27 | .pytest_cache 28 | 29 | # copied changelog file 30 | docs/changelog.md 31 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | ci: 2 | autoupdate_commit_msg: "chore: update pre-commit hooks" 3 | autoupdate_schedule: weekly 4 | 5 | repos: 6 | - repo: https://github.com/pre-commit/pre-commit-hooks 7 | rev: v5.0.0 8 | hooks: 9 | - id: check-case-conflict 10 | - id: check-ast 11 | - id: check-docstring-first 12 | - id: 
check-executables-have-shebangs 13 | - id: check-added-large-files 14 | - id: check-case-conflict 15 | - id: check-merge-conflict 16 | - id: check-json 17 | - id: check-toml 18 | - id: check-yaml 19 | - id: debug-statements 20 | exclude: ipykernel/kernelapp.py 21 | - id: end-of-file-fixer 22 | - id: trailing-whitespace 23 | 24 | - repo: https://github.com/python-jsonschema/check-jsonschema 25 | rev: 0.32.1 26 | hooks: 27 | - id: check-github-workflows 28 | 29 | - repo: https://github.com/executablebooks/mdformat 30 | rev: 0.7.22 31 | hooks: 32 | - id: mdformat 33 | additional_dependencies: 34 | [mdformat-gfm, mdformat-frontmatter, mdformat-footnote] 35 | 36 | - repo: https://github.com/pre-commit/mirrors-prettier 37 | rev: "v4.0.0-alpha.8" 38 | hooks: 39 | - id: prettier 40 | types_or: [yaml, html, json] 41 | 42 | - repo: https://github.com/pre-commit/mirrors-mypy 43 | rev: "v1.15.0" 44 | hooks: 45 | - id: mypy 46 | files: ipykernel 47 | args: ["--install-types", "--non-interactive"] 48 | additional_dependencies: 49 | [ 50 | "traitlets>=5.13", 51 | "ipython>=8.16.1", 52 | "jupyter_client>=8.5", 53 | "appnope", 54 | "types-psutil", 55 | ] 56 | 57 | - repo: https://github.com/adamchainz/blacken-docs 58 | rev: "1.19.1" 59 | hooks: 60 | - id: blacken-docs 61 | additional_dependencies: [black==23.7.0] 62 | 63 | - repo: https://github.com/codespell-project/codespell 64 | rev: "v2.4.1" 65 | hooks: 66 | - id: codespell 67 | args: ["-L", "sur,nd"] 68 | 69 | - repo: https://github.com/pre-commit/pygrep-hooks 70 | rev: "v1.10.0" 71 | hooks: 72 | - id: rst-backticks 73 | - id: rst-directive-colons 74 | - id: rst-inline-touching-normal 75 | 76 | - repo: https://github.com/astral-sh/ruff-pre-commit 77 | rev: v0.11.4 78 | hooks: 79 | - id: ruff 80 | types_or: [python, jupyter] 81 | args: ["--fix", "--show-fixes"] 82 | - id: ruff-format 83 | types_or: [python, jupyter] 84 | 85 | - repo: https://github.com/scientific-python/cookie 86 | rev: "2025.01.22" 87 | hooks: 88 | - id: 
sp-repo-review 89 | additional_dependencies: ["repo-review[cli]"] 90 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-22.04 5 | tools: 6 | python: "3.13" 7 | 8 | sphinx: 9 | configuration: docs/conf.py 10 | 11 | python: 12 | install: 13 | # install itself with pip install . 14 | - method: pip 15 | path: . 16 | extra_requirements: 17 | - docs 18 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Welcome! 4 | 5 | For contributing tips, follow the [Jupyter Contributing Guide](https://jupyter.readthedocs.io/en/latest/contributing/content-contributor.html). 6 | Please make sure to follow the [Jupyter Code of Conduct](https://github.com/jupyter/governance/blob/master/conduct/code_of_conduct.md). 7 | 8 | ## Installing ipykernel for development 9 | 10 | ipykernel is a pure Python package, so setting up for development is the same as most other Python projects: 11 | 12 | ```bash 13 | # clone the repo 14 | git clone https://github.com/ipython/ipykernel 15 | cd ipykernel 16 | # do a 'development' or 'editable' install with pip: 17 | pip install -e . 18 | ``` 19 | 20 | ## Code Styling 21 | 22 | `ipykernel` has adopted automatic code formatting so you shouldn't 23 | need to worry too much about your code style. 24 | As long as your code is valid, 25 | the pre-commit hook should take care of how it should look. 
26 | To install `pre-commit`, run the following:: 27 | 28 | ``` 29 | pip install pre-commit 30 | pre-commit install 31 | ``` 32 | 33 | You can invoke the pre-commit hook by hand at any time with:: 34 | 35 | ``` 36 | pre-commit run 37 | ``` 38 | 39 | which should run any autoformatting on your code 40 | and tell you about any errors it couldn't fix automatically. 41 | You may also install [black integration](https://github.com/psf/black#editor-integration) 42 | into your text editor to format code automatically. 43 | 44 | If you have already committed files before setting up the pre-commit 45 | hook with `pre-commit install`, you can fix everything up using 46 | `pre-commit run --all-files`. You need to make the fixing commit 47 | yourself after that. 48 | 49 | Some of the hooks only run on CI by default, but you can invoke them by 50 | running with the `--hook-stage manual` argument. 51 | 52 | ## Releasing ipykernel 53 | 54 | Releasing ipykernel is _almost_ standard for a Python package: 55 | 56 | - set version for release 57 | - make and publish tag 58 | - publish release to PyPI 59 | - set version back to development 60 | 61 | The one extra step for ipykernel is that we need to make separate wheels for Python 2 and 3 62 | because the bundled kernelspec has different contents for Python 2 and 3. This 63 | affects only the 4.x branch of ipykernel as the 5+ version is only compatible with 64 | Python 3.
65 | 66 | The full release process is available below: 67 | 68 | ```bash 69 | # make sure version is set in ipykernel/_version.py 70 | VERSION="4.9.0" 71 | # commit the version and make a release tag 72 | git add ipykernel/_version.py 73 | git commit -m "release $VERSION" 74 | git tag -am "release $VERSION" $VERSION 75 | 76 | # push the changes to the repo 77 | git push 78 | git push --tags 79 | 80 | # publish the release to PyPI 81 | # note the extra `python2 setup.py bdist_wheel` for creating 82 | # the wheel for Python 2 83 | pip install --upgrade twine 84 | git clean -xfd 85 | python3 setup.py sdist bdist_wheel 86 | python2 setup.py bdist_wheel # the extra step for the 4.x branch. 87 | twine upload dist/* 88 | 89 | # set the version back to '.dev' in ipykernel/_version.py 90 | # e.g. 4.10.0.dev if we just released 4.9.0 91 | git add ipykernel/_version.py 92 | git commit -m "back to dev" 93 | git push 94 | ``` 95 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2015, IPython Development Team 4 | 5 | All rights reserved. 6 | 7 | Redistribution and use in source and binary forms, with or without 8 | modification, are permitted provided that the following conditions are met: 9 | 10 | 1. Redistributions of source code must retain the above copyright notice, this 11 | list of conditions and the following disclaimer. 12 | 13 | 2. Redistributions in binary form must reproduce the above copyright notice, 14 | this list of conditions and the following disclaimer in the documentation 15 | and/or other materials provided with the distribution. 16 | 17 | 3. Neither the name of the copyright holder nor the names of its 18 | contributors may be used to endorse or promote products derived from 19 | this software without specific prior written permission. 
20 | 21 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 22 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 23 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 24 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 25 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 26 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 27 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 28 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 29 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 31 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # IPython Kernel for Jupyter 2 | 3 | [![Build Status](https://github.com/ipython/ipykernel/actions/workflows/ci.yml/badge.svg?query=branch%3Amain++)](https://github.com/ipython/ipykernel/actions/workflows/ci.yml/badge.svg?query=branch%3Amain++) 4 | [![Documentation Status](https://readthedocs.org/projects/ipykernel/badge/?version=latest)](http://ipykernel.readthedocs.io/en/latest/?badge=latest) 5 | 6 | This package provides the IPython kernel for Jupyter. 7 | 8 | ## Installation from source 9 | 10 | 1. `git clone` 11 | 1. `cd ipykernel` 12 | 1. `pip install -e ".[test]"` 13 | 14 | After that, all normal `ipython` commands will use this newly-installed version of the kernel. 15 | 16 | ## Running tests 17 | 18 | Follow the instructions from `Installation from source`. 19 | 20 | and then from the root directory 21 | 22 | ```bash 23 | pytest 24 | ``` 25 | 26 | ## Running tests with coverage 27 | 28 | Follow the instructions from `Installation from source`. 
29 | 30 | and then from the root directory 31 | 32 | ```bash 33 | pytest -vv -s --cov ipykernel --cov-branch --cov-report term-missing:skip-covered --durations 10 34 | ``` 35 | 36 | ## About the IPython Development Team 37 | 38 | The IPython Development Team is the set of all contributors to the IPython project. 39 | This includes all of the IPython subprojects. 40 | 41 | The core team that coordinates development on GitHub can be found here: 42 | https://github.com/ipython/. 43 | 44 | ## Our Copyright Policy 45 | 46 | IPython uses a shared copyright model. Each contributor maintains copyright 47 | over their contributions to IPython. But, it is important to note that these 48 | contributions are typically only changes to the repositories. Thus, the IPython 49 | source code, in its entirety is not the copyright of any single person or 50 | institution. Instead, it is the collective copyright of the entire IPython 51 | Development Team. If individual contributors want to maintain a record of what 52 | changes/contributions they have specific copyright on, they should indicate 53 | their copyright in the commit message of the change, when they commit the 54 | change to one of the IPython repositories. 55 | 56 | With this in mind, the following banner should be used in any source code file 57 | to indicate the copyright and license terms: 58 | 59 | ``` 60 | # Copyright (c) IPython Development Team. 61 | # Distributed under the terms of the Modified BSD License. 62 | ``` 63 | -------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | # Release Guide 2 | 3 | ## Using `jupyter_releaser` 4 | 5 | The recommended way to make a release is to use [`jupyter_releaser`](https://jupyter-releaser.readthedocs.io/en/latest/get_started/making_release_from_repo.html). 
6 | 7 | ## Manual Release 8 | 9 | - Update `CHANGELOG` 10 | 11 | - Run the following: 12 | 13 | ```bash 14 | export VERSION= 15 | pip install pipx 16 | pipx run hatch version $VERSION 17 | git commit -a -m "Release $VERSION" 18 | git tag $VERSION; true; 19 | git push --all 20 | git push --tags 21 | rm -rf dist build 22 | pipx run build . 23 | pipx run twine check dist/* 24 | pipx run twine upload dist/* 25 | ``` 26 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Reporting a Vulnerability 4 | 5 | All IPython and Jupyter security issues are handled via security@ipython.org. 6 | You can find more information on the Jupyter website: https://jupyter.org/security 7 | 8 | ## Tidelift 9 | 10 | You can report security concerns for ipykernel via the [Tidelift platform](https://tidelift.com/security). 11 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables.
16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " applehelp to make an Apple Help Book" 34 | @echo " devhelp to make HTML files and a Devhelp project" 35 | @echo " epub to make an epub" 36 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 37 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 38 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 39 | @echo " text to make text files" 40 | @echo " man to make manual pages" 41 | @echo " texinfo to make Texinfo files" 42 | @echo " info to make Texinfo files and run them through makeinfo" 43 | @echo " gettext to make PO message catalogs" 44 | @echo " changes to make an overview of all changed/added/deprecated items" 45 | @echo " xml to make Docutils-native XML files" 46 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 47 | @echo " linkcheck to check all external links for integrity" 48 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 49 | @echo " coverage to run coverage check of the documentation (if enabled)" 50 | 51 | 
clean: 52 | rm -rf $(BUILDDIR)/* 53 | 54 | html: 55 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 56 | @echo 57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 58 | 59 | dirhtml: 60 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 61 | @echo 62 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 63 | 64 | singlehtml: 65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 66 | @echo 67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 68 | 69 | pickle: 70 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 71 | @echo 72 | @echo "Build finished; now you can process the pickle files." 73 | 74 | json: 75 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 76 | @echo 77 | @echo "Build finished; now you can process the JSON files." 78 | 79 | htmlhelp: 80 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 81 | @echo 82 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 83 | ".hhp project file in $(BUILDDIR)/htmlhelp." 84 | 85 | qthelp: 86 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 87 | @echo 88 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 89 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 90 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/IPythonKernel.qhcp" 91 | @echo "To view the help file:" 92 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/IPythonKernel.qhc" 93 | 94 | applehelp: 95 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 96 | @echo 97 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 98 | @echo "N.B. You won't be able to view it unless you put it in" \ 99 | "~/Library/Documentation/Help or install it in your application" \ 100 | "bundle." 101 | 102 | devhelp: 103 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 104 | @echo 105 | @echo "Build finished." 
106 | @echo "To view the help file:" 107 | @echo "# mkdir -p $$HOME/.local/share/devhelp/IPythonKernel" 108 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/IPythonKernel" 109 | @echo "# devhelp" 110 | 111 | epub: 112 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 113 | @echo 114 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 115 | 116 | latex: 117 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 118 | @echo 119 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 120 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 121 | "(use \`make latexpdf' here to do that automatically)." 122 | 123 | latexpdf: 124 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 125 | @echo "Running LaTeX files through pdflatex..." 126 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 127 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 128 | 129 | latexpdfja: 130 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 131 | @echo "Running LaTeX files through platex and dvipdfmx..." 132 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 133 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 134 | 135 | text: 136 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 137 | @echo 138 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 139 | 140 | man: 141 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 142 | @echo 143 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 144 | 145 | texinfo: 146 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 147 | @echo 148 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 149 | @echo "Run \`make' in that directory to run these through makeinfo" \ 150 | "(use \`make info' here to do that automatically)." 151 | 152 | info: 153 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 154 | @echo "Running Texinfo files through makeinfo..." 
155 | make -C $(BUILDDIR)/texinfo info 156 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 157 | 158 | gettext: 159 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 160 | @echo 161 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 162 | 163 | changes: 164 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 165 | @echo 166 | @echo "The overview file is in $(BUILDDIR)/changes." 167 | 168 | linkcheck: 169 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 170 | @echo 171 | @echo "Link check complete; look for any errors in the above output " \ 172 | "or in $(BUILDDIR)/linkcheck/output.txt." 173 | 174 | doctest: 175 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 176 | @echo "Testing of doctests in the sources finished, look at the " \ 177 | "results in $(BUILDDIR)/doctest/output.txt." 178 | 179 | coverage: 180 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 181 | @echo "Testing of coverage in the sources finished, look at the " \ 182 | "results in $(BUILDDIR)/coverage/python.txt." 183 | 184 | xml: 185 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 186 | @echo 187 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 188 | 189 | pseudoxml: 190 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 191 | @echo 192 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 193 | -------------------------------------------------------------------------------- /docs/api/ipykernel.comm.rst: -------------------------------------------------------------------------------- 1 | ipykernel.comm package 2 | ====================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | 8 | .. automodule:: ipykernel.comm.comm 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | 13 | 14 | .. 
automodule:: ipykernel.comm.manager 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | 19 | Module contents 20 | --------------- 21 | 22 | .. automodule:: ipykernel.comm 23 | :members: 24 | :undoc-members: 25 | :show-inheritance: 26 | -------------------------------------------------------------------------------- /docs/api/ipykernel.inprocess.rst: -------------------------------------------------------------------------------- 1 | ipykernel.inprocess package 2 | =========================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | 8 | .. automodule:: ipykernel.inprocess.blocking 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | 13 | 14 | .. automodule:: ipykernel.inprocess.channels 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | 19 | 20 | .. automodule:: ipykernel.inprocess.client 21 | :members: 22 | :undoc-members: 23 | :show-inheritance: 24 | 25 | 26 | .. automodule:: ipykernel.inprocess.constants 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | 32 | .. automodule:: ipykernel.inprocess.ipkernel 33 | :members: 34 | :undoc-members: 35 | :show-inheritance: 36 | 37 | 38 | .. automodule:: ipykernel.inprocess.manager 39 | :members: 40 | :undoc-members: 41 | :show-inheritance: 42 | 43 | 44 | .. automodule:: ipykernel.inprocess.session 45 | :members: 46 | :undoc-members: 47 | :show-inheritance: 48 | 49 | 50 | .. automodule:: ipykernel.inprocess.socket 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | Module contents 56 | --------------- 57 | 58 | .. automodule:: ipykernel.inprocess 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | -------------------------------------------------------------------------------- /docs/api/ipykernel.rst: -------------------------------------------------------------------------------- 1 | ipykernel package 2 | ================= 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. 
toctree:: 8 | :maxdepth: 4 9 | 10 | ipykernel.comm 11 | ipykernel.inprocess 12 | 13 | Submodules 14 | ---------- 15 | 16 | 17 | .. automodule:: ipykernel.compiler 18 | :members: 19 | :undoc-members: 20 | :show-inheritance: 21 | 22 | 23 | .. automodule:: ipykernel.connect 24 | :members: 25 | :undoc-members: 26 | :show-inheritance: 27 | 28 | 29 | .. automodule:: ipykernel.control 30 | :members: 31 | :undoc-members: 32 | :show-inheritance: 33 | 34 | 35 | .. automodule:: ipykernel.debugger 36 | :members: 37 | :undoc-members: 38 | :show-inheritance: 39 | 40 | 41 | .. automodule:: ipykernel.displayhook 42 | :members: 43 | :undoc-members: 44 | :show-inheritance: 45 | 46 | 47 | .. automodule:: ipykernel.embed 48 | :members: 49 | :undoc-members: 50 | :show-inheritance: 51 | 52 | 53 | .. automodule:: ipykernel.eventloops 54 | :members: 55 | :undoc-members: 56 | :show-inheritance: 57 | 58 | 59 | .. automodule:: ipykernel.heartbeat 60 | :members: 61 | :undoc-members: 62 | :show-inheritance: 63 | 64 | 65 | .. automodule:: ipykernel.iostream 66 | :members: 67 | :undoc-members: 68 | :show-inheritance: 69 | 70 | 71 | .. automodule:: ipykernel.ipkernel 72 | :members: 73 | :undoc-members: 74 | :show-inheritance: 75 | 76 | 77 | .. automodule:: ipykernel.jsonutil 78 | :members: 79 | :undoc-members: 80 | :show-inheritance: 81 | 82 | 83 | .. automodule:: ipykernel.kernelapp 84 | :members: 85 | :undoc-members: 86 | :show-inheritance: 87 | 88 | 89 | .. automodule:: ipykernel.kernelbase 90 | :members: 91 | :undoc-members: 92 | :show-inheritance: 93 | 94 | 95 | .. automodule:: ipykernel.kernelspec 96 | :members: 97 | :undoc-members: 98 | :show-inheritance: 99 | 100 | 101 | .. automodule:: ipykernel.log 102 | :members: 103 | :undoc-members: 104 | :show-inheritance: 105 | 106 | 107 | .. automodule:: ipykernel.parentpoller 108 | :members: 109 | :undoc-members: 110 | :show-inheritance: 111 | 112 | 113 | .. 
automodule:: ipykernel.shellchannel 114 | :members: 115 | :undoc-members: 116 | :show-inheritance: 117 | 118 | 119 | .. automodule:: ipykernel.subshell 120 | :members: 121 | :undoc-members: 122 | :show-inheritance: 123 | 124 | 125 | .. automodule:: ipykernel.subshell_manager 126 | :members: 127 | :undoc-members: 128 | :show-inheritance: 129 | 130 | 131 | .. automodule:: ipykernel.thread 132 | :members: 133 | :undoc-members: 134 | :show-inheritance: 135 | 136 | 137 | .. automodule:: ipykernel.zmqshell 138 | :members: 139 | :undoc-members: 140 | :show-inheritance: 141 | 142 | Module contents 143 | --------------- 144 | 145 | .. automodule:: ipykernel 146 | :members: 147 | :undoc-members: 148 | :show-inheritance: 149 | -------------------------------------------------------------------------------- /docs/api/modules.rst: -------------------------------------------------------------------------------- 1 | ipykernel 2 | ========= 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | ipykernel 8 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. _index: 2 | 3 | IPython Kernel Docs 4 | =================== 5 | 6 | This contains minimal version-sensitive documentation for the IPython kernel package. 7 | Most IPython kernel documentation is in the `IPython documentation `_. 8 | 9 | Contents: 10 | 11 | .. 
toctree:: 12 | :maxdepth: 1 13 | 14 | changelog 15 | API docs 16 | 17 | 18 | Indices and tables 19 | ================== 20 | 21 | * :ref:`genindex` 22 | * :ref:`modindex` 23 | * :ref:`search` 24 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | echo. 
coverage to run coverage check of the documentation if enabled 41 | goto end 42 | ) 43 | 44 | if "%1" == "clean" ( 45 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 46 | del /q /s %BUILDDIR%\* 47 | goto end 48 | ) 49 | 50 | 51 | REM Check if sphinx-build is available and fallback to Python version if any 52 | %SPHINXBUILD% 2> nul 53 | if errorlevel 9009 goto sphinx_python 54 | goto sphinx_ok 55 | 56 | :sphinx_python 57 | 58 | set SPHINXBUILD=python -m sphinx.__init__ 59 | %SPHINXBUILD% 2> nul 60 | if errorlevel 9009 ( 61 | echo. 62 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 63 | echo.installed, then set the SPHINXBUILD environment variable to point 64 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 65 | echo.may add the Sphinx directory to PATH. 66 | echo. 67 | echo.If you don't have Sphinx installed, grab it from 68 | echo.http://sphinx-doc.org/ 69 | exit /b 1 70 | ) 71 | 72 | :sphinx_ok 73 | 74 | 75 | if "%1" == "html" ( 76 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 77 | if errorlevel 1 exit /b 1 78 | echo. 79 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 80 | goto end 81 | ) 82 | 83 | if "%1" == "dirhtml" ( 84 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 85 | if errorlevel 1 exit /b 1 86 | echo. 87 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 88 | goto end 89 | ) 90 | 91 | if "%1" == "singlehtml" ( 92 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 93 | if errorlevel 1 exit /b 1 94 | echo. 95 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 96 | goto end 97 | ) 98 | 99 | if "%1" == "pickle" ( 100 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 101 | if errorlevel 1 exit /b 1 102 | echo. 103 | echo.Build finished; now you can process the pickle files. 
104 | goto end 105 | ) 106 | 107 | if "%1" == "json" ( 108 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 109 | if errorlevel 1 exit /b 1 110 | echo. 111 | echo.Build finished; now you can process the JSON files. 112 | goto end 113 | ) 114 | 115 | if "%1" == "htmlhelp" ( 116 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 117 | if errorlevel 1 exit /b 1 118 | echo. 119 | echo.Build finished; now you can run HTML Help Workshop with the ^ 120 | .hhp project file in %BUILDDIR%/htmlhelp. 121 | goto end 122 | ) 123 | 124 | if "%1" == "qthelp" ( 125 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 129 | .qhcp project file in %BUILDDIR%/qthelp, like this: 130 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\IPythonKernel.qhcp 131 | echo.To view the help file: 132 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\IPythonKernel.ghc 133 | goto end 134 | ) 135 | 136 | if "%1" == "devhelp" ( 137 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 138 | if errorlevel 1 exit /b 1 139 | echo. 140 | echo.Build finished. 141 | goto end 142 | ) 143 | 144 | if "%1" == "epub" ( 145 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 146 | if errorlevel 1 exit /b 1 147 | echo. 148 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 149 | goto end 150 | ) 151 | 152 | if "%1" == "latex" ( 153 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 154 | if errorlevel 1 exit /b 1 155 | echo. 156 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 157 | goto end 158 | ) 159 | 160 | if "%1" == "latexpdf" ( 161 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 162 | cd %BUILDDIR%/latex 163 | make all-pdf 164 | cd %~dp0 165 | echo. 166 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 
167 | goto end 168 | ) 169 | 170 | if "%1" == "latexpdfja" ( 171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 172 | cd %BUILDDIR%/latex 173 | make all-pdf-ja 174 | cd %~dp0 175 | echo. 176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 177 | goto end 178 | ) 179 | 180 | if "%1" == "text" ( 181 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 182 | if errorlevel 1 exit /b 1 183 | echo. 184 | echo.Build finished. The text files are in %BUILDDIR%/text. 185 | goto end 186 | ) 187 | 188 | if "%1" == "man" ( 189 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 190 | if errorlevel 1 exit /b 1 191 | echo. 192 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 193 | goto end 194 | ) 195 | 196 | if "%1" == "texinfo" ( 197 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 198 | if errorlevel 1 exit /b 1 199 | echo. 200 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 201 | goto end 202 | ) 203 | 204 | if "%1" == "gettext" ( 205 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 206 | if errorlevel 1 exit /b 1 207 | echo. 208 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 209 | goto end 210 | ) 211 | 212 | if "%1" == "changes" ( 213 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 214 | if errorlevel 1 exit /b 1 215 | echo. 216 | echo.The overview file is in %BUILDDIR%/changes. 217 | goto end 218 | ) 219 | 220 | if "%1" == "linkcheck" ( 221 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 222 | if errorlevel 1 exit /b 1 223 | echo. 224 | echo.Link check complete; look for any errors in the above output ^ 225 | or in %BUILDDIR%/linkcheck/output.txt. 226 | goto end 227 | ) 228 | 229 | if "%1" == "doctest" ( 230 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 231 | if errorlevel 1 exit /b 1 232 | echo. 233 | echo.Testing of doctests in the sources finished, look at the ^ 234 | results in %BUILDDIR%/doctest/output.txt. 
235 | goto end 236 | ) 237 | 238 | if "%1" == "coverage" ( 239 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage 240 | if errorlevel 1 exit /b 1 241 | echo. 242 | echo.Testing of coverage in the sources finished, look at the ^ 243 | results in %BUILDDIR%/coverage/python.txt. 244 | goto end 245 | ) 246 | 247 | if "%1" == "xml" ( 248 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 249 | if errorlevel 1 exit /b 1 250 | echo. 251 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 252 | goto end 253 | ) 254 | 255 | if "%1" == "pseudoxml" ( 256 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 257 | if errorlevel 1 exit /b 1 258 | echo. 259 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 260 | goto end 261 | ) 262 | 263 | :end 264 | -------------------------------------------------------------------------------- /examples/embedding/inprocess_qtconsole.py: -------------------------------------------------------------------------------- 1 | """An in-process qt console app.""" 2 | 3 | import os 4 | 5 | import tornado 6 | from IPython.lib import guisupport 7 | from qtconsole.inprocess import QtInProcessKernelManager 8 | from qtconsole.rich_ipython_widget import RichIPythonWidget 9 | 10 | assert tornado.version_info >= (6, 1) 11 | 12 | 13 | def print_process_id(): 14 | """Print the process id.""" 15 | print("Process ID is:", os.getpid()) 16 | 17 | 18 | def main(): 19 | """The main entry point.""" 20 | # Print the ID of the main process 21 | print_process_id() 22 | 23 | app = guisupport.get_app_qt4() 24 | 25 | # Create an in-process kernel 26 | # >>> print_process_id() 27 | # will print the same process ID as the main process 28 | kernel_manager = QtInProcessKernelManager() 29 | kernel_manager.start_kernel() 30 | kernel = kernel_manager.kernel 31 | kernel.gui = "qt4" 32 | kernel.shell.push({"foo": 43, "print_process_id": print_process_id}) 33 | 34 | kernel_client = kernel_manager.client() 35 | 
kernel_client.start_channels() 36 | 37 | def stop(): 38 | kernel_client.stop_channels() 39 | kernel_manager.shutdown_kernel() 40 | app.exit() 41 | 42 | control = RichIPythonWidget() 43 | control.kernel_manager = kernel_manager 44 | control.kernel_client = kernel_client 45 | control.exit_requested.connect(stop) 46 | control.show() 47 | 48 | guisupport.start_event_loop_qt4(app) 49 | 50 | 51 | if __name__ == "__main__": 52 | main() 53 | -------------------------------------------------------------------------------- /examples/embedding/inprocess_terminal.py: -------------------------------------------------------------------------------- 1 | """An in-process terminal example.""" 2 | 3 | import os 4 | 5 | from anyio import run 6 | from jupyter_console.ptshell import ZMQTerminalInteractiveShell 7 | 8 | from ipykernel.inprocess.manager import InProcessKernelManager 9 | 10 | 11 | def print_process_id(): 12 | """Print the process id.""" 13 | print("Process ID is:", os.getpid()) 14 | 15 | 16 | async def main(): 17 | """The main function.""" 18 | print_process_id() 19 | 20 | # Create an in-process kernel 21 | # >>> print_process_id() 22 | # will print the same process ID as the main process 23 | kernel_manager = InProcessKernelManager() 24 | await kernel_manager.start_kernel() 25 | kernel = kernel_manager.kernel 26 | kernel.gui = "qt4" 27 | kernel.shell.push({"foo": 43, "print_process_id": print_process_id}) 28 | client = kernel_manager.client() 29 | client.start_channels() 30 | 31 | shell = ZMQTerminalInteractiveShell(manager=kernel_manager, client=client) 32 | shell.mainloop() 33 | 34 | 35 | if __name__ == "__main__": 36 | run(main) 37 | -------------------------------------------------------------------------------- /examples/embedding/internal_ipkernel.py: -------------------------------------------------------------------------------- 1 | """An internal ipykernel example.""" 2 | # ----------------------------------------------------------------------------- 3 | # 
Imports 4 | # ----------------------------------------------------------------------------- 5 | 6 | import sys 7 | 8 | from IPython.lib.kernel import connect_qtconsole 9 | 10 | from ipykernel.kernelapp import IPKernelApp 11 | 12 | 13 | # ----------------------------------------------------------------------------- 14 | # Functions and classes 15 | # ----------------------------------------------------------------------------- 16 | def mpl_kernel(gui): 17 | """Launch and return an IPython kernel with matplotlib support for the desired gui""" 18 | kernel = IPKernelApp.instance() 19 | kernel.initialize( 20 | [ 21 | "python", 22 | "--matplotlib=%s" % gui, 23 | #'--log-level=10' 24 | ] 25 | ) 26 | return kernel 27 | 28 | 29 | class InternalIPKernel: 30 | """An internal ipykernel class.""" 31 | 32 | def init_ipkernel(self, backend): 33 | """Start IPython kernel with GUI event loop and mpl support.""" 34 | self.ipkernel = mpl_kernel(backend) 35 | # To create and track active qt consoles 36 | self.consoles = [] 37 | 38 | # This application will also act on the shell user namespace 39 | self.namespace = self.ipkernel.shell.user_ns 40 | 41 | # Example: a variable that will be seen by the user in the shell, and 42 | # that the GUI modifies (the 'Counter++' button increments it): 43 | self.namespace["app_counter"] = 0 44 | # self.namespace['ipkernel'] = self.ipkernel # dbg 45 | 46 | def print_namespace(self, evt=None): 47 | """Print the namespace.""" 48 | print("\n***Variables in User namespace***") 49 | for k, v in self.namespace.items(): 50 | if not k.startswith("_"): 51 | print(f"{k} -> {v!r}") 52 | sys.stdout.flush() 53 | 54 | def new_qt_console(self, evt=None): 55 | """start a new qtconsole connected to our kernel""" 56 | return connect_qtconsole(self.ipkernel.abs_connection_file, profile=self.ipkernel.profile) 57 | 58 | def count(self, evt=None): 59 | """Get the app counter value.""" 60 | self.namespace["app_counter"] += 1 61 | 62 | def cleanup_consoles(self, evt=None): 
63 | """Clean up the consoles.""" 64 | for c in self.consoles: 65 | c.kill() 66 | -------------------------------------------------------------------------------- /examples/embedding/ipkernel_qtapp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Example integrating an IPython kernel into a GUI App. 3 | 4 | This trivial GUI application internally starts an IPython kernel, to which Qt 5 | consoles can be connected either by the user at the command line or started 6 | from the GUI itself, via a button. The GUI can also manipulate one variable in 7 | the kernel's namespace, and print the namespace to the console. 8 | 9 | Play with it by running the script and then opening one or more consoles, and 10 | pushing the 'Counter++' and 'Namespace' buttons. 11 | 12 | Upon exit, it should automatically close all consoles opened from the GUI. 13 | 14 | Consoles attached separately from a terminal will not be terminated, though 15 | they will notice that their kernel died. 
16 | """ 17 | # ----------------------------------------------------------------------------- 18 | # Imports 19 | # ----------------------------------------------------------------------------- 20 | 21 | from internal_ipkernel import InternalIPKernel 22 | from PyQt4 import Qt 23 | 24 | 25 | # ----------------------------------------------------------------------------- 26 | # Functions and classes 27 | # ----------------------------------------------------------------------------- 28 | class SimpleWindow(Qt.QWidget, InternalIPKernel): 29 | """A custom Qt widget for IPykernel.""" 30 | 31 | def __init__(self, app): 32 | """Initialize the widget.""" 33 | Qt.QWidget.__init__(self) 34 | self.app = app 35 | self.add_widgets() 36 | self.init_ipkernel("qt") 37 | 38 | def add_widgets(self): 39 | """Add the widget.""" 40 | self.setGeometry(300, 300, 400, 70) 41 | self.setWindowTitle("IPython in your app") 42 | 43 | # Add simple buttons: 44 | console = Qt.QPushButton("Qt Console", self) 45 | console.setGeometry(10, 10, 100, 35) 46 | self.connect(console, Qt.SIGNAL("clicked()"), self.new_qt_console) 47 | 48 | namespace = Qt.QPushButton("Namespace", self) 49 | namespace.setGeometry(120, 10, 100, 35) 50 | self.connect(namespace, Qt.SIGNAL("clicked()"), self.print_namespace) 51 | 52 | count = Qt.QPushButton("Count++", self) 53 | count.setGeometry(230, 10, 80, 35) 54 | self.connect(count, Qt.SIGNAL("clicked()"), self.count) 55 | 56 | # Quit and cleanup 57 | quit = Qt.QPushButton("Quit", self) 58 | quit.setGeometry(320, 10, 60, 35) 59 | self.connect(quit, Qt.SIGNAL("clicked()"), Qt.qApp, Qt.SLOT("quit()")) 60 | 61 | self.app.connect(self.app, Qt.SIGNAL("lastWindowClosed()"), self.app, Qt.SLOT("quit()")) 62 | 63 | self.app.aboutToQuit.connect(self.cleanup_consoles) 64 | 65 | 66 | # ----------------------------------------------------------------------------- 67 | # Main script 68 | # ----------------------------------------------------------------------------- 69 | 70 | if __name__ 
== "__main__": 71 | app = Qt.QApplication([]) 72 | # Create our window 73 | win = SimpleWindow(app) 74 | win.show() 75 | 76 | # Very important, IPython-specific step: this gets GUI event loop 77 | # integration going, and it replaces calling app.exec_() 78 | win.ipkernel.start() 79 | -------------------------------------------------------------------------------- /examples/embedding/ipkernel_wxapp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Example integrating an IPython kernel into a GUI App. 3 | 4 | This trivial GUI application internally starts an IPython kernel, to which Qt 5 | consoles can be connected either by the user at the command line or started 6 | from the GUI itself, via a button. The GUI can also manipulate one variable in 7 | the kernel's namespace, and print the namespace to the console. 8 | 9 | Play with it by running the script and then opening one or more consoles, and 10 | pushing the 'Counter++' and 'Namespace' buttons. 11 | 12 | Upon exit, it should automatically close all consoles opened from the GUI. 13 | 14 | Consoles attached separately from a terminal will not be terminated, though 15 | they will notice that their kernel died. 16 | 17 | Ref: Modified from wxPython source code wxPython/samples/simple/simple.py 18 | """ 19 | 20 | # ----------------------------------------------------------------------------- 21 | # Imports 22 | # ----------------------------------------------------------------------------- 23 | import sys 24 | 25 | import wx 26 | from internal_ipkernel import InternalIPKernel 27 | 28 | # ----------------------------------------------------------------------------- 29 | # Functions and classes 30 | # ----------------------------------------------------------------------------- 31 | 32 | 33 | class MyFrame(wx.Frame, InternalIPKernel): 34 | """ 35 | This is MyFrame. It just shows a few controls on a wxPanel, 36 | and has a simple menu. 
37 | """ 38 | 39 | def __init__(self, parent, title): 40 | """Initialize the frame.""" 41 | wx.Frame.__init__(self, parent, -1, title, pos=(150, 150), size=(350, 285)) 42 | 43 | # Create the menubar 44 | menuBar = wx.MenuBar() 45 | 46 | # and a menu 47 | menu = wx.Menu() 48 | 49 | # add an item to the menu, using \tKeyName automatically 50 | # creates an accelerator, the third param is some help text 51 | # that will show up in the statusbar 52 | menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit this simple sample") 53 | 54 | # bind the menu event to an event handler 55 | self.Bind(wx.EVT_MENU, self.OnTimeToClose, id=wx.ID_EXIT) 56 | 57 | # and put the menu on the menubar 58 | menuBar.Append(menu, "&File") 59 | self.SetMenuBar(menuBar) 60 | 61 | self.CreateStatusBar() 62 | 63 | # Now create the Panel to put the other controls on. 64 | panel = wx.Panel(self) 65 | 66 | # and a few controls 67 | text = wx.StaticText(panel, -1, "Hello World!") 68 | text.SetFont(wx.Font(14, wx.SWISS, wx.NORMAL, wx.BOLD)) 69 | text.SetSize(text.GetBestSize()) 70 | qtconsole_btn = wx.Button(panel, -1, "Qt Console") 71 | ns_btn = wx.Button(panel, -1, "Namespace") 72 | count_btn = wx.Button(panel, -1, "Count++") 73 | close_btn = wx.Button(panel, -1, "Quit") 74 | 75 | # bind the button events to handlers 76 | self.Bind(wx.EVT_BUTTON, self.new_qt_console, qtconsole_btn) 77 | self.Bind(wx.EVT_BUTTON, self.print_namespace, ns_btn) 78 | self.Bind(wx.EVT_BUTTON, self.count, count_btn) 79 | self.Bind(wx.EVT_BUTTON, self.OnTimeToClose, close_btn) 80 | 81 | # Use a sizer to layout the controls, stacked vertically and with 82 | # a 10 pixel border around each 83 | sizer = wx.BoxSizer(wx.VERTICAL) 84 | for ctrl in [text, qtconsole_btn, ns_btn, count_btn, close_btn]: 85 | sizer.Add(ctrl, 0, wx.ALL, 10) 86 | panel.SetSizer(sizer) 87 | panel.Layout() 88 | 89 | # Start the IPython kernel with gui support 90 | self.init_ipkernel("wx") 91 | 92 | def OnTimeToClose(self, evt): 93 | """Event handler for the button 
class MyApp(wx.App):
    """A custom wx app."""

    def OnInit(self):
        """Create and show the main frame; expose its kernel on the app.

        Returns True so wx continues starting up.
        """
        frame = MyFrame(None, "Simple wxPython App")
        self.SetTopWindow(frame)
        frame.Show(True)
        # NOTE(review): frame.ipkernel is presumably set by init_ipkernel()
        # (defined outside this view) — the main script starts it in place of
        # app.MainLoop().
        self.ipkernel = frame.ipkernel
        return True
class CustomHook(BuildHookInterface):
    """The IPykernel build hook: writes the kernelspec shipped in the wheel."""

    def initialize(self, version, build_data):
        """Generate the ``data_kernelspec`` directory for packaging.

        Parameters
        ----------
        version : str
            "standard" for a regular wheel; anything else (e.g. an editable
            install) uses the full ``sys.executable`` in the kernelspec argv.
        build_data : dict
            Build metadata supplied by hatchling (unused here).
        """
        here = Path(__file__).parent.resolve()
        sys.path.insert(0, str(here))
        from ipykernel.kernelspec import make_ipkernel_cmd, write_kernel_spec

        overrides = {}

        # When building a standard wheel, the executable specified in the
        # kernelspec is simply 'python'.
        if version == "standard":
            overrides["metadata"] = {"debugger": True}
            argv = make_ipkernel_cmd(executable="python")
        # When installing an editable wheel, the full `sys.executable` can be used.
        else:
            argv = make_ipkernel_cmd()

        overrides["argv"] = argv

        # `here` is already a Path, so no re-wrapping in Path() is needed.
        dest = here / "data_kernelspec"
        if dest.exists():
            shutil.rmtree(dest)

        write_kernel_spec(dest, overrides=overrides)
4 | """ 5 | 6 | # cribbed heavily from IPython.terminal.pt_inputhooks.osx 7 | # obj-c boilerplate from appnope, used under BSD 2-clause 8 | 9 | import ctypes 10 | import ctypes.util 11 | from threading import Event 12 | 13 | objc = ctypes.cdll.LoadLibrary(ctypes.util.find_library("objc")) # type:ignore[arg-type] 14 | 15 | void_p = ctypes.c_void_p 16 | 17 | objc.objc_getClass.restype = void_p 18 | objc.sel_registerName.restype = void_p 19 | objc.objc_msgSend.restype = void_p 20 | 21 | msg = objc.objc_msgSend 22 | 23 | 24 | def _utf8(s): 25 | """ensure utf8 bytes""" 26 | if not isinstance(s, bytes): 27 | s = s.encode("utf8") 28 | return s 29 | 30 | 31 | def n(name): 32 | """create a selector name (for ObjC methods)""" 33 | return objc.sel_registerName(_utf8(name)) 34 | 35 | 36 | def C(classname): 37 | """get an ObjC Class by name""" 38 | return objc.objc_getClass(_utf8(classname)) 39 | 40 | 41 | # end obj-c boilerplate from appnope 42 | 43 | # CoreFoundation C-API calls we will use: 44 | CoreFoundation = ctypes.cdll.LoadLibrary( 45 | ctypes.util.find_library("CoreFoundation") # type:ignore[arg-type] 46 | ) 47 | 48 | CFAbsoluteTimeGetCurrent = CoreFoundation.CFAbsoluteTimeGetCurrent 49 | CFAbsoluteTimeGetCurrent.restype = ctypes.c_double 50 | 51 | CFRunLoopGetCurrent = CoreFoundation.CFRunLoopGetCurrent 52 | CFRunLoopGetCurrent.restype = void_p 53 | 54 | CFRunLoopGetMain = CoreFoundation.CFRunLoopGetMain 55 | CFRunLoopGetMain.restype = void_p 56 | 57 | CFRunLoopStop = CoreFoundation.CFRunLoopStop 58 | CFRunLoopStop.restype = None 59 | CFRunLoopStop.argtypes = [void_p] 60 | 61 | CFRunLoopTimerCreate = CoreFoundation.CFRunLoopTimerCreate 62 | CFRunLoopTimerCreate.restype = void_p 63 | CFRunLoopTimerCreate.argtypes = [ 64 | void_p, # allocator (NULL) 65 | ctypes.c_double, # fireDate 66 | ctypes.c_double, # interval 67 | ctypes.c_int, # flags (0) 68 | ctypes.c_int, # order (0) 69 | void_p, # callout 70 | void_p, # context 71 | ] 72 | 73 | CFRunLoopAddTimer = 
def _NSApp():
    """Return the global NSApplication instance (NSApp)"""
    # objc_msgSend is a single shared function object, so its argtypes must
    # be (re)set to match each call signature immediately before use.
    objc.objc_msgSend.argtypes = [void_p, void_p]
    return msg(C("NSApplication"), n("sharedApplication"))


def _wake(NSApp):
    """Wake the Application by posting a dummy event to its queue."""
    # 11 pointer-sized args: receiver, selector, and the 9 event parameters.
    objc.objc_msgSend.argtypes = [
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
    ]
    event = msg(
        C("NSEvent"),
        n(
            "otherEventWithType:location:modifierFlags:"
            "timestamp:windowNumber:context:subtype:data1:data2:"
        ),
        15,  # Type (NSApplicationDefined)
        0,  # location
        0,  # flags
        0,  # timestamp
        0,  # window
        None,  # context
        0,  # subtype
        0,  # data1
        0,  # data2
    )
    objc.objc_msgSend.argtypes = [void_p, void_p, void_p, void_p]
    msg(NSApp, n("postEvent:atStart:"), void_p(event), True)


# Records whether stop() actually ran, so mainloop() can distinguish a timer
# stop from the app terminating on its own.
_triggered = Event()


def stop(timer=None, loop=None):
    """Callback to fire when there's input to be read"""
    _triggered.set()
    NSApp = _NSApp()
    # if NSApp is not running, stop CFRunLoop directly,
    # otherwise stop and wake NSApp
    objc.objc_msgSend.argtypes = [void_p, void_p]
    if msg(NSApp, n("isRunning")):
        objc.objc_msgSend.argtypes = [void_p, void_p, void_p]
        msg(NSApp, n("stop:"), NSApp)
        # [NSApp stop:] only takes effect after the next event is processed,
        # so post a dummy event to make it happen now.
        _wake(NSApp)
    else:
        CFRunLoopStop(CFRunLoopGetCurrent())


# Keep a module-level reference to the ctypes callback so it is not
# garbage-collected while CoreFoundation still holds the raw pointer.
_c_callback_func_type = ctypes.CFUNCTYPE(None, void_p, void_p)
_c_stop_callback = _c_callback_func_type(stop)
CFAbsoluteTimeGetCurrent() + delay, # fireDate 148 | 0, # interval 149 | 0, # flags 150 | 0, # order 151 | _c_stop_callback, 152 | None, 153 | ) 154 | CFRunLoopAddTimer( 155 | CFRunLoopGetMain(), 156 | timer, 157 | kCFRunLoopCommonModes, 158 | ) 159 | 160 | 161 | def mainloop(duration=1): 162 | """run the Cocoa eventloop for the specified duration (seconds)""" 163 | 164 | _triggered.clear() 165 | NSApp = _NSApp() 166 | _stop_after(duration) 167 | objc.objc_msgSend.argtypes = [void_p, void_p] 168 | msg(NSApp, n("run")) 169 | if not _triggered.is_set(): 170 | # app closed without firing callback, 171 | # probably due to last window being closed. 172 | # Run the loop manually in this case, 173 | # since there may be events still to process (ipython/ipython#9734) 174 | CoreFoundation.CFRunLoopRun() 175 | -------------------------------------------------------------------------------- /ipykernel/_version.py: -------------------------------------------------------------------------------- 1 | """ 2 | store the current version info of the server. 
"""
store the current version info of the server.
"""

from __future__ import annotations

import re

# Version string must appear intact for hatch versioning
__version__ = "7.0.0a1"

# Build up version_info tuple for backwards compatibility.
# The groups must be *named* ((?P<name>...)) because they are read back via
# match["major"] etc. below; the dots are escaped so they only match the
# literal version separators.
pattern = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<rest>.*)"
match = re.match(pattern, __version__)
assert match is not None
parts: list[object] = [int(match[part]) for part in ["major", "minor", "patch"]]
if match["rest"]:
    # pre-release / dev suffix, e.g. "a1"
    parts.append(match["rest"])
version_info = tuple(parts)

kernel_protocol_version_info = (5, 3)
kernel_protocol_version = "{}.{}".format(*kernel_protocol_version_info)
# this is the class that will be created if we do comm.create_comm
class BaseComm(comm.base_comm.BaseComm):  # type:ignore[misc]
    """Base implementation of an ipykernel comm.

    Publishes comm messages on the owning kernel's IOPub socket.
    """

    kernel: Optional["Kernel"] = None

    def publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys):
        """Helper for sending a comm message on IOPub"""
        # Without an initialized kernel there is nowhere to publish to.
        if not Kernel.initialized():
            return

        if data is None:
            data = {}
        if metadata is None:
            metadata = {}
        content = json_clean({"data": data, "comm_id": self.comm_id, **keys})

        # Lazily bind to the singleton kernel on first use.
        if self.kernel is None:
            self.kernel = Kernel.instance()

        session = self.kernel.session
        assert session is not None
        session.send(
            self.kernel.iopub_socket,
            msg_type,
            content,
            metadata=json_clean(metadata),
            parent=self.kernel.get_parent(),
            ident=self.topic,
            buffers=buffers,
        )
None 71 | 72 | @default("comm_id") 73 | def _default_comm_id(self): 74 | return uuid.uuid4().hex 75 | 76 | def __init__( 77 | self, target_name="", data=None, metadata=None, buffers=None, show_warning=True, **kwargs 78 | ): 79 | """Initialize a comm.""" 80 | if show_warning: 81 | warn( 82 | "The `ipykernel.comm.Comm` class has been deprecated. Please use the `comm` module instead." 83 | "For creating comms, use the function `from comm import create_comm`.", 84 | DeprecationWarning, 85 | stacklevel=2, 86 | ) 87 | 88 | # Handle differing arguments between base classes. 89 | had_kernel = "kernel" in kwargs 90 | kernel = kwargs.pop("kernel", None) 91 | if target_name: 92 | kwargs["target_name"] = target_name 93 | BaseComm.__init__(self, data=data, metadata=metadata, buffers=buffers, **kwargs) # type:ignore[call-arg] 94 | # only re-add kernel if explicitly provided 95 | if had_kernel: 96 | kwargs["kernel"] = kernel 97 | traitlets.config.LoggingConfigurable.__init__(self, **kwargs) 98 | 99 | 100 | __all__ = ["Comm"] 101 | -------------------------------------------------------------------------------- /ipykernel/comm/manager.py: -------------------------------------------------------------------------------- 1 | """Base class to manage comms""" 2 | 3 | # Copyright (c) IPython Development Team. 4 | # Distributed under the terms of the Modified BSD License. 
class CommManager(comm.base_comm.CommManager, traitlets.config.LoggingConfigurable):  # type:ignore[misc]
    """A comm manager."""

    # the kernel this manager belongs to
    kernel = traitlets.Instance("ipykernel.kernelbase.Kernel")
    # open comms, keyed by comm_id
    comms = traitlets.Dict()
    # registered target handlers, keyed by target_name
    targets = traitlets.Dict()

    def __init__(self, **kwargs):
        """Initialize the manager."""
        # CommManager doesn't take arguments, so we explicitly forward arguments
        comm.base_comm.CommManager.__init__(self)
        traitlets.config.LoggingConfigurable.__init__(self, **kwargs)

    def comm_open(self, stream, ident, msg):
        """Handler for comm_open messages"""
        # This is for backward compatibility: comm_open creates a new
        # ipykernel.comm.Comm, but we should let the base class create the
        # comm with comm.create_comm in a major release
        content = msg["content"]
        comm_id = content["comm_id"]
        target_name = content["target_name"]
        f = self.targets.get(target_name, None)
        # show_warning=False: this internal use is not the deprecated API
        comm = Comm(
            comm_id=comm_id,
            primary=False,
            target_name=target_name,
            show_warning=False,
        )
        self.register_comm(comm)
        if f is None:
            logger.error("No such comm target registered: %s", target_name)
        else:
            try:
                f(comm, msg)
                return
            except Exception:
                logger.error("Exception opening comm with target: %s", target_name, exc_info=True)  # noqa: G201

        # Failure.
        try:
            comm.close()
        except Exception:
            logger.error(  # noqa: G201
                """Could not close comm during `comm_open` failure
                clean-up.  The comm may not have been opened yet.""",
                exc_info=True,
            )
def murmur2_x86(data, seed):
    """Return the 32-bit MurmurHash2 of the str *data*, seeded with *seed*."""
    MULT = 0x5BD1E995
    MASK = 0xFFFFFFFF
    raw = data.encode("utf8")
    length = len(raw)
    h = seed ^ length

    # Mix the input four bytes at a time (little-endian words).
    body_end = length & 0xFFFFFFFC
    for i in range(0, body_end, 4):
        k = raw[i] | (raw[i + 1] << 8) | (raw[i + 2] << 16) | (raw[i + 3] << 24)
        k = (k * MULT) & MASK
        k ^= k >> 24
        k = (k * MULT) & MASK

        h = (h * MULT) & MASK
        h ^= k

    # Fold in the remaining 1-3 tail bytes, if any.
    tail_len = length & 0x03
    k = 0
    if tail_len == 3:
        k = raw[body_end + 2] << 16
    if tail_len in (2, 3):
        k |= raw[body_end + 1] << 8
    if tail_len in (1, 2, 3):
        k |= raw[body_end]
        h ^= k
        h = (h * MULT) & MASK

    # Final avalanche.
    h ^= h >> 13
    h = (h * MULT) & MASK
    h ^= h >> 15

    return h
def get_tmp_directory():
    """Return the per-process temp directory used for cell source files."""
    tmp_dir = convert_to_long_pathname(tempfile.gettempdir())
    pid = os.getpid()
    # one directory per kernel process, e.g. /tmp/ipykernel_12345
    return tmp_dir + os.sep + "ipykernel_" + str(pid)


def get_tmp_hash_seed():
    """Return the fixed seed used when hashing cell source with murmur2."""
    return 0xC70F6907


def get_file_name(code):
    """Return the pseudo file name to associate with the cell source *code*.

    Uses IPYKERNEL_CELL_NAME verbatim when set; otherwise derives a stable
    name from the murmur2 hash of the source.
    """
    cell_name = os.environ.get("IPYKERNEL_CELL_NAME")
    if cell_name is None:
        name = murmur2_x86(code, get_tmp_hash_seed())
        cell_name = get_tmp_directory() + os.sep + str(name) + ".py"
    return cell_name


class XCachingCompiler(CachingCompiler):
    """A caching compiler that names code objects by a hash of their source."""

    def __init__(self, *args, **kwargs):
        """Initialize the compiler."""
        super().__init__(*args, **kwargs)
        # NOTE(review): presumably replaced with a real logger by the owner;
        # None until then — confirm against the kernel/shell setup code.
        self.log = None

    def get_code_name(self, raw_code, code, number):
        """Return the file name used for *raw_code* (overrides CachingCompiler)."""
        return get_file_name(raw_code)
def get_connection_file(app: IPKernelApp | None = None) -> str:
    """Return the path to the connection file of an app

    Parameters
    ----------
    app : IPKernelApp instance [optional]
        If unspecified, the currently running app will be used
    """
    from traitlets.utils import filefind

    if app is None:
        # fall back to the process-wide singleton app
        from ipykernel.kernelapp import IPKernelApp

        if not IPKernelApp.initialized():
            msg = "app not specified, and not in a running Kernel"
            raise RuntimeError(msg)
        app = IPKernelApp.instance()

    return filefind(app.connection_file, [".", app.connection_dir])
def connect_qtconsole(
    connection_file: str | None = None, argv: list[str] | None = None
) -> Popen[Any]:
    """Connect a qtconsole to the current kernel.

    This is useful for connecting a second qtconsole to a kernel, or to a
    local notebook.

    Parameters
    ----------
    connection_file : str [optional]
        The connection file to be used. Can be given by absolute path, or
        IPython will search in the security directory.

        If unspecified, the connection file for the currently running
        IPython Kernel will be used, which is only allowed from inside a kernel.

    argv : list [optional]
        Any extra args to be passed to the console.

    Returns
    -------
    :class:`subprocess.Popen` instance running the qtconsole frontend
    """
    if argv is None:
        argv = []

    cf = _find_connection_file(connection_file)

    code = "from qtconsole import qtconsoleapp;qtconsoleapp.main()"

    # Launch the Qt console in a separate session & process group, so
    # interrupting the kernel doesn't kill it.
    return Popen(
        [sys.executable, "-c", code, "--existing", cf, *argv],
        stdout=PIPE,
        stderr=PIPE,
        close_fds=(sys.platform != "win32"),
        start_new_session=True,
    )
class ZMQDisplayHook:
    """Publish the ``repr`` of displayed objects over a ZeroMQ socket.

    A minimal stand-in for ``sys.displayhook``.
    """

    topic = b"execute_result"

    def __init__(self, session, pub_socket):
        """Remember the session and IOPub socket used for publishing."""
        self.session = session
        self.pub_socket = pub_socket
        self.parent_header = {}

    def get_execution_count(self):
        """This method is replaced in kernelapp"""
        return 0

    def __call__(self, obj):
        """Publish an execute_result message for *obj* (no-op for ``None``)."""
        if obj is None:
            return

        # mirror the interactive `_` convenience name
        builtins._ = obj  # type:ignore[attr-defined]
        sys.stdout.flush()
        sys.stderr.flush()
        self.session.send(
            self.pub_socket,
            "execute_result",
            {
                "execution_count": self.get_execution_count(),
                "data": {"text/plain": repr(obj)},
                "metadata": {},
            },
            parent=self.parent_header,
            ident=self.topic,
        )

    def set_parent(self, parent):
        """Set the parent header."""
        self.parent_header = extract_header(parent)
    def set_parent(self, parent):
        """Set the parent for outbound messages."""
        self.parent_header = extract_header(parent)

    def start_displayhook(self):
        """Pre-build an empty execute_result message for this display cycle."""
        if self.session:
            self.msg = self.session.msg(
                "execute_result",
                {
                    "data": {},
                    "metadata": {},
                },
                parent=self.parent_header,
            )

    def write_output_prompt(self):
        """Record the execution count (instead of printing an Out[n] prompt)."""
        if self.msg:
            self.msg["content"]["execution_count"] = self.prompt_count

    def write_format_data(self, format_dict, md_dict=None):
        """Store the formatted representations and metadata on the message."""
        if self.msg:
            self.msg["content"]["data"] = json_clean(encode_images(format_dict))
            self.msg["content"]["metadata"] = md_dict

    def finish_displayhook(self):
        """Flush the std streams and send the message if it carries any data."""
        sys.stdout.flush()
        sys.stderr.flush()
        # only publish when something was actually formatted for display
        if self.msg and self.msg["content"]["data"] and self.session:
            self.session.send(self.pub_socket, self.msg, ident=self.topic)
        self.msg = None
def embed_kernel(module=None, local_ns=None, **kwargs):
    """Embed and start an IPython kernel in a given scope.

    Parameters
    ----------
    module : ModuleType, optional
        The module to load into IPython globals (default: caller)
    local_ns : dict, optional
        The namespace to load into IPython user namespace (default: caller)
    kwargs : dict, optional
        Further keyword args are relayed to the IPKernelApp constructor,
        allowing configuration of the Kernel. Will only have an effect
        on the first embed_kernel call for a given process.

    """
    # get the app if it exists, or set it up if it doesn't
    if IPKernelApp.initialized():
        app = IPKernelApp.instance()
    else:
        app = IPKernelApp.instance(**kwargs)
        app.initialize([])
        # Undo unnecessary sys module mangling from init_sys_modules.
        # This would not be necessary if we could prevent it
        # in the first place by using a different InteractiveShell
        # subclass, as in the regular embed case.
        main = app.kernel.shell._orig_sys_modules_main_mod
        if main is not None:
            sys.modules[app.kernel.shell._orig_sys_modules_main_name] = main

    # load the calling scope if not given
    (caller_module, caller_locals) = extract_module_locals(1)
    if module is None:
        module = caller_module
    if local_ns is None:
        # shallow-copy so the kernel's namespace is decoupled from the frame
        local_ns = dict(**caller_locals)

    app.kernel.user_module = module
    assert isinstance(local_ns, dict)
    app.kernel.user_ns = local_ns
    app.shell.set_completer_frame()  # type:ignore[union-attr]
    # start() blocks until the kernel shuts down; close() then releases sockets
    app.start()
    app.close()
2 | 3 | This package contains the various toolkit-dependent utilities we use to enable 4 | coordination between the IPython kernel and the event loops of the various GUI 5 | toolkits. 6 | """ 7 | 8 | # ----------------------------------------------------------------------------- 9 | # Copyright (C) 2010-2011 The IPython Development Team. 10 | # 11 | # Distributed under the terms of the BSD License. 12 | # 13 | # The full license is in the file LICENSE, distributed as part of this 14 | # software. 15 | # ----------------------------------------------------------------------------- 16 | -------------------------------------------------------------------------------- /ipykernel/gui/gtk3embed.py: -------------------------------------------------------------------------------- 1 | """GUI support for the IPython ZeroMQ kernel - GTK toolkit support.""" 2 | # ----------------------------------------------------------------------------- 3 | # Copyright (C) 2010-2011 The IPython Development Team 4 | # 5 | # Distributed under the terms of the BSD License. The full license is in 6 | # the file LICENSE, distributed as part of this software. 
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# Imports
# -----------------------------------------------------------------------------
# stdlib
import sys
import warnings

# Third-party
import gi

# pin the GTK 3 API before importing from gi.repository
gi.require_version("Gdk", "3.0")
gi.require_version("Gtk", "3.0")
from gi.repository import GObject, Gtk  # noqa: E402

warnings.warn(
    "The Gtk3 event loop for ipykernel is deprecated", category=DeprecationWarning, stacklevel=2
)

# -----------------------------------------------------------------------------
# Classes and functions
# -----------------------------------------------------------------------------


class GTKEmbed:
    """A class to embed a kernel into the GTK main event loop."""

    def __init__(self, kernel):
        """Initialize the embed with the kernel to iterate."""
        self.kernel = kernel
        # These two will later store the real gtk functions when we hijack them
        self.gtk_main = None
        self.gtk_main_quit = None

    def start(self):
        """Starts the GTK main event loop and sets our kernel startup routine."""
        # Register our function to initiate the kernel and start gtk
        GObject.idle_add(self._wire_kernel)
        Gtk.main()

    def _wire_kernel(self):
        """Initializes the kernel inside GTK.

        This is meant to run only once at startup, so it does its job and
        returns False to ensure it doesn't get run again by GTK.
        """
        self.gtk_main, self.gtk_main_quit = self._hijack_gtk()
        # poll the kernel at its configured interval (seconds -> milliseconds)
        GObject.timeout_add(int(1000 * self.kernel._poll_interval), self.iterate_kernel)
        return False

    def iterate_kernel(self):
        """Run one iteration of the kernel and return True.

        GTK timer functions must return True to be called again, so we make the
        call to :meth:`do_one_iteration` and then return True for GTK.
        """
        self.kernel.do_one_iteration()
        return True

    def stop(self):
        """Stop the embed."""
        # FIXME: this one isn't getting called because we have no reliable
        # kernel shutdown. We need to fix that: once the kernel has a
        # shutdown mechanism, it can call this.
        if self.gtk_main_quit:
            self.gtk_main_quit()
        sys.exit()

    def _hijack_gtk(self):
        """Hijack a few key functions in GTK for IPython integration.

        Modifies pyGTK's main and main_quit with a dummy so user code does not
        block IPython. This allows us to use %run to run arbitrary pygtk
        scripts from a long-lived IPython session, and when they attempt to
        start or stop

        Returns
        -------
        The original functions that have been hijacked:
        - Gtk.main
        - Gtk.main_quit
        """

        def dummy(*args, **kw):
            """No-op."""

        # save and trap main and main_quit from gtk
        orig_main, Gtk.main = Gtk.main, dummy
        orig_main_quit, Gtk.main_quit = Gtk.main_quit, dummy
        return orig_main, orig_main_quit
class GTKEmbed:
    """Embed an IPython kernel inside the (legacy pygtk) GTK main event loop."""

    def __init__(self, kernel):
        """Store the kernel and placeholders for the hijacked gtk entry points."""
        self.kernel = kernel
        # Filled in by _hijack_gtk() with the real gtk.main / gtk.main_quit.
        self.gtk_main = None
        self.gtk_main_quit = None

    def start(self):
        """Schedule the kernel wiring, then enter the GTK main loop."""
        gobject.idle_add(self._wire_kernel)
        gtk.main()

    def _wire_kernel(self):
        """Hook the kernel into GTK; meant to run exactly once at startup.

        Returns False so GTK removes this callback after the first call.
        """
        self.gtk_main, self.gtk_main_quit = self._hijack_gtk()
        # Poll the kernel on GTK's timer at the kernel's own poll interval.
        gobject.timeout_add(int(1000 * self.kernel._poll_interval), self.iterate_kernel)
        return False

    def iterate_kernel(self):
        """Run a single kernel iteration.

        Returns True so GTK keeps rescheduling this timer callback.
        """
        self.kernel.do_one_iteration()
        return True

    def stop(self):
        """Quit GTK (if we hijacked it) and exit the process."""
        # FIXME: nothing calls this yet because there is no reliable kernel
        # shutdown hook; once the kernel grows one, it should call stop().
        if self.gtk_main_quit:
            self.gtk_main_quit()
        sys.exit()

    def _hijack_gtk(self):
        """Replace gtk.main and gtk.main_quit with no-ops.

        This keeps user scripts executed via %run from blocking the
        long-lived IPython session when they try to start or stop the
        GTK event loop themselves.

        Returns
        -------
        The original (gtk.main, gtk.main_quit) pair.
        """

        def _noop(*args, **kw):
            """Stand-in that ignores all arguments."""

        real_main = gtk.main
        real_quit = gtk.main_quit
        gtk.main = _noop
        gtk.main_quit = _noop
        return real_main, real_quit
class Heartbeat(Thread):
    """Echo kernel heartbeat pings on a ROUTER socket from a daemon thread."""

    def __init__(self, context, addr=None):
        """Set up the heartbeat thread and resolve its bind address."""
        if addr is None:
            addr = ("tcp", localhost(), 0)
        super().__init__(name="Heartbeat")
        self.context = context
        self.transport, self.ip, self.port = addr
        # Remember the requested port: 0 means "pick a free one for me".
        self.original_port = self.port
        if self.original_port == 0:
            self.pick_port()
        self.addr = (self.ip, self.port)
        self.daemon = True
        # Hints telling pydev-based debuggers to leave this thread alone.
        self.pydev_do_not_trace = True
        self.is_pydev_daemon_thread = True
        self.name = "Heartbeat"

    def pick_port(self):
        """Choose a free tcp port, or an unused endpoint suffix for ipc."""
        if self.transport == "tcp":
            # Let the OS assign an ephemeral port, then release it for zmq.
            with socket.socket() as sock:
                # '*' means all interfaces to 0MQ, which is '' to socket.socket
                sock.bind(("" if self.ip == "*" else self.ip, 0))
                self.port = sock.getsockname()[1]
        elif self.transport == "ipc":
            candidate = 1
            while Path(f"{self.ip}-{candidate}").exists():
                candidate += 1
            self.port = candidate
        else:
            raise ValueError("Unrecognized zmq transport: %s" % self.transport)
        return self.port

    def _try_bind_socket(self):
        """Attempt one bind of the heartbeat socket to its endpoint."""
        sep = ":" if self.transport == "tcp" else "-"
        return self.socket.bind(f"{self.transport}://{self.ip}" + sep + str(self.port))

    def _bind_socket(self):
        """Bind the socket, retrying with fresh ports on address conflicts."""
        try:
            win_in_use = errno.WSAEADDRINUSE  # type:ignore[attr-defined]
        except AttributeError:
            # Not on Windows: there is no Windows-specific EADDRINUSE code.
            win_in_use = None

        # When the port was auto-picked (original_port == 0), retry up to 100
        # times on conflicts; a user-specified port gets a single attempt.
        max_attempts = 1 if self.original_port else 100
        for attempt in range(max_attempts):
            try:
                self._try_bind_socket()
                return
            except zmq.ZMQError as exc:
                if attempt == max_attempts - 1:
                    raise
                # Propagate anything that isn't an address-in-use error.
                if exc.errno != errno.EADDRINUSE and exc.errno != win_in_use:
                    raise
                if self.original_port != 0:
                    raise
                # Our auto-picked port got taken in the meantime; pick again.
                self.pick_port()

    def run(self):
        """Bind the ROUTER socket and echo messages until shutdown."""
        self.name = "Heartbeat"

        try:
            self.socket = self.context.socket(zmq.ROUTER)
            self.socket.linger = 1000
            self._bind_socket()
        except Exception:
            try:
                self.socket.close()
            except Exception:
                pass
            return

        while True:
            try:
                # zmq.device blocks, echoing every message back to its sender.
                zmq.device(zmq.QUEUE, self.socket, self.socket)
            except zmq.ZMQError as exc:
                if exc.errno == errno.EINTR:
                    # Interrupted by a signal: keep the heartbeat going.
                    continue
                if exc.errno == zmq.ETERM:
                    # Context terminated: close our socket and stop.
                    try:
                        self.socket.close()
                    except zmq.ZMQError:
                        # suppress further errors during cleanup;
                        # this shouldn't happen, though
                        pass
                    break
                if exc.errno == zmq.ENOTSOCK:
                    # Socket already closed elsewhere: nothing left to do.
                    break
                raise
            else:
                break
class BlockingInProcessChannel(InProcessChannel):
    """An in-process channel whose consumers block on an internal queue."""

    def __init__(self, *args, **kwds):
        """Create the channel with its internal message queue."""
        super().__init__(*args, **kwds)
        self._in_queue: Queue[object] = Queue()

    def call_handlers(self, msg):
        """Enqueue an incoming message for a blocking consumer."""
        self._in_queue.put(msg)

    def get_msg(self, block=True, timeout=None):
        """Return the next message, blocking up to ``timeout`` seconds."""
        if timeout is None:
            # Queue.get(timeout=None) has stupid uninteruptible behavior,
            # so wait for a week instead.
            timeout = 604800
        return self._in_queue.get(block, timeout)

    def get_msgs(self):
        """Drain and return every message that is currently queued."""
        pending = []
        try:
            while True:
                pending.append(self.get_msg(block=False))
        except Empty:
            pass
        return pending

    def msg_ready(self):
        """Return True if at least one message is waiting."""
        return not self._in_queue.empty()


class BlockingInProcessStdInChannel(BlockingInProcessChannel):
    """A blocking stdin channel that answers input requests inline."""

    def call_handlers(self, msg):
        """Overridden for the in-process channel.

        Answers an input_request by calling raw_input directly.
        """
        if msg["header"]["msg_type"] == "input_request":
            _raw_input = self.client.kernel._sys_raw_input
            print(msg["content"]["prompt"], end="", file=sys.__stdout__)
            assert sys.__stdout__ is not None
            sys.__stdout__.flush()
            self.client.input(_raw_input())
class InProcessChannel:
    """Base class for in-process channels.

    Provides the alive/start/stop lifecycle shared by all in-process
    channels; subclasses must implement ``call_handlers``.
    """

    # Methods proxied through to the underlying machinery, if any.
    proxy_methods: list[object] = []

    def __init__(self, client=None):
        """Initialize the channel.

        Parameters
        ----------
        client : optional
            The owning kernel client, used by subclasses to reach the kernel.
        """
        super().__init__()
        self.client = client
        self._is_alive = False

    def is_alive(self):
        """Test if the channel is alive."""
        return self._is_alive

    def start(self):
        """Start the channel."""
        self._is_alive = True

    def stop(self):
        """Stop the channel."""
        self._is_alive = False

    def call_handlers(self, msg):
        """This method is called in the main thread when a message arrives.

        Subclasses should override this method to handle incoming messages.
        """
        # Use a distinct local for the error text; the original code rebound
        # the ``msg`` parameter, shadowing the incoming message.
        error = "call_handlers must be defined in a subclass."
        raise NotImplementedError(error)

    def flush(self, timeout=1.0):
        """Flush the channel (a no-op for in-process channels)."""

    def call_handlers_later(self, *args, **kwds):
        """Call the message handlers later.

        The default implementation just calls the handlers immediately, but this
        method exists so that GUI toolkits can defer calling the handlers until
        after the event loop has run, as expected by GUI frontends.
        """
        self.call_handlers(*args, **kwds)

    def process_events(self):
        """Process any pending GUI events.

        This method will be never be called from a frontend without an event
        loop (e.g., a terminal frontend).
        """
        raise NotImplementedError


class InProcessHBChannel:
    """A dummy heartbeat channel interface for in-process kernels.

    Normally we use the heartbeat to check that the kernel process is alive.
    When the kernel is in-process, that doesn't make sense, but clients still
    expect this interface.
    """

    # Seconds without a beat before a real channel would declare death.
    time_to_dead = 3.0

    def __init__(self, client=None):
        """Initialize the channel."""
        super().__init__()
        self.client = client
        self._is_alive = False
        # Channels start paused: not beating until unpause() is called.
        self._pause = True

    def is_alive(self):
        """Test if the channel is alive."""
        return self._is_alive

    def start(self):
        """Start the channel."""
        self._is_alive = True

    def stop(self):
        """Stop the channel."""
        self._is_alive = False

    def pause(self):
        """Pause the channel."""
        self._pause = True

    def unpause(self):
        """Unpause the channel."""
        self._pause = False

    def is_beating(self):
        """Test if the channel is beating."""
        return not self._pause
class InProcessKernelManager(KernelManager):
    """A manager for an in-process kernel.

    This class implements the interface of
    `jupyter_client.kernelmanagerabc.KernelManagerABC` and allows
    (asynchronous) frontends to be used seamlessly with an in-process kernel.

    See `jupyter_client.kernelmanager.KernelManager` for docstrings.
    """

    # The kernel process with which the KernelManager is communicating.
    # allow_none=True because no kernel exists before start / after shutdown.
    kernel = Instance("ipykernel.inprocess.ipkernel.InProcessKernel", allow_none=True)
    # the client class for KM.client() shortcut
    client_class = DottedObjectName("ipykernel.inprocess.BlockingInProcessKernelClient")

    @default("blocking_class")
    def _default_blocking_class(self):
        # Imported lazily to avoid a circular import at module load time.
        from .blocking import BlockingInProcessKernelClient

        return BlockingInProcessKernelClient

    @default("session")
    def _default_session(self):
        # don't sign in-process messages (INPROCESS_KEY is a shared dummy key)
        return Session(key=INPROCESS_KEY, parent=self)

    # --------------------------------------------------------------------------
    # Kernel management methods
    # --------------------------------------------------------------------------

    async def start_kernel(  # type: ignore[explicit-override, override]
        self, *, task_status: TaskStatus = TASK_STATUS_IGNORED, **kwds: Any
    ) -> None:
        """Start the kernel.

        Creates a fresh InProcessKernel sharing this manager's session and
        awaits its startup; ``task_status`` lets anyio callers learn when
        the kernel is running.
        """
        # Imported lazily to avoid a circular import at module load time.
        from ipykernel.inprocess.ipkernel import InProcessKernel

        self.kernel = InProcessKernel(parent=self, session=self.session)
        await self.kernel.start(task_status=task_status)

    def shutdown_kernel(self):
        """Shutdown the kernel.

        Stops the iopub thread first, then tears the kernel down.
        """
        if self.kernel:
            self.kernel.iopub_thread.stop()
            self._kill_kernel()

    async def restart_kernel(  # type: ignore[explicit-override, override]
        self,
        now: bool = False,
        newports: bool = False,
        *,
        task_status: TaskStatus = TASK_STATUS_IGNORED,
        **kw: Any,
    ) -> None:
        """Restart the kernel.

        ``now`` and ``newports`` are accepted for interface compatibility
        with the superclass; an in-process restart is always a plain
        shutdown followed by a fresh start.
        """
        self.shutdown_kernel()
        await self.start_kernel(task_status=task_status, **kw)

    @property
    def has_kernel(self):
        # True whenever a kernel instance currently exists.
        return self.kernel is not None

    def _kill_kernel(self):
        # Stop the kernel and drop the reference so has_kernel/is_alive
        # report False afterwards.
        if self.kernel:
            self.kernel.stop()
            self.kernel = None

    def interrupt_kernel(self):
        """Interrupt the kernel.

        Raises
        ------
        NotImplementedError
            Always; there is no separate process to signal.
        """
        msg = "Cannot interrupt in-process kernel."
        raise NotImplementedError(msg)

    def signal_kernel(self, signum):
        """Send a signal to the kernel.

        Raises
        ------
        NotImplementedError
            Always; there is no separate process to signal.
        """
        msg = "Cannot signal in-process kernel."
        raise NotImplementedError(msg)

    def is_alive(self):
        """Test if the kernel is alive."""
        return self.kernel is not None

    def client(self, **kwargs):
        """Get a client for the kernel.

        Injects the in-process kernel instance so the client can talk to it
        directly instead of over sockets.
        """
        kwargs["kernel"] = self.kernel
        return super().client(**kwargs)
class DummySocket(HasTraits):
    """A dummy socket implementing (part of) the zmq.asyncio.Socket interface."""

    context = Instance(zmq.asyncio.Context)

    def _context_default(self):
        # Lazily create a private asyncio zmq context when none is supplied.
        return zmq.asyncio.Context()

    # -------------------------------------------------------------------------
    # Socket interface
    # -------------------------------------------------------------------------

    def __init__(self, is_shell, *args, **kwargs):
        """Create the socket; shell sockets get in/out memory streams."""
        super().__init__(*args, **kwargs)
        self.is_shell = is_shell
        self.on_recv = None
        if is_shell:
            # Unbounded streams: 'in' feeds recv_multipart, 'out' collects sends.
            self.in_send_stream, self.in_receive_stream = create_memory_object_stream[dict](
                max_buffer_size=inf
            )
            self.out_send_stream, self.out_receive_stream = create_memory_object_stream[dict](
                max_buffer_size=inf
            )

    def put(self, msg):
        """Inject a message as if it had arrived on the wire."""
        self.in_send_stream.send_nowait(msg)

    async def get(self):
        """Await the next message this socket has sent out."""
        return await self.out_receive_stream.receive()

    async def recv_multipart(self, flags=0, copy=True, track=False):
        """Recv a multipart message (flags/copy/track are ignored)."""
        return await self.in_receive_stream.receive()

    def send_multipart(self, msg_parts, flags=0, copy=True, track=False):
        """Send a multipart message (flags/copy/track are ignored)."""
        if self.is_shell:
            self.out_send_stream.send_nowait(msg_parts)
        if self.on_recv is not None:
            self.on_recv(msg_parts)

    def flush(self, timeout=1.0):
        """no-op to comply with stream API"""

    async def poll(self, timeout=0):
        """Report whether a message is waiting; only timeout=0 is supported."""
        assert timeout == 0
        return self.in_receive_stream.statistics().current_buffer_used != 0

    def close(self):
        """Close all memory streams (shell sockets only)."""
        if self.is_shell:
            self.in_send_stream.close()
            self.in_receive_stream.close()
            self.out_send_stream.close()
            self.out_receive_stream.close()
# Calling it once at import seems to help.
datetime.strptime("2000-01-01", "%Y-%m-%d")

# -----------------------------------------------------------------------------
# Classes and functions
# -----------------------------------------------------------------------------


# constants for identifying png/jpeg data
PNG = b"\x89PNG\r\n\x1a\n"
# front of PNG base64-encoded
PNG64 = b"iVBORw0KG"
JPEG = b"\xff\xd8"
# front of JPEG base64-encoded
JPEG64 = b"/9"
# constants for identifying gif data
GIF_64 = b"R0lGODdh"
GIF89_64 = b"R0lGODlh"
# front of PDF base64-encoded
PDF64 = b"JVBER"

JUPYTER_CLIENT_MAJOR_VERSION = jupyter_client_version[0]


def encode_images(format_dict):
    """b64-encodes images in a displaypub format dict

    Parameters
    ----------
    format_dict : dict
        A dictionary of display data keyed by mime-type

    Returns
    -------
    format_dict : dict
        The same dictionary, returned unchanged: on Python 3, bytes objects
        always represent binary data and are base64-encoded elsewhere, so
        there is nothing left to do here.
    """
    return format_dict


def json_clean(obj):  # pragma: no cover
    """Deprecated, this is a no-op for jupyter-client>=7.

    Clean an object to ensure it's safe to encode in JSON.

    Atomic, immutable objects are returned unmodified. Sets, tuples and
    generators are converted to lists; lists and dicts are copied.

    Note: dicts whose keys could collide after stringification (such as a
    dict with both the number 1 and the string '1' as keys) raise ValueError.

    Parameters
    ----------
    obj : any python object

    Returns
    -------
    out : object
        A version of the input which will not cause an encoding error when
        encoded as JSON. Note that this function does not *encode* its
        inputs, it simply sanitizes them so that there will be no encoding
        errors later.
    """
    if int(JUPYTER_CLIENT_MAJOR_VERSION) >= 7:
        return obj

    # bools are Integral (which are Real), so they must be checked first.
    if isinstance(obj, bool):
        return obj

    if isinstance(obj, numbers.Integral):
        # cast to plain int, in case subclasses override __str__ (e.g. boost enum, #4598)
        return int(obj)

    if isinstance(obj, numbers.Real):
        # NaN and infinities are not valid JSON; fall back to their reprs.
        if math.isnan(obj) or math.isinf(obj):
            return repr(obj)
        return float(obj)

    # types that are 'atomic' and ok in json as-is.
    if isinstance(obj, (str, type(None))):
        return obj

    if isinstance(obj, bytes):
        # unambiguous binary data is base64-encoded
        # (this probably should have happened upstream)
        return b2a_base64(obj).decode("ascii")

    # containers and iterators are materialized as lists first
    if isinstance(obj, (tuple, set, types.GeneratorType)) or (
        hasattr(obj, "__iter__") and hasattr(obj, next_attr_name)
    ):
        obj = list(obj)

    if isinstance(obj, list):
        return [json_clean(item) for item in obj]

    if isinstance(obj, dict):
        # Keys are stringified below, so e.g. True and 'True' or 1 and '1'
        # would collide in JSON and silently drop a value; refuse instead.
        if len(obj) != len({str(key) for key in obj}):
            msg = (
                "dict cannot be safely converted to JSON: "
                "key collision would lead to dropped values"
            )
            raise ValueError(msg)
        return {str(key): json_clean(value) for key, value in obj.items()}

    if isinstance(obj, (datetime, date)):
        return obj.strftime(ISO8601)

    # we don't understand it, it's probably an unserializable object
    raise ValueError("Can't clean for JSON: %r" % obj)
class EnginePUBHandler(PUBHandler):
    """A simple PUBHandler subclass that sets root_topic"""

    engine = None

    def __init__(self, engine, *args, **kwargs):
        """Initialize the handler and remember the owning engine."""
        super().__init__(*args, **kwargs)
        self.engine = engine

    @property  # type:ignore[misc]
    def root_topic(self):
        """this is a property, in case the handler is created
        before the engine gets registered with an id"""
        engine_id = getattr(self.engine, "id", None)
        if isinstance(engine_id, int):
            return "engine.%i" % engine_id
        return "engine"
33 | """ 34 | super().__init__() 35 | self.parent_pid = parent_pid 36 | self.daemon = True 37 | 38 | def run(self): 39 | """Run the poller.""" 40 | # We cannot use os.waitpid because it works only for child processes. 41 | from errno import EINTR 42 | 43 | # before start, check if the passed-in parent pid is valid 44 | original_ppid = os.getppid() 45 | if original_ppid != self.parent_pid: 46 | self.parent_pid = 0 47 | 48 | get_logger().debug( 49 | "%s: poll for parent change with original parent pid=%d", 50 | type(self).__name__, 51 | self.parent_pid, 52 | ) 53 | 54 | while True: 55 | try: 56 | ppid = os.getppid() 57 | parent_is_init = not self.parent_pid and ppid == 1 58 | parent_has_changed = self.parent_pid and ppid != self.parent_pid 59 | if parent_is_init or parent_has_changed: 60 | get_logger().warning("Parent appears to have exited, shutting down.") 61 | os._exit(1) 62 | time.sleep(1.0) 63 | except OSError as e: 64 | if e.errno == EINTR: 65 | continue 66 | raise 67 | 68 | 69 | class ParentPollerWindows(Thread): 70 | """A Windows-specific daemon thread that listens for a special event that 71 | signals an interrupt and, optionally, terminates the program immediately 72 | when the parent process no longer exists. 73 | """ 74 | 75 | def __init__(self, interrupt_handle=None, parent_handle=None): 76 | """Create the poller. At least one of the optional parameters must be 77 | provided. 78 | 79 | Parameters 80 | ---------- 81 | interrupt_handle : HANDLE (int), optional 82 | If provided, the program will generate a Ctrl+C event when this 83 | handle is signaled. 84 | parent_handle : HANDLE (int), optional 85 | If provided, the program will terminate immediately when this 86 | handle is signaled. 
    def run(self):
        """Run the poll loop. This method never returns.

        Blocks in WaitForMultipleObjects on the configured handles: a
        signaled interrupt handle raises KeyboardInterrupt in the main
        thread; a signaled parent handle terminates this process.
        """
        try:
            # Python 3 location of the Win32 wait constants.
            from _winapi import INFINITE, WAIT_OBJECT_0  # type:ignore[attr-defined]
        except ImportError:
            # Fallback for interpreters that still ship _subprocess.
            from _subprocess import INFINITE, WAIT_OBJECT_0

        # Build the list of handles to listen on.
        handles = []
        if self.interrupt_handle:
            handles.append(self.interrupt_handle)
        if self.parent_handle:
            handles.append(self.parent_handle)
        # 64-bit builds need 64-bit integers for the handle array.
        arch = platform.architecture()[0]
        c_int = ctypes.c_int64 if arch.startswith("64") else ctypes.c_int

        # Listen forever.
        while True:
            result = ctypes.windll.kernel32.WaitForMultipleObjects(  # type:ignore[attr-defined]
                len(handles),  # nCount
                (c_int * len(handles))(*handles),  # lpHandles
                False,  # bWaitAll
                INFINITE,
            )  # dwMilliseconds

            if WAIT_OBJECT_0 <= result < len(handles):
                # One of our handles was signaled; figure out which.
                handle = handles[result - WAIT_OBJECT_0]

                if handle == self.interrupt_handle:
                    # check if signal handler is callable
                    # to avoid 'int not callable' error (Python issue #23395)
                    if callable(signal.getsignal(signal.SIGINT)):
                        interrupt_main()

                elif handle == self.parent_handle:
                    get_logger().warning("Parent appears to have exited, shutting down.")
                    os._exit(1)
            elif result < 0:
                # wait failed, just give up and stop polling.
                warnings.warn(
                    """Parent poll failed. If the frontend dies,
                the kernel may be left running. Please let us know
                about your system (bitness, Python, etc.)
                at ipython-dev@scipy.org""",
                    stacklevel=2,
                )
                return
class ShellChannelThread(BaseThread):
    """A thread for a shell channel.

    Communicates with shell/subshell threads via pairs of ZMQ inproc sockets.
    """

    def __init__(
        self,
        context: zmq.Context,  # type: ignore[type-arg]
        shell_socket: zmq_anyio.Socket,
        **kwargs,
    ):
        """Create the thread; the subshell manager is built on first use."""
        super().__init__(name=SHELL_CHANNEL_THREAD_NAME, **kwargs)
        self._context = context
        self._shell_socket = shell_socket
        self._manager: SubshellManager | None = None

    @property
    def manager(self) -> SubshellManager:
        """Subshell manager, created lazily on first access."""
        manager = self._manager
        if manager is None:
            manager = SubshellManager(self._context, self._shell_socket)
            self._manager = manager
        return manager

    def run(self) -> None:
        """Run the thread loop, closing the manager (if it exists) on exit."""
        try:
            super().run()
        finally:
            manager = self._manager
            if manager:
                manager.close()
class BaseThread(Thread):
    """Base class for threads.

    Runs an anyio event loop inside the thread and services a simple
    cross-thread task queue: other threads submit work through
    ``start_soon``/``run_async``/``run_sync``, and the loop in ``_main``
    executes it. ``run_async``/``run_sync`` block the calling thread until
    the result is available.
    """

    def __init__(self, **kwargs):
        """Initialize the thread."""
        super().__init__(**kwargs)
        # Set once the event loop and task group are running (see _main).
        self.started = Event()
        # Set by stop(); observable by code waiting for shutdown.
        self.stopped = Event()
        # Hide this thread from the pydev debugger.
        self.pydev_do_not_trace = True
        self.is_pydev_daemon_thread = True
        # Work queue of ("start_soon" | "run_async" | "run_sync", payload)
        # tuples; a None item is the shutdown sentinel put by stop().
        self._tasks: Queue[tuple[str, Any] | None] = Queue()
        # Results of run_async/run_sync are handed back through here.
        self._result: Queue[Any] = Queue()
        self._exception: Exception | None = None

    @property
    def exception(self) -> Exception | None:
        # Exception raised by the event loop in run(), if any.
        return self._exception

    @property
    def task_group(self) -> TaskGroup:
        # NOTE(review): only valid after the thread has started (_task_group
        # is assigned in _main) — accessing it earlier raises AttributeError.
        return self._task_group

    def start_soon(self, coro: Callable[[], Awaitable[Any]]) -> None:
        # Schedule an async callable on the thread's task group without
        # waiting for its result; returns immediately.
        self._tasks.put(("start_soon", coro))

    def run_async(self, coro: Awaitable[Any]) -> Any:
        # Run an awaitable on the thread's event loop and block the calling
        # thread until its result is available. The payload is awaited
        # directly in _main, so it must be an awaitable (e.g. a coroutine
        # object), not a zero-argument callable.
        self._tasks.put(("run_async", coro))
        return self._result.get()

    def run_sync(self, func: Callable[[], Any]) -> Any:
        # Call a plain function on the event loop thread (invoked with no
        # arguments in _main) and block the caller until it returns.
        self._tasks.put(("run_sync", func))
        return self._result.get()

    def run(self) -> None:
        """Run the thread."""
        try:
            run(self._main)
        except Exception as exc:
            # Store rather than propagate: exceptions escaping Thread.run
            # would otherwise be lost; callers can inspect .exception.
            self._exception = exc

    async def _main(self) -> None:
        # Event-loop body: drain the task queue until the shutdown sentinel.
        async with create_task_group() as tg:
            self._task_group = tg
            self.started.set()
            while True:
                # Block for the next task in a worker thread so the event
                # loop itself stays free to run scheduled tasks.
                task = await to_thread.run_sync(self._tasks.get)
                if task is None:
                    # Shutdown sentinel from stop().
                    break
                func, arg = task
                if func == "start_soon":
                    tg.start_soon(arg)
                elif func == "run_async":
                    res = await arg
                    self._result.put(res)
                else:  # func == "run_sync"
                    res = arg()
                    self._result.put(res)

            # Cancel anything still running in the group before exiting.
            tg.cancel_scope.cancel()

    def stop(self) -> None:
        """Stop the thread.

        This method is threadsafe.
        """
        self._tasks.put(None)
        self.stopped.set()
@pytest.fixture(autouse=True)
def _global_setup():
    """setup temporary env for tests"""
    global tmp
    tmp = tempfile.mkdtemp()
    env_override = {
        "HOME": tmp,
        # Let tests work with --user install when HOME is changed:
        "PYTHONPATH": os.pathsep.join(sys.path),
    }
    patchers[:] = [patch.dict(os.environ, env_override)]
    for patcher in patchers:
        patcher.start()

    # install IPython in the temp home:
    install(user=True)
    yield
    for patcher in patchers:
        patcher.stop()

    try:
        shutil.rmtree(tmp)  # type:ignore
    except OSError:
        # already gone
        pass
def _inject_cell_id(_self, *args, **kwargs):
    """Wrapper for jupyter_client.session:Session.msg that stamps a cellId
    into the metadata of every message it builds."""
    assert isinstance(_self, Session)
    message = orig_msg(_self, *args, **kwargs)
    metadata = message["metadata"]
    assert "cellId" not in metadata
    metadata["cellId"] = "test_cell_id"
    return message


@contextmanager
def patch_cell_id():
    """Temporarily replace Session.msg with the cellId-injecting wrapper."""
    try:
        Session.msg = _inject_cell_id  # type:ignore
        yield
    finally:
        # Always restore the pristine method, even if the body raised.
        Session.msg = orig_msg  # type:ignore
def test_getpass_stream(kc):
    """Tests that kernel getpass accept the stream parameter"""
    kernel = InProcessKernel()
    # Permit stdin and stub out the actual input-request round trip.
    kernel._allow_stdin = True
    kernel._input_request = lambda *args, **kwargs: None  # type:ignore

    # Passing a stream argument must not raise.
    kernel.getpass(stream="non empty")
    @flaky
    async def test_interface(self, km):
        """Does the in-process kernel manager implement the basic KM interface?"""
        async with create_task_group() as tg:
            # No kernel exists until start_kernel is called.
            assert not km.has_kernel

            await tg.start(km.start_kernel)
            assert km.has_kernel
            assert km.kernel is not None

            # A client can be created and its channels started/stopped.
            kc = km.client()
            assert not kc.channels_running

            kc.start_channels()
            assert kc.channels_running

            # Restarting replaces the kernel object with a fresh one.
            old_kernel = km.kernel
            await tg.start(km.restart_kernel)
            assert km.kernel is not None
            assert km.kernel != old_kernel

            km.shutdown_kernel()
            assert not km.has_kernel

            # Interrupt/signal are not supported for an in-process kernel.
            with pytest.raises(NotImplementedError):
                km.interrupt_kernel()

            with pytest.raises(NotImplementedError):
                km.signal_kernel(9)

            kc.stop_channels()
            assert not kc.channels_running
in-process kernel work?""" 78 | km, kc = km_kc 79 | 80 | await kc.execute("foo = 1") 81 | assert km.kernel.shell.user_ns["foo"] == 1 82 | 83 | async def test_complete(self, km_kc): 84 | """Does requesting completion from an in-process kernel work?""" 85 | km, kc = km_kc 86 | 87 | km.kernel.shell.push({"my_bar": 0, "my_baz": 1}) 88 | await kc.complete("my_ba", 5) 89 | msg = kc.get_shell_msg() 90 | assert msg["header"]["msg_type"] == "complete_reply" 91 | assert sorted(msg["content"]["matches"]) == ["my_bar", "my_baz"] 92 | 93 | async def test_inspect(self, km_kc): 94 | """Does requesting object information from an in-process kernel work?""" 95 | km, kc = km_kc 96 | 97 | km.kernel.shell.user_ns["foo"] = 1 98 | await kc.inspect("foo") 99 | msg = kc.get_shell_msg() 100 | assert msg["header"]["msg_type"] == "inspect_reply" 101 | content = msg["content"] 102 | assert content["found"] 103 | text = content["data"]["text/plain"] 104 | assert "int" in text 105 | 106 | async def test_history(self, km_kc): 107 | """Does requesting history from an in-process kernel work?""" 108 | km, kc = km_kc 109 | 110 | await kc.execute("1") 111 | await kc.history(hist_access_type="tail", n=1) 112 | msg = kc.shell_channel.get_msgs()[-1] 113 | assert msg["header"]["msg_type"] == "history_reply" 114 | history = msg["content"]["history"] 115 | assert len(history) == 1 116 | assert history[0][2] == "1" 117 | 118 | 119 | if __name__ == "__main__": 120 | unittest.main() 121 | -------------------------------------------------------------------------------- /tests/test_async.py: -------------------------------------------------------------------------------- 1 | """Test async/await integration""" 2 | 3 | import os 4 | import time 5 | 6 | import pytest 7 | 8 | from .test_message_spec import validate_message 9 | from .utils import TIMEOUT, execute, flush_channels, start_new_kernel 10 | 11 | KC = KM = None 12 | 13 | 14 | @pytest.fixture(autouse=True) 15 | def _setup_env(): 16 | """start the global 
@pytest.fixture(autouse=True)
def _setup_env():
    """start the global kernel (if it isn't running) and return its client"""
    global KM, KC
    KM, KC = start_new_kernel()
    # Drain any startup traffic before the test body runs.
    flush_channels(KC)
    yield
    # Teardown: the module globals must still be populated here.
    assert KC is not None
    assert KM is not None
    KC.stop_channels()
    KM.shutdown_kernel(now=True)
def test_comm(kernel: Kernel) -> None:
    """Exercise the basic Comm lifecycle: open, message, close."""
    manager = CommManager(kernel=kernel)
    kernel.comm_manager = manager  # type:ignore

    # Direct Comm construction is deprecated but must still work.
    with pytest.deprecated_call():
        c = Comm(kernel=kernel, target_name="bar")
    received = []

    assert kernel is c.kernel  # type:ignore

    def on_close(msg):
        received.append(msg)

    def on_message(msg):
        received.append(msg)

    c.publish_msg("foo")
    c.open({})
    c.on_msg(on_message)
    c.on_close(on_close)
    # One message plus one close should reach the callbacks.
    c.handle_msg({})
    c.handle_close({})
    c.close()
    assert len(received) == 2
    assert c.target_name == "bar"
def test_comm_in_manager(ipkernel: IPythonKernel) -> None:
    """A freshly created Comm registers itself with the kernel's manager."""
    # Direct construction is deprecated but must still register the comm.
    with pytest.deprecated_call():
        new_comm = Comm()

    assert new_comm.comm_id in ipkernel.comm_manager.comms
def test_get_connection_info():
    """get_connection_info round-trips a written connection file.

    Checks both the JSON-string form and the unpacked dict form, and
    verifies that parsing the JSON string reproduces the original info.
    """
    with TemporaryDirectory() as d:
        cf = os.path.join(d, "kernel.json")
        connect.write_connection_file(cf, **sample_info)
        json_info = connect.get_connection_info(cf)
        info = connect.get_connection_info(cf, unpack=True)
        assert isinstance(json_info, str)

        # The unpacked dict must contain exactly the values we wrote.
        sub_info = {k: v for k, v in info.items() if k in sample_info}
        assert sub_info == sample_info

        # Parse the JSON form and compare it too. JSON yields the key as
        # str, so re-encode it to match sample_info's bytes value.
        info2 = json.loads(json_info)
        info2["key"] = info2["key"].encode("utf-8")
        # BUG FIX: this previously rebuilt the dict from `info` instead of
        # `info2`, so the JSON round-trip was never actually verified.
        sub_info2 = {k: v for k, v in info2.items() if k in sample_info}
        assert sub_info2 == sample_info
@contextmanager
def setup_kernel(cmd):
    """start an embedded kernel in a subprocess, and wait for it to be ready

    Yields
    ------
    client: connected BlockingKernelClient for the embedded kernel
    """

    def connection_file_ready(connection_file):
        """Check if connection_file is a readable json file."""
        if not os.path.exists(connection_file):
            return False
        try:
            with open(connection_file) as f:
                json.load(f)
            return True
        except ValueError:
            # File exists but is not yet complete/valid JSON.
            return False

    kernel = Popen([sys.executable, "-c", cmd], stdout=PIPE, stderr=PIPE, encoding="utf-8")
    try:
        connection_file = os.path.join(
            paths.jupyter_runtime_dir(),
            "kernel-%i.json" % kernel.pid,
        )
        # wait for connection file to exist, timeout after SETUP_TIMEOUT seconds
        tic = time.time()
        while (
            not connection_file_ready(connection_file)
            and kernel.poll() is None
            and time.time() < tic + SETUP_TIMEOUT
        ):
            time.sleep(0.1)

        # Wait 100ms for the writing to finish
        time.sleep(0.1)

        # Subprocess exited: surface its stderr as the failure reason.
        if kernel.poll() is not None:
            o, e = kernel.communicate()
            raise OSError("Kernel failed to start:\n%s" % e)

        if not os.path.exists(connection_file):
            if kernel.poll() is None:
                kernel.terminate()
            raise OSError("Connection file %r never arrived" % connection_file)

        client = BlockingKernelClient(connection_file=connection_file)
        client.load_connection_file()
        client.start_channels()
        client.wait_for_ready()
        try:
            yield client
        finally:
            client.stop_channels()
    finally:
        kernel.terminate()
        kernel.wait()
        # Make sure all the fds get closed.
        for attr in ["stdout", "stderr", "stdin"]:
            fid = getattr(kernel, attr)
            if fid:
                fid.close()
"\n".join( 134 | [ 135 | "from ipykernel.embed import embed_kernel", 136 | "def go():", 137 | " a=5", 138 | ' b="hi there"', 139 | " embed_kernel()", 140 | "go()", 141 | "", 142 | ] 143 | ) 144 | 145 | with setup_kernel(cmd) as client: 146 | # oinfo a (int) 147 | client.inspect("a") 148 | while True: 149 | msg = client.get_shell_msg(timeout=TIMEOUT) 150 | if msg["msg_type"] == "inspect_reply": 151 | break 152 | content = msg["content"] 153 | assert content["found"] 154 | text = content["data"]["text/plain"] 155 | assert "5" in text 156 | 157 | # oinfo b (str) 158 | client.inspect("b") 159 | while True: 160 | msg = client.get_shell_msg(timeout=TIMEOUT) 161 | if msg["msg_type"] == "inspect_reply": 162 | break 163 | content = msg["content"] 164 | assert content["found"] 165 | text = content["data"]["text/plain"] 166 | assert "hi there" in text 167 | 168 | # oinfo c (undefined) 169 | client.inspect("c") 170 | while True: 171 | msg = client.get_shell_msg(timeout=TIMEOUT) 172 | if msg["msg_type"] == "inspect_reply": 173 | break 174 | content = msg["content"] 175 | assert not content["found"] 176 | 177 | 178 | @flaky(max_runs=3) 179 | def test_embed_kernel_reentrant(): 180 | """ipykernel.embed.embed_kernel() can be called multiple times""" 181 | cmd = "\n".join( 182 | [ 183 | "from ipykernel.embed import embed_kernel", 184 | "count = 0", 185 | "def go():", 186 | " global count", 187 | " embed_kernel()", 188 | " count = count + 1", 189 | "", 190 | "while True: go()", 191 | "", 192 | ] 193 | ) 194 | 195 | with setup_kernel(cmd) as client: 196 | for i in range(5): 197 | client.inspect("count") 198 | while True: 199 | msg = client.get_shell_msg(timeout=TIMEOUT) 200 | if msg["msg_type"] == "inspect_reply": 201 | break 202 | content = msg["content"] 203 | assert content["found"] 204 | text = content["data"]["text/plain"] 205 | assert str(i) in text 206 | 207 | # exit from embed_kernel 208 | client.execute("get_ipython().exit_now = True") 209 | msg = 
client.get_shell_msg(timeout=TIMEOUT) 210 | time.sleep(0.2) 211 | 212 | 213 | def test_embed_kernel_func(): 214 | from types import ModuleType 215 | 216 | module = ModuleType("test") 217 | 218 | def trigger_stop(): 219 | time.sleep(1) 220 | app = IPKernelApp.instance() 221 | app.stop() 222 | IPKernelApp.clear_instance() 223 | 224 | thread = threading.Thread(target=trigger_stop) 225 | thread.start() 226 | 227 | embed_kernel(module, outstream_class=None) 228 | -------------------------------------------------------------------------------- /tests/test_eventloop.py: -------------------------------------------------------------------------------- 1 | """Test eventloop integration""" 2 | 3 | import asyncio 4 | import os 5 | import sys 6 | import threading 7 | import time 8 | 9 | import pytest 10 | 11 | from ipykernel.eventloops import ( 12 | enable_gui, 13 | loop_asyncio, 14 | loop_cocoa, 15 | loop_tk, 16 | ) 17 | 18 | from .utils import flush_channels, start_new_kernel 19 | 20 | KC = KM = None 21 | 22 | qt_guis_avail = [] 23 | 24 | gui_to_module = {"qt6": "PySide6", "qt5": "PyQt5"} 25 | 26 | 27 | def _get_qt_vers(): 28 | """If any version of Qt is available, this will populate `guis_avail` with 'qt' and 'qtx'. Due 29 | to the import mechanism, we can't import multiple versions of Qt in one session.""" 30 | for gui in ["qt6", "qt5"]: 31 | print(f"Trying {gui}") 32 | try: 33 | __import__(gui_to_module[gui]) 34 | qt_guis_avail.append(gui) 35 | if "QT_API" in os.environ: 36 | del os.environ["QT_API"] 37 | except ImportError: 38 | pass # that version of Qt isn't available. 
39 | 40 | 41 | _get_qt_vers() 42 | 43 | 44 | @pytest.fixture(autouse=True) 45 | def _setup_env(): 46 | """start the global kernel (if it isn't running) and return its client""" 47 | global KM, KC 48 | KM, KC = start_new_kernel() 49 | flush_channels(KC) 50 | yield 51 | assert KM is not None 52 | assert KC is not None 53 | KC.stop_channels() 54 | KM.shutdown_kernel(now=True) 55 | 56 | 57 | windows_skip = pytest.mark.skipif(os.name == "nt", reason="causing failures on windows") 58 | 59 | 60 | @windows_skip 61 | @pytest.mark.skipif(sys.platform == "darwin", reason="hangs on macos") 62 | def test_tk_loop(kernel): 63 | def do_thing(): 64 | time.sleep(1) 65 | try: 66 | kernel.app_wrapper.app.quit() 67 | # guard for tk failing to start (if there is no display) 68 | except AttributeError: 69 | pass 70 | 71 | t = threading.Thread(target=do_thing) 72 | t.start() 73 | # guard for tk failing to start (if there is no display) 74 | try: 75 | loop_tk(kernel) 76 | except Exception: 77 | pass 78 | t.join() 79 | 80 | 81 | @windows_skip 82 | @pytest.mark.parametrize("anyio_backend", ["asyncio"]) 83 | def test_asyncio_loop(kernel): 84 | def do_thing(): 85 | loop.call_later(0.01, loop.stop) 86 | 87 | loop = asyncio.get_event_loop() 88 | loop.call_soon(do_thing) 89 | loop_asyncio(kernel) 90 | 91 | 92 | @windows_skip 93 | def test_enable_gui(kernel): 94 | enable_gui("tk", kernel) 95 | 96 | 97 | @pytest.mark.skipif(sys.platform != "darwin", reason="MacOS-only") 98 | def test_cocoa_loop(kernel): 99 | loop_cocoa(kernel) 100 | 101 | 102 | @pytest.mark.parametrize("gui", qt_guis_avail) 103 | def test_qt_enable_gui(gui, kernel, capsys): 104 | if os.getenv("GITHUB_ACTIONS", None) == "true" and gui == "qt5": 105 | pytest.skip("Qt5 and GitHub action crash CPython") 106 | if gui == "qt6" and sys.version_info < (3, 10): 107 | pytest.skip( 108 | "qt6 fails on 3.9 with AttributeError: module 'PySide6.QtPrintSupport' has no attribute 'QApplication'" 109 | ) 110 | if sys.platform == "linux" and gui == 
"qt6" and os.getenv("GITHUB_ACTIONS", None) == "true": 111 | pytest.skip("qt6 fails on github CI with missing libEGL.so.1") 112 | enable_gui(gui, kernel) 113 | 114 | # We store the `QApplication` instance in the kernel. 115 | assert hasattr(kernel, "app") 116 | 117 | # And the `QEventLoop` is added to `app`:` 118 | assert hasattr(kernel.app, "qt_event_loop") 119 | 120 | # Don't create another app even if `gui` is the same. 121 | app = kernel.app 122 | enable_gui(gui, kernel) 123 | assert app == kernel.app 124 | 125 | # Event loop integration can be turned off. 126 | enable_gui(None, kernel) 127 | assert not hasattr(kernel, "app") 128 | 129 | # But now we're stuck with this version of Qt for good; can't switch. 130 | for not_gui in ["qt6", "qt5"]: 131 | if not_gui not in qt_guis_avail: 132 | break 133 | 134 | enable_gui(not_gui, kernel) 135 | captured = capsys.readouterr() 136 | assert captured.out == f"Cannot switch Qt versions for this session; you must use {gui}.\n" 137 | 138 | # Check 'qt' gui, which means "the best available" 139 | enable_gui(None, kernel) 140 | enable_gui("qt", kernel) 141 | assert gui_to_module[gui] in str(kernel.app) 142 | -------------------------------------------------------------------------------- /tests/test_heartbeat.py: -------------------------------------------------------------------------------- 1 | """Tests for heartbeat thread""" 2 | 3 | # Copyright (c) IPython Development Team. 4 | # Distributed under the terms of the Modified BSD License. 
5 | 6 | import errno 7 | from typing import no_type_check 8 | from unittest.mock import patch 9 | 10 | import pytest 11 | import zmq 12 | 13 | from ipykernel.heartbeat import Heartbeat 14 | 15 | 16 | def test_port_bind_failure_raises(): 17 | heart = Heartbeat(None) 18 | with patch.object(heart, "_try_bind_socket") as mock_try_bind: 19 | mock_try_bind.side_effect = zmq.ZMQError(-100, "fails for unknown error types") 20 | with pytest.raises(zmq.ZMQError): 21 | heart._bind_socket() 22 | assert mock_try_bind.call_count == 1 23 | 24 | 25 | def test_port_bind_success(): 26 | heart = Heartbeat(None) 27 | with patch.object(heart, "_try_bind_socket") as mock_try_bind: 28 | heart._bind_socket() 29 | assert mock_try_bind.call_count == 1 30 | 31 | 32 | @no_type_check 33 | def test_port_bind_failure_recovery(): 34 | try: 35 | errno.WSAEADDRINUSE 36 | except AttributeError: 37 | # Fake windows address in-use code 38 | errno.WSAEADDRINUSE = 12345 39 | 40 | try: 41 | heart = Heartbeat(None) 42 | with patch.object(heart, "_try_bind_socket") as mock_try_bind: 43 | mock_try_bind.side_effect = [ 44 | zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind unix"), 45 | zmq.ZMQError(errno.WSAEADDRINUSE, "fails for non-bind windows"), 46 | ] + [0] * 100 47 | # Shouldn't raise anything as retries will kick in 48 | heart._bind_socket() 49 | finally: 50 | # Cleanup fake assignment 51 | if errno.WSAEADDRINUSE == 12345: 52 | del errno.WSAEADDRINUSE 53 | 54 | 55 | def test_port_bind_failure_gives_up_retries(): 56 | heart = Heartbeat(None) 57 | with patch.object(heart, "_try_bind_socket") as mock_try_bind: 58 | mock_try_bind.side_effect = zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind") 59 | with pytest.raises(zmq.ZMQError): 60 | heart._bind_socket() 61 | assert mock_try_bind.call_count == 100 62 | -------------------------------------------------------------------------------- /tests/test_ipkernel_direct.py: -------------------------------------------------------------------------------- 1 | 
"""Test IPythonKernel directly""" 2 | 3 | import os 4 | 5 | import pytest 6 | from IPython.core.history import DummyDB 7 | 8 | from ipykernel.comm.comm import BaseComm 9 | from ipykernel.ipkernel import IPythonKernel, _create_comm 10 | 11 | from .conftest import MockIPyKernel 12 | 13 | if os.name == "nt": 14 | pytest.skip("skipping tests on windows", allow_module_level=True) 15 | 16 | 17 | class user_mod: 18 | __dict__ = {} 19 | 20 | 21 | async def test_properties(ipkernel: IPythonKernel) -> None: 22 | ipkernel.user_module = user_mod() 23 | ipkernel.user_ns = {} 24 | 25 | 26 | async def test_direct_kernel_info_request(ipkernel): 27 | reply = await ipkernel.test_shell_message("kernel_info_request", {}) 28 | assert reply["header"]["msg_type"] == "kernel_info_reply" 29 | assert ( 30 | "supported_features" not in reply["content"] 31 | or "kernel subshells" not in reply["content"]["supported_features"] 32 | ) 33 | 34 | 35 | async def test_direct_execute_request(ipkernel: MockIPyKernel) -> None: 36 | reply = await ipkernel.test_shell_message("execute_request", dict(code="hello", silent=False)) 37 | assert reply["header"]["msg_type"] == "execute_reply" 38 | reply = await ipkernel.test_shell_message( 39 | "execute_request", dict(code="trigger_error", silent=False) 40 | ) 41 | assert reply["content"]["status"] == "aborted" 42 | 43 | reply = await ipkernel.test_shell_message("execute_request", dict(code="hello", silent=False)) 44 | assert reply["header"]["msg_type"] == "execute_reply" 45 | 46 | 47 | async def test_direct_execute_request_aborting(ipkernel): 48 | ipkernel._aborting = True 49 | reply = await ipkernel.test_shell_message("execute_request", dict(code="hello", silent=False)) 50 | assert reply["header"]["msg_type"] == "execute_reply" 51 | assert reply["content"]["status"] == "aborted" 52 | 53 | 54 | async def test_complete_request(ipkernel, tracemalloc_resource_warning): 55 | reply = await ipkernel.test_shell_message("complete_request", dict(code="hello", 
cursor_pos=0)) 56 | assert reply["header"]["msg_type"] == "complete_reply" 57 | ipkernel.use_experimental_completions = False 58 | reply = await ipkernel.test_shell_message( 59 | "complete_request", dict(code="hello", cursor_pos=None) 60 | ) 61 | assert reply["header"]["msg_type"] == "complete_reply" 62 | 63 | 64 | async def test_inspect_request(ipkernel): 65 | reply = await ipkernel.test_shell_message("inspect_request", dict(code="hello", cursor_pos=0)) 66 | assert reply["header"]["msg_type"] == "inspect_reply" 67 | 68 | 69 | async def test_history_request(ipkernel): 70 | ipkernel.shell.history_manager.db = DummyDB() 71 | reply = await ipkernel.test_shell_message( 72 | "history_request", dict(hist_access_type="", output="", raw="") 73 | ) 74 | assert reply["header"]["msg_type"] == "history_reply" 75 | reply = await ipkernel.test_shell_message( 76 | "history_request", dict(hist_access_type="tail", output="", raw="") 77 | ) 78 | assert reply["header"]["msg_type"] == "history_reply" 79 | reply = await ipkernel.test_shell_message( 80 | "history_request", dict(hist_access_type="range", output="", raw="") 81 | ) 82 | assert reply["header"]["msg_type"] == "history_reply" 83 | reply = await ipkernel.test_shell_message( 84 | "history_request", dict(hist_access_type="search", output="", raw="") 85 | ) 86 | assert reply["header"]["msg_type"] == "history_reply" 87 | 88 | 89 | async def test_comm_info_request(ipkernel): 90 | reply = await ipkernel.test_shell_message("comm_info_request") 91 | assert reply["header"]["msg_type"] == "comm_info_reply" 92 | 93 | 94 | async def test_direct_interrupt_request(ipkernel): 95 | reply = await ipkernel.test_control_message("interrupt_request", {}) 96 | assert reply["header"]["msg_type"] == "interrupt_reply" 97 | assert reply["content"] == {"status": "ok"} 98 | 99 | # test failure on interrupt request 100 | def raiseOSError(): 101 | msg = "evalue" 102 | raise OSError(msg) 103 | 104 | ipkernel._send_interrupt_children = raiseOSError 105 | 
reply = await ipkernel.test_control_message("interrupt_request", {}) 106 | assert reply["header"]["msg_type"] == "interrupt_reply" 107 | assert reply["content"]["status"] == "error" 108 | assert reply["content"]["ename"] == "OSError" 109 | assert reply["content"]["evalue"] == "evalue" 110 | assert len(reply["content"]["traceback"]) > 0 111 | 112 | 113 | # TODO: this causes deadlock 114 | # async def test_direct_shutdown_request(ipkernel): 115 | # reply = await ipkernel.test_shell_message("shutdown_request", dict(restart=False)) 116 | # assert reply["header"]["msg_type"] == "shutdown_reply" 117 | # reply = await ipkernel.test_shell_message("shutdown_request", dict(restart=True)) 118 | # assert reply["header"]["msg_type"] == "shutdown_reply" 119 | 120 | # TODO: this causes deadlock 121 | # async def test_direct_usage_request(kernel): 122 | # reply = await kernel.test_control_message("usage_request", {}) 123 | # assert reply['header']['msg_type'] == 'usage_reply' 124 | 125 | 126 | async def test_is_complete_request(ipkernel: MockIPyKernel) -> None: 127 | reply = await ipkernel.test_shell_message("is_complete_request", dict(code="hello")) 128 | assert reply["header"]["msg_type"] == "is_complete_reply" 129 | setattr(ipkernel, "shell.input_transformer_manager", None) 130 | reply = await ipkernel.test_shell_message("is_complete_request", dict(code="hello")) 131 | assert reply["header"]["msg_type"] == "is_complete_reply" 132 | 133 | 134 | def test_do_apply(ipkernel: MockIPyKernel) -> None: 135 | from ipyparallel import pack_apply_message 136 | 137 | def hello(): 138 | pass 139 | 140 | msg = pack_apply_message(hello, (), {}) 141 | ipkernel.do_apply(None, msg, "1", {}) 142 | ipkernel.do_apply(None, [], "1", {}) 143 | 144 | 145 | async def test_direct_debug_request(ipkernel): 146 | reply = await ipkernel.test_control_message("debug_request", {}) 147 | assert reply["header"]["msg_type"] == "debug_reply" 148 | 149 | 150 | async def test_direct_clear(ipkernel): 151 | 
ipkernel.do_clear() 152 | 153 | 154 | async def test_dispatch_debugpy(ipkernel: IPythonKernel) -> None: 155 | msg = ipkernel.session.msg("debug_request", {}) 156 | msg_list = ipkernel.session.serialize(msg) 157 | await ipkernel.receive_debugpy_message(msg_list) 158 | 159 | 160 | def test_create_comm(): 161 | assert isinstance(_create_comm(), BaseComm) 162 | 163 | 164 | def test_finish_metadata(ipkernel: IPythonKernel) -> None: 165 | reply_content = dict(status="error", ename="UnmetDependency") 166 | metadata = ipkernel.finish_metadata({}, {}, reply_content) 167 | assert metadata["dependencies_met"] is False 168 | 169 | 170 | async def test_do_debug_request(ipkernel: IPythonKernel) -> None: 171 | msg = ipkernel.session.msg("debug_request", {}) 172 | ipkernel.session.serialize(msg) 173 | await ipkernel.do_debug_request(msg) 174 | -------------------------------------------------------------------------------- /tests/test_jsonutil.py: -------------------------------------------------------------------------------- 1 | """Test suite for our JSON utilities.""" 2 | 3 | # Copyright (c) IPython Development Team. 4 | # Distributed under the terms of the Modified BSD License. 5 | 6 | import json 7 | import numbers 8 | from binascii import a2b_base64 9 | from datetime import date, datetime 10 | 11 | import pytest 12 | from jupyter_client._version import version_info as jupyter_client_version 13 | 14 | from ipykernel import jsonutil 15 | from ipykernel.jsonutil import encode_images, json_clean 16 | 17 | JUPYTER_CLIENT_MAJOR_VERSION: int = jupyter_client_version[0] # type:ignore 18 | 19 | 20 | class MyInt: 21 | def __int__(self): 22 | return 389 23 | 24 | 25 | numbers.Integral.register(MyInt) 26 | 27 | 28 | class MyFloat: 29 | def __float__(self): 30 | return 3.14 31 | 32 | 33 | numbers.Real.register(MyFloat) 34 | 35 | 36 | @pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op") 37 | def test(): 38 | # list of input/expected output. 
Use None for the expected output if it 39 | # can be the same as the input. 40 | pairs = [ 41 | (1, None), # start with scalars 42 | (1.0, None), 43 | ("a", None), 44 | (True, None), 45 | (False, None), 46 | (None, None), 47 | # Containers 48 | ([1, 2], None), 49 | ((1, 2), [1, 2]), 50 | ({1, 2}, [1, 2]), 51 | (dict(x=1), None), 52 | ({"x": 1, "y": [1, 2, 3], "1": "int"}, None), 53 | # More exotic objects 54 | ((x for x in range(3)), [0, 1, 2]), 55 | (iter([1, 2]), [1, 2]), 56 | (datetime(1991, 7, 3, 12, 00), "1991-07-03T12:00:00.000000"), 57 | (date(1991, 7, 3), "1991-07-03T00:00:00.000000"), 58 | (MyFloat(), 3.14), 59 | (MyInt(), 389), 60 | ] 61 | 62 | for val, jval in pairs: 63 | if jval is None: 64 | jval = val # type:ignore 65 | out = json_clean(val) 66 | # validate our cleanup 67 | assert out == jval 68 | # and ensure that what we return, indeed encodes cleanly 69 | json.loads(json.dumps(out)) 70 | 71 | 72 | @pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op") 73 | def test_encode_images(): 74 | # invalid data, but the header and footer are from real files 75 | pngdata = b"\x89PNG\r\n\x1a\nblahblahnotactuallyvalidIEND\xaeB`\x82" 76 | jpegdata = b"\xff\xd8\xff\xe0\x00\x10JFIFblahblahjpeg(\xa0\x0f\xff\xd9" 77 | pdfdata = b"%PDF-1.\ntrailer<>]>>>>>>" 78 | bindata = b"\xff\xff\xff\xff" 79 | 80 | fmt = { 81 | "image/png": pngdata, 82 | "image/jpeg": jpegdata, 83 | "application/pdf": pdfdata, 84 | "application/unrecognized": bindata, 85 | } 86 | encoded = json_clean(encode_images(fmt)) 87 | for key, value in fmt.items(): 88 | # encoded has unicode, want bytes 89 | decoded = a2b_base64(encoded[key]) 90 | assert decoded == value 91 | encoded2 = json_clean(encode_images(encoded)) 92 | assert encoded == encoded2 93 | 94 | for key, value in fmt.items(): 95 | decoded = a2b_base64(encoded[key]) 96 | assert decoded == value 97 | 98 | 99 | @pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op") 100 | def 
test_lambda(): 101 | with pytest.raises(ValueError): # noqa: PT011 102 | json_clean(lambda: 1) 103 | 104 | 105 | @pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op") 106 | def test_exception(): 107 | bad_dicts = [ 108 | {1: "number", "1": "string"}, 109 | {True: "bool", "True": "string"}, 110 | ] 111 | for d in bad_dicts: 112 | with pytest.raises(ValueError): # noqa: PT011 113 | json_clean(d) 114 | 115 | 116 | @pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op") 117 | def test_unicode_dict(): 118 | data = {"üniço∂e": "üniço∂e"} 119 | clean = jsonutil.json_clean(data) 120 | assert data == clean 121 | -------------------------------------------------------------------------------- /tests/test_kernel_direct.py: -------------------------------------------------------------------------------- 1 | """test the IPython Kernel""" 2 | 3 | # Copyright (c) IPython Development Team. 4 | # Distributed under the terms of the Modified BSD License. 
5 | 6 | import os 7 | 8 | import pytest 9 | 10 | if os.name == "nt": 11 | pytest.skip("skipping tests on windows", allow_module_level=True) 12 | 13 | 14 | async def test_direct_kernel_info_request(kernel): 15 | reply = await kernel.test_shell_message("kernel_info_request", {}) 16 | assert reply["header"]["msg_type"] == "kernel_info_reply" 17 | assert ( 18 | "supported_features" not in reply["content"] 19 | or "kernel subshells" not in reply["content"]["supported_features"] 20 | ) 21 | 22 | 23 | async def test_direct_execute_request(kernel): 24 | reply = await kernel.test_shell_message("execute_request", dict(code="hello", silent=False)) 25 | assert reply["header"]["msg_type"] == "execute_reply" 26 | 27 | 28 | async def test_direct_execute_request_aborting(kernel): 29 | kernel._aborting = True 30 | reply = await kernel.test_shell_message("execute_request", dict(code="hello", silent=False)) 31 | assert reply["header"]["msg_type"] == "execute_reply" 32 | assert reply["content"]["status"] == "aborted" 33 | 34 | 35 | async def test_direct_execute_request_error(kernel): 36 | await kernel.execute_request(None, None, None) 37 | 38 | 39 | async def test_complete_request(kernel): 40 | reply = await kernel.test_shell_message("complete_request", dict(code="hello", cursor_pos=0)) 41 | assert reply["header"]["msg_type"] == "complete_reply" 42 | 43 | 44 | async def test_inspect_request(kernel): 45 | reply = await kernel.test_shell_message("inspect_request", dict(code="hello", cursor_pos=0)) 46 | assert reply["header"]["msg_type"] == "inspect_reply" 47 | 48 | 49 | async def test_history_request(kernel): 50 | reply = await kernel.test_shell_message( 51 | "history_request", dict(hist_access_type="", output="", raw="") 52 | ) 53 | assert reply["header"]["msg_type"] == "history_reply" 54 | reply = await kernel.test_shell_message( 55 | "history_request", dict(hist_access_type="tail", output="", raw="") 56 | ) 57 | assert reply["header"]["msg_type"] == "history_reply" 58 | reply = await 
kernel.test_shell_message( 59 | "history_request", dict(hist_access_type="range", output="", raw="") 60 | ) 61 | assert reply["header"]["msg_type"] == "history_reply" 62 | reply = await kernel.test_shell_message( 63 | "history_request", dict(hist_access_type="search", output="", raw="") 64 | ) 65 | assert reply["header"]["msg_type"] == "history_reply" 66 | 67 | 68 | async def test_comm_info_request(kernel): 69 | reply = await kernel.test_shell_message("comm_info_request") 70 | assert reply["header"]["msg_type"] == "comm_info_reply" 71 | 72 | 73 | async def test_direct_interrupt_request(kernel): 74 | reply = await kernel.test_control_message("interrupt_request", {}) 75 | assert reply["header"]["msg_type"] == "interrupt_reply" 76 | assert reply["content"] == {"status": "ok"} 77 | 78 | # test failure on interrupt request 79 | def raiseOSError(): 80 | msg = "evalue" 81 | raise OSError(msg) 82 | 83 | kernel._send_interrupt_children = raiseOSError 84 | reply = await kernel.test_control_message("interrupt_request", {}) 85 | assert reply["header"]["msg_type"] == "interrupt_reply" 86 | assert reply["content"]["status"] == "error" 87 | assert reply["content"]["ename"] == "OSError" 88 | assert reply["content"]["evalue"] == "evalue" 89 | assert len(reply["content"]["traceback"]) > 0 90 | 91 | 92 | async def test_direct_shutdown_request(kernel): 93 | reply = await kernel.test_shell_message("shutdown_request", dict(restart=False)) 94 | assert reply["header"]["msg_type"] == "shutdown_reply" 95 | reply = await kernel.test_shell_message("shutdown_request", dict(restart=True)) 96 | assert reply["header"]["msg_type"] == "shutdown_reply" 97 | 98 | 99 | async def test_is_complete_request(kernel): 100 | reply = await kernel.test_shell_message("is_complete_request", dict(code="hello")) 101 | assert reply["header"]["msg_type"] == "is_complete_reply" 102 | 103 | 104 | async def test_direct_debug_request(kernel): 105 | reply = await kernel.test_control_message("debug_request", {}) 106 | 
assert reply["header"]["msg_type"] == "debug_reply" 107 | 108 | 109 | async def test_process_control(kernel): 110 | from jupyter_client.session import DELIM 111 | 112 | await kernel.process_control_message([DELIM, 1]) 113 | msg = kernel._prep_msg("does_not_exist") 114 | await kernel.process_control_message(msg) 115 | 116 | 117 | def test_should_handle(kernel): 118 | msg = kernel.session.msg("debug_request", {}) 119 | assert kernel.should_handle(kernel.control_socket, msg, []) is True 120 | 121 | 122 | async def test_dispatch_shell(kernel): 123 | from jupyter_client.session import DELIM 124 | 125 | await kernel.process_shell_message([DELIM, 1]) 126 | msg = kernel._prep_msg("does_not_exist") 127 | await kernel.process_shell_message(msg) 128 | 129 | 130 | async def test_publish_debug_event(kernel): 131 | kernel._publish_debug_event({}) 132 | 133 | 134 | async def test_connect_request(kernel): 135 | await kernel.connect_request(kernel.shell_socket, b"foo", {}) 136 | 137 | 138 | async def test_send_interrupt_children(kernel): 139 | kernel._send_interrupt_children() 140 | 141 | 142 | @pytest.mark.skip(reason="this causes deadlock") 143 | async def test_direct_usage_request(kernel): 144 | reply = await kernel.test_control_message("usage_request", {}) 145 | assert reply["header"]["msg_type"] == "usage_reply" 146 | -------------------------------------------------------------------------------- /tests/test_kernelapp.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import threading 4 | import time 5 | 6 | import pytest 7 | from jupyter_core.paths import secure_write 8 | from traitlets.config.loader import Config 9 | 10 | from ipykernel.kernelapp import IPKernelApp 11 | 12 | from .conftest import MockKernel 13 | from .utils import TemporaryWorkingDirectory 14 | 15 | 16 | @pytest.mark.skipif(os.name == "nt", reason="requires ipc") 17 | def test_init_ipc_socket(): 18 | app = IPKernelApp(transport="ipc") 19 | 
app.init_sockets() 20 | app.cleanup_connection_file() 21 | app.close() 22 | 23 | 24 | def test_blackhole(): 25 | app = IPKernelApp() 26 | app.no_stderr = True 27 | app.no_stdout = True 28 | app.init_blackhole() 29 | app.close() 30 | 31 | 32 | def test_start_app(): 33 | app = IPKernelApp() 34 | app.kernel = MockKernel() 35 | 36 | def trigger_stop(): 37 | time.sleep(1) 38 | app.stop() 39 | 40 | thread = threading.Thread(target=trigger_stop) 41 | t0 = time.time() 42 | thread.start() 43 | app.init_sockets() 44 | app.start() 45 | t1 = time.time() 46 | assert t1 - t0 >= 1 47 | app.cleanup_connection_file() 48 | app.kernel.destroy() 49 | app.close() 50 | 51 | 52 | @pytest.mark.skipif(os.name == "nt", reason="permission errors on windows") 53 | def test_merge_connection_file(): 54 | cfg = Config() 55 | with TemporaryWorkingDirectory() as d: 56 | cfg.ProfileDir.location = d 57 | cf = os.path.join(d, "kernel.json") 58 | initial_connection_info = { 59 | "ip": "*", 60 | "transport": "tcp", 61 | "shell_port": 0, 62 | "hb_port": 0, 63 | "iopub_port": 0, 64 | "stdin_port": 0, 65 | "control_port": 53555, 66 | "key": "abc123", 67 | "signature_scheme": "hmac-sha256", 68 | "kernel_name": "My Kernel", 69 | } 70 | # We cannot use connect.write_connection_file since 71 | # it replaces port number 0 with a random port 72 | # and we want IPKernelApp to do that replacement. 
73 | with secure_write(cf) as f: 74 | json.dump(initial_connection_info, f) 75 | assert os.path.exists(cf) 76 | 77 | app = IPKernelApp(config=cfg, connection_file=cf) 78 | 79 | # Calling app.initialize() does not work in the test, so we call the relevant functions that initialize() calls 80 | # We must pass in an empty argv, otherwise the default is to try to parse the test runner's argv 81 | super(IPKernelApp, app).initialize(argv=[""]) 82 | app.init_connection_file() 83 | app.init_sockets() 84 | app.init_heartbeat() 85 | app.write_connection_file() 86 | 87 | # Initialize should have merged the actual connection info 88 | # with the connection info in the file 89 | assert cf == app.abs_connection_file 90 | assert os.path.exists(cf) 91 | 92 | with open(cf) as f: 93 | new_connection_info = json.load(f) 94 | 95 | # ports originally set as 0 have been replaced 96 | for port in ("shell", "hb", "iopub", "stdin"): 97 | key = f"{port}_port" 98 | # We initially had the port as 0 99 | assert initial_connection_info[key] == 0 100 | # the port is not 0 now 101 | assert new_connection_info[key] > 0 102 | # the port matches the port the kernel actually used 103 | assert new_connection_info[key] == getattr(app, key), f"{key}" 104 | del new_connection_info[key] 105 | del initial_connection_info[key] 106 | 107 | # The wildcard ip address was also replaced 108 | assert new_connection_info["ip"] != "*" 109 | del new_connection_info["ip"] 110 | del initial_connection_info["ip"] 111 | 112 | # everything else in the connection file is the same 113 | assert initial_connection_info == new_connection_info 114 | 115 | app.close() 116 | os.remove(cf) 117 | 118 | 119 | @pytest.mark.skip("Something wrong with CI") 120 | @pytest.mark.parametrize("anyio_backend", ["trio"]) 121 | async def test_trio_loop(anyio_backend): 122 | import trio 123 | 124 | app = IPKernelApp(trio_loop=True) 125 | 126 | async def trigger_stop(): 127 | await trio.sleep(1) 128 | app.stop() 129 | 130 | app.kernel = 
# NOTE(review): truncated tail of tests/test_eventloop.py — the enclosing
# (async) test function's definition lies before this chunk; code kept as found.
    MockKernel()
    app.init_sockets()
    async with trio.open_nursery() as nursery:
        nursery.start_soon(lambda: app._start("trio"))
        nursery.start_soon(trigger_stop)
    app.cleanup_connection_file()
    app.kernel.destroy()
    app.close()


# ===== tests/test_kernelspec.py =====
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

import json
import os
import platform
import shutil
import sys
import tempfile
from unittest import mock

import pytest
from jupyter_core.paths import jupyter_data_dir

from ipykernel.kernelspec import (
    KERNEL_NAME,
    RESOURCES,
    InstallIPythonKernelSpecApp,
    get_kernel_dict,
    install,
    make_ipkernel_cmd,
    write_kernel_spec,
)

pjoin = os.path.join
# Several frozen-modules tests only apply to CPython (the -X option is CPython-specific).
is_cpython = platform.python_implementation() == "CPython"


def test_make_ipkernel_cmd():
    """The default launch argv runs ipykernel_launcher under the current interpreter."""
    cmd = make_ipkernel_cmd()
    assert cmd == [sys.executable, "-m", "ipykernel_launcher", "-f", "{connection_file}"]


def assert_kernel_dict(d):
    """Check the three required kernelspec keys for a default spec."""
    assert d["argv"] == make_ipkernel_cmd()
    assert d["display_name"] == "Python %i (ipykernel)" % sys.version_info[0]
    assert d["language"] == "python"


def test_get_kernel_dict():
    """get_kernel_dict() with no arguments yields the default spec."""
    d = get_kernel_dict()
    assert_kernel_dict(d)


def assert_kernel_dict_with_profile(d):
    """Check a spec built with the extra ``--profile test`` arguments."""
    assert d["argv"] == make_ipkernel_cmd(extra_arguments=["--profile", "test"])
    assert d["display_name"] == "Python %i (ipykernel)" % sys.version_info[0]
    assert d["language"] == "python"


def test_get_kernel_dict_with_profile():
    """Extra arguments are propagated into the spec's argv."""
    d = get_kernel_dict(["--profile", "test"])
    assert_kernel_dict_with_profile(d)


def assert_is_spec(path):
    """Assert *path* contains every resource file plus a parseable kernel.json."""
    for fname in os.listdir(RESOURCES):
        dst = pjoin(path, fname)
        assert os.path.exists(dst)
    kernel_json = pjoin(path, "kernel.json")
    assert os.path.exists(kernel_json)
    # json.load doubles as a validity check: raises if the file is malformed.
    with open(kernel_json, encoding="utf8") as f:
        json.load(f)


def test_write_kernel_spec():
    """write_kernel_spec() with no path writes a complete spec to a temp dir."""
    path = write_kernel_spec()
    assert_is_spec(path)
    shutil.rmtree(path)


def test_write_kernel_spec_path():
    """An explicit target path is honored and returned unchanged."""
    path = os.path.join(tempfile.mkdtemp(), KERNEL_NAME)
    path2 = write_kernel_spec(path)
    assert path == path2
    assert_is_spec(path)
    shutil.rmtree(path)


def test_install_kernelspec():
    """The CLI app installs a spec under <prefix>/share/jupyter/kernels."""
    path = tempfile.mkdtemp()
    try:
        InstallIPythonKernelSpecApp.launch_instance(argv=["--prefix", path])
        assert_is_spec(os.path.join(path, "share", "jupyter", "kernels", KERNEL_NAME))
    finally:
        shutil.rmtree(path)


def test_install_user():
    """install(user=True) writes under the per-user Jupyter data dir (HOME-patched)."""
    tmp = tempfile.mkdtemp()

    with mock.patch.dict(os.environ, {"HOME": tmp}):
        install(user=True)
        # Resolve the data dir inside the patch so it reflects the fake HOME.
        data_dir = jupyter_data_dir()

    assert_is_spec(os.path.join(data_dir, "kernels", KERNEL_NAME))


def test_install():
    """A system install lands in SYSTEM_JUPYTER_PATH (patched to a temp dir)."""
    system_jupyter_dir = tempfile.mkdtemp()

    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_jupyter_dir]):
        install()

    assert_is_spec(os.path.join(system_jupyter_dir, "kernels", KERNEL_NAME))


def test_install_profile():
    """install(profile=...) annotates the display name and appends --profile to argv."""
    system_jupyter_dir = tempfile.mkdtemp()

    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_jupyter_dir]):
        install(profile="Test")

    spec_file = os.path.join(system_jupyter_dir, "kernels", KERNEL_NAME, "kernel.json")
    with open(spec_file) as f:
        spec = json.load(f)
    assert spec["display_name"].endswith(" [profile=Test]")
    assert spec["argv"][-2:] == ["--profile", "Test"]


def test_install_display_name_overrides_profile():
    """An explicit display_name wins over the profile-derived suffix."""
    system_jupyter_dir = tempfile.mkdtemp()

    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_jupyter_dir]):
        install(display_name="Display", profile="Test")

    spec_file = os.path.join(system_jupyter_dir, "kernels", KERNEL_NAME, "kernel.json")
    with open(spec_file) as f:
        spec = json.load(f)
    assert spec["display_name"] == "Display"


@pytest.mark.parametrize("env", [None, dict(spam="spam"), dict(spam="spam", foo="bar")])
def test_install_env(tmp_path, env):
    """install(env=...) round-trips exactly the given env dict; None omits the key."""
    # python 3.5 // tmp_path must be converted to str
    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [str(tmp_path)]):
        install(env=env)

    spec = tmp_path / "kernels" / KERNEL_NAME / "kernel.json"
    with spec.open() as f:
        spec = json.load(f)

    if env:
        assert len(env) == len(spec["env"])
        for k, v in env.items():
            assert spec["env"][k] == v
    else:
        assert "env" not in spec


@pytest.mark.skipif(sys.version_info < (3, 11) or not is_cpython, reason="requires cPython 3.11")
def test_install_frozen_modules_on():
    """frozen_modules=True sets the pydevd env var and leaves argv untouched."""
    system_jupyter_dir = tempfile.mkdtemp()

    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_jupyter_dir]):
        install(frozen_modules=True)

    spec_file = os.path.join(system_jupyter_dir, "kernels", KERNEL_NAME, "kernel.json")
    with open(spec_file) as f:
        spec = json.load(f)
    assert spec["env"]["PYDEVD_DISABLE_FILE_VALIDATION"] == "1"
    assert "-Xfrozen_modules=off" not in spec["argv"]


@pytest.mark.skipif(sys.version_info < (3, 11) or not is_cpython, reason="requires cPython 3.11")
def test_install_frozen_modules_off():
    """frozen_modules=False injects -Xfrozen_modules=off and sets no env."""
    system_jupyter_dir = tempfile.mkdtemp()

    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_jupyter_dir]):
        install(frozen_modules=False)

    spec_file = os.path.join(system_jupyter_dir, "kernels", KERNEL_NAME, "kernel.json")
    with open(spec_file) as f:
        spec = json.load(f)
    assert "env" not in spec
    assert spec["argv"][1] == "-Xfrozen_modules=off"


@pytest.mark.skipif(
    sys.version_info >= (3, 11) or is_cpython,
    reason="checks versions older than 3.11 and other Python implementations",
)
def test_install_frozen_modules_no_op():
    # ensure we do not add Xfrozen_modules on older Python versions
    # (although cPython does not error out on unknown X options as of 3.8)
    system_jupyter_dir = tempfile.mkdtemp()

    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_jupyter_dir]):
        install(frozen_modules=False)

    spec_file = os.path.join(system_jupyter_dir, "kernels", KERNEL_NAME, "kernel.json")
    with open(spec_file) as f:
        spec = json.load(f)
    assert "-Xfrozen_modules=off" not in spec["argv"]


# ===== tests/test_parentpoller.py =====
import os
import sys
import warnings
from unittest import mock

import pytest

from ipykernel.parentpoller import ParentPollerUnix, ParentPollerWindows


@pytest.mark.skipif(os.name == "nt", reason="only works on posix")
def test_parent_poller_unix_to_pid1():
    """When the parent becomes pid 1 the poller exits; other errors propagate."""
    poller = ParentPollerUnix()
    # Simulate reparenting to init (ppid == 1): run() should call os._exit.
    with mock.patch("os.getppid", lambda: 1):  # noqa: PT008

        def exit_mock(*args):
            sys.exit(1)

        with mock.patch("os._exit", exit_mock), pytest.raises(SystemExit):
            poller.run()

    def mock_getppid():
        msg = "hi"
        raise ValueError(msg)

    # An unexpected exception from os.getppid is not swallowed.
    with mock.patch("os.getppid", mock_getppid), pytest.raises(ValueError):  # noqa: PT011
        poller.run()


@pytest.mark.skipif(os.name == "nt", reason="only works on posix")
# NOTE(review): the function this decorator applies to continues in the next chunk.
# NOTE(review): the posix-only skipif decorator for this function sits at the
# end of the previous chunk.
def test_parent_poller_unix_reparent_not_pid1():
    """The poller also exits when the tracked parent pid changes to any other pid."""
    parent_pid = 221
    # First call returns the original parent, second a different pid (reparented).
    parent_pids = iter([parent_pid, parent_pid - 1])

    poller = ParentPollerUnix(parent_pid=parent_pid)

    with mock.patch("os.getppid", lambda: next(parent_pids)):  # noqa: PT008

        def exit_mock(*args):
            sys.exit(1)

        with mock.patch("os._exit", exit_mock), pytest.raises(SystemExit):
            poller.run()


@pytest.mark.skipif(os.name != "nt", reason="only works on windows")
def test_parent_poller_windows():
    """run() returns cleanly when WaitForMultipleObjects reports failure (-1)."""
    poller = ParentPollerWindows(interrupt_handle=1)

    def mock_wait(*args, **kwargs):
        return -1

    with mock.patch("ctypes.windll.kernel32.WaitForMultipleObjects", mock_wait):  # noqa
        with warnings.catch_warnings():
            # run() warns on wait failure; keep the test output clean.
            warnings.simplefilter("ignore")
            poller.run()


# ===== tests/test_start_kernel.py =====
import os
from textwrap import dedent

import pytest
from flaky import flaky

from .test_embed_kernel import setup_kernel

TIMEOUT = 15

if os.name == "nt":
    pytest.skip("skipping tests on windows", allow_module_level=True)


@flaky(max_runs=3)
def test_ipython_start_kernel_userns():
    """A user_ns passed to launch_new_instance is visible in the running kernel."""
    import IPython

    # The dummy user-module class was renamed in IPython 9.
    if IPython.version_info > (9, 0):  # noqa:SIM108
        EXPECTED = "IPythonMainModule"
    else:
        # not this since https://github.com/ipython/ipython/pull/14754
        EXPECTED = "DummyMod"

    cmd = dedent(
        """
        from ipykernel.kernelapp import launch_new_instance
        ns = {"custom": 123}
        launch_new_instance(user_ns=ns)
        """
    )

    with setup_kernel(cmd) as client:
        client.inspect("custom")
        # Drain shell messages until the inspect reply arrives.
        while True:
            msg = client.get_shell_msg(timeout=TIMEOUT)
            if msg["msg_type"] == "inspect_reply":
                break
        content = msg["content"]
        assert content["found"]
        text = content["data"]["text/plain"]
        assert "123" in text

        # user_module should be an instance of DummyMod
        client.execute("usermod = get_ipython().user_module")
        msg = client.get_shell_msg(timeout=TIMEOUT)
        content = msg["content"]
        assert content["status"] == "ok"
        client.inspect("usermod")
        while True:
            msg = client.get_shell_msg(timeout=TIMEOUT)
            if msg["msg_type"] == "inspect_reply":
                break
        content = msg["content"]
        assert content["found"]
        text = content["data"]["text/plain"]
        assert EXPECTED in text


@flaky(max_runs=3)
def test_ipython_start_kernel_no_userns():
    # Issue #4188 - user_ns should be passed to shell as None, not {}
    cmd = dedent(
        """
        from ipykernel.kernelapp import launch_new_instance
        launch_new_instance()
        """
    )

    with setup_kernel(cmd) as client:
        # user_module should not be an instance of DummyMod
        client.execute("usermod = get_ipython().user_module")
        msg = client.get_shell_msg(timeout=TIMEOUT)
        content = msg["content"]
        assert content["status"] == "ok"
        client.inspect("usermod")
        while True:
            msg = client.get_shell_msg(timeout=TIMEOUT)
            if msg["msg_type"] == "inspect_reply":
                break
        content = msg["content"]
        assert content["found"]
        text = content["data"]["text/plain"]
        assert "DummyMod" not in text


# ===== tests/utils.py =====
"""utilities for testing IPython kernels"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import annotations

import atexit
import os
import sys
from contextlib import contextmanager
from queue import Empty
from subprocess import STDOUT
from tempfile import TemporaryDirectory
from time import time

from jupyter_client import manager
from jupyter_client.blocking.client import BlockingKernelClient

STARTUP_TIMEOUT = 60
TIMEOUT = 100

# Module-level singleton kernel shared across tests; lazily created by
# start_global_kernel() and torn down via atexit.
KM: manager.KernelManager = None  # type:ignore
KC: BlockingKernelClient = None  # type:ignore


def start_new_kernel(**kwargs):
    """start a new kernel, and return its Manager and Client

    Integrates with our output capturing for tests.
    """
    kwargs["stderr"] = STDOUT
    return manager.start_new_kernel(startup_timeout=STARTUP_TIMEOUT, **kwargs)


def flush_channels(kc=None):
    """flush any messages waiting on the queue"""
    # Imported lazily to avoid a circular import at module load time.
    from .test_message_spec import validate_message

    if kc is None:
        kc = KC
    for get_msg in (kc.get_shell_msg, kc.get_iopub_msg):
        while True:
            try:
                msg = get_msg(timeout=0.1)
            except Empty:
                break
            else:
                # Every drained message must still be spec-valid.
                validate_message(msg)


def get_reply(kc, msg_id, timeout=TIMEOUT, channel="shell"):
    """Wait for and return the reply whose parent is ``msg_id``.

    Replies to other requests are printed (to aid debugging) and skipped,
    and their wait time is deducted from the remaining timeout budget.
    Raises ``queue.Empty`` if the budget is exhausted.
    """
    get_msg = getattr(kc, f"get_{channel}_msg")  # hoisted: loop-invariant lookup
    t0 = time()
    while True:
        reply = get_msg(timeout=timeout)
        if reply["parent_header"]["msg_id"] == msg_id:
            break
        # Allow debugging ignored replies
        print(f"Ignoring reply not to {msg_id}: {reply}")
        t1 = time()
        timeout -= t1 - t0
        t0 = t1
    return reply


def get_replies(kc, msg_ids: list[str], timeout=TIMEOUT, channel="shell"):
    # Get replies which may arrive in any order as they may be running on different subshells.
    # Replies are returned in the same order as the msg_ids, not in the order of arrival.
    get_msg = getattr(kc, f"get_{channel}_msg")  # hoisted: loop-invariant lookup
    t0 = time()
    count = 0
    replies = [None] * len(msg_ids)
    while count < len(msg_ids):
        reply = get_msg(timeout=timeout)
        try:
            msg_id = reply["parent_header"]["msg_id"]
            replies[msg_ids.index(msg_id)] = reply
            count += 1
        except ValueError:
            # Allow debugging ignored replies
            print(f"Ignoring reply not to any of {msg_ids}: {reply}")
            t1 = time()
            timeout -= t1 - t0
            t0 = t1
    return replies


def execute(code="", kc=None, **kwargs):
    """wrapper for doing common steps for validating an execution request"""
    from .test_message_spec import validate_message

    if kc is None:
        kc = KC
    msg_id = kc.execute(code=code, **kwargs)
    reply = get_reply(kc, msg_id, TIMEOUT)
    validate_message(reply, "execute_reply", msg_id)
    busy = kc.get_iopub_msg(timeout=TIMEOUT)
    validate_message(busy, "status", msg_id)
    assert busy["content"]["execution_state"] == "busy"

    # Silent executions do not broadcast an execute_input message.
    if not kwargs.get("silent"):
        execute_input = kc.get_iopub_msg(timeout=TIMEOUT)
        validate_message(execute_input, "execute_input", msg_id)
        assert execute_input["content"]["code"] == code

    # show tracebacks if present for debugging
    if reply["content"].get("traceback"):
        print("\n".join(reply["content"]["traceback"]), file=sys.stderr)

    return msg_id, reply["content"]


def start_global_kernel():
    """start the global kernel (if it isn't running) and return its client"""
    global KM, KC
    if KM is None:
        KM, KC = start_new_kernel()
        atexit.register(stop_global_kernel)
    else:
        flush_channels(KC)
    return KC


@contextmanager
def kernel():
    """Context manager for the global kernel instance

    Should be used for most kernel tests

    Returns
    -------
    kernel_client: connected KernelClient instance
    """
    yield start_global_kernel()


def uses_kernel(test_f):
    """Decorator for tests that use the global kernel"""

    def wrapped_test():
        with kernel() as kc:
            test_f(kc)

    wrapped_test.__doc__ = test_f.__doc__
    wrapped_test.__name__ = test_f.__name__
    return wrapped_test


def stop_global_kernel():
    """Stop the global shared kernel instance, if it exists"""
    global KM, KC
    # Guard against a second call (manual stop + atexit) or a call before any
    # kernel was started: previously KC.stop_channels() raised AttributeError
    # when KC was already None.
    if KC is not None:
        KC.stop_channels()
        KC = None  # type:ignore
    if KM is None:
        return
    KM.shutdown_kernel(now=True)
    KM = None  # type:ignore


def new_kernel(argv=None):
    """Context manager for a new kernel in a subprocess

    Should only be used for tests where the kernel must not be reused.

    Returns
    -------
    kernel_client: connected KernelClient instance
    """
    kwargs = {"stderr": STDOUT}
    if argv is not None:
        kwargs["extra_arguments"] = argv
    return manager.run_kernel(**kwargs)


def assemble_output(get_msg):
    """assemble stdout/err from an execution"""
    stdout = ""
    stderr = ""
    while True:
        msg = get_msg(timeout=1)
        msg_type = msg["msg_type"]
        content = msg["content"]
        if msg_type == "status" and content["execution_state"] == "idle":
            # idle message signals end of output
            break
        elif msg_type == "stream":  # use the local, consistent with the branch above
            if content["name"] == "stdout":
                stdout += content["text"]
            elif content["name"] == "stderr":
                stderr += content["text"]
            else:
                raise KeyError("bad stream: %r" % content["name"])
        else:
            # other output, ignored
            pass
    return stdout, stderr


def wait_for_idle(kc):
    """Consume iopub messages until the kernel reports an idle status."""
    while True:
        msg = kc.get_iopub_msg(timeout=1)
        msg_type = msg["msg_type"]
        content = msg["content"]
        if msg_type == "status" and content["execution_state"] == "idle":
            break


class TemporaryWorkingDirectory(TemporaryDirectory):
    """
    Creates a temporary directory and sets the cwd to that directory.
    Automatically reverts to previous cwd upon cleanup.
    Usage example:

        with TemporaryWorkingDirectory() as tmpdir:
            ...
    """

    def __enter__(self):
        # Remember where we were so __exit__ can restore it.
        self.old_wd = os.getcwd()
        os.chdir(self.name)
        return super().__enter__()

    def __exit__(self, exc, value, tb):
        os.chdir(self.old_wd)
        return super().__exit__(exc, value, tb)