├── ipykernel
├── py.typed
├── pylab
│ ├── __init__.py
│ ├── config.py
│ └── backend_inline.py
├── resources
│ ├── logo-32x32.png
│ └── logo-64x64.png
├── comm
│ ├── __init__.py
│ ├── manager.py
│ └── comm.py
├── __main__.py
├── __init__.py
├── inprocess
│ ├── __init__.py
│ ├── constants.py
│ ├── socket.py
│ ├── manager.py
│ ├── channels.py
│ ├── blocking.py
│ └── ipkernel.py
├── control.py
├── gui
│ ├── __init__.py
│ ├── gtkembed.py
│ └── gtk3embed.py
├── _version.py
├── thread.py
├── log.py
├── subshell.py
├── shellchannel.py
├── socket_pair.py
├── embed.py
├── utils.py
├── trio_runner.py
├── compiler.py
├── displayhook.py
├── connect.py
├── heartbeat.py
├── _eventloop_macos.py
├── jsonutil.py
└── parentpoller.py
├── tests
├── inprocess
│ ├── __init__.py
│ ├── test_kernelmanager.py
│ └── test_kernel.py
├── __init__.py
├── test_parentpoller.py
├── test_async.py
├── test_heartbeat.py
├── test_matplotlib_eventloops.py
├── test_comm.py
├── test_start_kernel.py
├── test_jsonutil.py
├── test_kernelapp.py
├── test_eventloop.py
├── test_connect.py
├── conftest.py
├── test_kernel_direct.py
├── test_kernelspec.py
├── test_embed_kernel.py
└── utils.py
├── docs
├── api
│ ├── modules.rst
│ ├── ipykernel.comm.rst
│ ├── ipykernel.inprocess.rst
│ └── ipykernel.rst
└── index.rst
├── .git-blame-ignore-revs
├── .readthedocs.yaml
├── .github
├── dependabot.yml
└── workflows
│ ├── enforce-label.yml
│ ├── nightly.yml
│ ├── publish-changelog.yml
│ ├── prep-release.yml
│ ├── publish-release.yml
│ ├── downstream.yml
│ └── ci.yml
├── .gitignore
├── ipykernel_launcher.py
├── RELEASE.md
├── hatch_build.py
├── examples
└── embedding
│ ├── inprocess_qtconsole.py
│ ├── inprocess_terminal.py
│ ├── internal_ipkernel.py
│ ├── ipkernel_qtapp.py
│ └── ipkernel_wxapp.py
├── LICENSE
├── README.md
├── .pre-commit-config.yaml
└── CONTRIBUTING.md
/ipykernel/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/ipykernel/pylab/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/inprocess/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/api/modules.rst:
--------------------------------------------------------------------------------
1 | ipykernel
2 | =========
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | ipykernel
8 |
--------------------------------------------------------------------------------
/ipykernel/resources/logo-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ipython/ipykernel/main/ipykernel/resources/logo-32x32.png
--------------------------------------------------------------------------------
/ipykernel/resources/logo-64x64.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ipython/ipykernel/main/ipykernel/resources/logo-64x64.png
--------------------------------------------------------------------------------
/ipykernel/comm/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ["Comm", "CommManager"]
2 |
3 | from .comm import Comm
4 | from .manager import CommManager
5 |
--------------------------------------------------------------------------------
/.git-blame-ignore-revs:
--------------------------------------------------------------------------------
1 | # Black formatting: https://github.com/ipython/ipykernel/pull/892
2 | c5bca730f82bbdfb005ab93969ff5a1d028c2341
3 |
--------------------------------------------------------------------------------
/ipykernel/__main__.py:
--------------------------------------------------------------------------------
1 | """The cli entry point for ipykernel."""
2 |
3 | if __name__ == "__main__":
4 | from ipykernel import kernelapp as app
5 |
6 | app.launch_new_instance()
7 |
--------------------------------------------------------------------------------
/ipykernel/__init__.py:
--------------------------------------------------------------------------------
1 | from ._version import (
2 | __version__,
3 | kernel_protocol_version,
4 | kernel_protocol_version_info,
5 | version_info,
6 | )
7 | from .connect import * # noqa: F403
8 |
--------------------------------------------------------------------------------
/ipykernel/inprocess/__init__.py:
--------------------------------------------------------------------------------
1 | from .blocking import BlockingInProcessKernelClient
2 | from .channels import InProcessChannel, InProcessHBChannel
3 | from .client import InProcessKernelClient
4 | from .manager import InProcessKernelManager
5 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | build:
4 | os: ubuntu-22.04
5 | tools:
6 | python: "3.13"
7 |
8 | sphinx:
9 | configuration: docs/conf.py
10 |
11 | python:
12 | install:
13 | # install itself with pip install .
14 | - method: pip
15 | path: .
16 | extra_requirements:
17 | - docs
18 |
--------------------------------------------------------------------------------
/ipykernel/inprocess/constants.py:
--------------------------------------------------------------------------------
1 | """Shared constants."""
2 |
3 | # Because inprocess communication is not networked, we can use a common Session
4 | # key everywhere. This is not just the empty bytestring to avoid tripping
5 | # certain security checks in the rest of Jupyter that assumes that empty keys
6 | # are insecure.
7 | INPROCESS_KEY = b"inprocess"
8 |
--------------------------------------------------------------------------------
/ipykernel/control.py:
--------------------------------------------------------------------------------
1 | """A thread for a control channel."""
2 |
3 | from .thread import CONTROL_THREAD_NAME, BaseThread
4 |
5 |
6 | class ControlThread(BaseThread):
7 | """A thread for a control channel."""
8 |
9 | def __init__(self, **kwargs):
10 | """Initialize the thread."""
11 | super().__init__(name=CONTROL_THREAD_NAME, **kwargs)
12 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "github-actions"
4 | directory: "/"
5 | schedule:
6 | interval: "weekly"
7 | groups:
8 | actions:
9 | patterns:
10 | - "*"
11 | - package-ecosystem: "pip"
12 | directory: "/"
13 | schedule:
14 | interval: "weekly"
15 | groups:
16 | actions:
17 | patterns:
18 | - "*"
19 |
--------------------------------------------------------------------------------
/ipykernel/pylab/config.py:
--------------------------------------------------------------------------------
1 | """Configurable for configuring the IPython inline backend
2 |
3 | This module does not import anything from matplotlib.
4 | """
5 |
6 | import warnings
7 |
8 | from matplotlib_inline.config import * # noqa: F403 # analysis: ignore
9 |
10 | warnings.warn(
11 | "`ipykernel.pylab.config` is deprecated, directly use `matplotlib_inline.config`",
12 | DeprecationWarning,
13 | stacklevel=2,
14 | )
15 |
--------------------------------------------------------------------------------
/.github/workflows/enforce-label.yml:
--------------------------------------------------------------------------------
1 | name: Enforce PR label
2 |
3 | concurrency:
4 | group: label-${{ github.ref }}
5 | cancel-in-progress: true
6 |
7 | on:
8 | pull_request:
9 | types: [labeled, unlabeled, opened, edited, synchronize]
10 | jobs:
11 | enforce-label:
12 | runs-on: ubuntu-latest
13 | permissions:
14 | pull-requests: write
15 | steps:
16 | - name: enforce-triage-label
17 | uses: jupyterlab/maintainer-tools/.github/actions/enforce-label@v1
18 |
--------------------------------------------------------------------------------
/docs/api/ipykernel.comm.rst:
--------------------------------------------------------------------------------
1 | ipykernel.comm package
2 | ======================
3 |
4 | Submodules
5 | ----------
6 |
7 |
8 | .. automodule:: ipykernel.comm.comm
9 | :members:
10 | :undoc-members:
11 | :show-inheritance:
12 |
13 |
14 | .. automodule:: ipykernel.comm.manager
15 | :members:
16 | :undoc-members:
17 | :show-inheritance:
18 |
19 | Module contents
20 | ---------------
21 |
22 | .. automodule:: ipykernel.comm
23 | :members:
24 | :undoc-members:
25 | :show-inheritance:
26 |
--------------------------------------------------------------------------------
/ipykernel/pylab/backend_inline.py:
--------------------------------------------------------------------------------
1 | """A matplotlib backend for publishing figures via display_data"""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 |
6 | import warnings
7 |
8 | from matplotlib_inline.backend_inline import * # noqa: F403 # analysis: ignore
9 |
10 | warnings.warn(
11 | "`ipykernel.pylab.backend_inline` is deprecated, directly "
12 | "use `matplotlib_inline.backend_inline`",
13 | DeprecationWarning,
14 | stacklevel=2,
15 | )
16 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. _index:
2 |
3 | IPython Kernel Docs
4 | ===================
5 |
6 | This contains minimal version-sensitive documentation for the IPython kernel package.
7 | Most IPython kernel documentation is in the `IPython documentation <https://ipython.readthedocs.io/en/stable/>`_.
8 |
9 | Contents:
10 |
11 | .. toctree::
12 | :maxdepth: 1
13 |
14 | changelog
15 | API docs <api/modules>
16 |
17 |
18 | Indices and tables
19 | ==================
20 |
21 | * :ref:`genindex`
22 | * :ref:`modindex`
23 | * :ref:`search`
24 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | MANIFEST
2 | build
3 | cover
4 | dist
5 | _build
6 | docs/man/*.gz
7 | docs/source/api/generated
8 | docs/source/config/options
9 | docs/source/interactive/magics-generated.txt
10 | docs/gh-pages
11 | IPython/html/notebook/static/mathjax
12 | IPython/html/static/style/*.map
13 | *.py[co]
14 | __pycache__
15 | *.egg-info
16 | *~
17 | *.bak
18 | .ipynb_checkpoints
19 | .tox
20 | .DS_Store
21 | \#*#
22 | .#*
23 | .coverage
24 | .cache
25 |
26 | data_kernelspec
27 | .pytest_cache
28 |
29 | # copied changelog file
30 | docs/changelog.md
31 |
--------------------------------------------------------------------------------
/ipykernel_launcher.py:
--------------------------------------------------------------------------------
1 | """Entry point for launching an IPython kernel.
2 |
3 | This is separate from the ipykernel package so we can avoid doing imports until
4 | after removing the cwd from sys.path.
5 | """
6 |
7 | import sys
8 | from pathlib import Path
9 |
10 | if __name__ == "__main__":
11 | # Remove the CWD from sys.path while we load stuff.
12 | # This is added back by InteractiveShellApp.init_path()
13 | if sys.path[0] == "" or Path(sys.path[0]) == Path.cwd():
14 | del sys.path[0]
15 |
16 | from ipykernel import kernelapp as app
17 |
18 | app.launch_new_instance()
19 |
--------------------------------------------------------------------------------
/RELEASE.md:
--------------------------------------------------------------------------------
1 | # Release Guide
2 |
3 | ## Using `jupyter_releaser`
4 |
5 | The recommended way to make a release is to use [`jupyter_releaser`](https://jupyter-releaser.readthedocs.io/en/latest/get_started/making_release_from_repo.html).
6 |
7 | ## Manual Release
8 |
9 | - Update `CHANGELOG`
10 |
11 | - Run the following:
12 |
13 | ```bash
14 | export VERSION=<version>
15 | pip install pipx
16 | pipx run hatch version $VERSION
17 | git commit -a -m "Release $VERSION"
18 | git tag $VERSION; true;
19 | git push --all
20 | git push --tags
21 | rm -rf dist build
22 | pipx run build .
23 | pipx run twine check dist/*
24 | pipx run twine upload dist/*
25 | ```
26 |
--------------------------------------------------------------------------------
/ipykernel/gui/__init__.py:
--------------------------------------------------------------------------------
1 | """GUI support for the IPython ZeroMQ kernel.
2 |
3 | This package contains the various toolkit-dependent utilities we use to enable
4 | coordination between the IPython kernel and the event loops of the various GUI
5 | toolkits.
6 | """
7 |
8 | # -----------------------------------------------------------------------------
9 | # Copyright (C) 2010-2011 The IPython Development Team.
10 | #
11 | # Distributed under the terms of the BSD License.
12 | #
13 | # The full license is in the file LICENSE, distributed as part of this
14 | # software.
15 | # -----------------------------------------------------------------------------
16 |
--------------------------------------------------------------------------------
/ipykernel/_version.py:
--------------------------------------------------------------------------------
1 | """
2 | store the current version info of the server.
3 | """
4 |
5 | import re
6 |
7 | # Version string must appear intact for hatch versioning
8 | __version__ = "7.1.0"
9 |
10 | # Build up version_info tuple for backwards compatibility
11 | pattern = r"(?P\d+).(?P\d+).(?P\d+)(?P.*)"
12 | match = re.match(pattern, __version__)
13 | assert match is not None
14 | parts: list[object] = [int(match[part]) for part in ["major", "minor", "patch"]]
15 | if match["rest"]:
16 | parts.append(match["rest"])
17 | version_info = tuple(parts)
18 |
19 | kernel_protocol_version_info = (5, 3)
20 | kernel_protocol_version = "{}.{}".format(*kernel_protocol_version_info)
21 |
--------------------------------------------------------------------------------
/.github/workflows/nightly.yml:
--------------------------------------------------------------------------------
1 | name: nightly build and upload
2 | on:
3 | workflow_dispatch:
4 | schedule:
5 | - cron: "0 0 * * *"
6 |
7 | defaults:
8 | run:
9 | shell: bash -eux {0}
10 |
11 | jobs:
12 | build:
13 | runs-on: "ubuntu-latest"
14 | strategy:
15 | fail-fast: false
16 | matrix:
17 | python-version: ["3.12"]
18 | steps:
19 | - name: Checkout
20 | uses: actions/checkout@v6
21 |
22 | - name: Base Setup
23 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
24 |
25 | - name: Build
26 | run: |
27 | python -m pip install build
28 | python -m build
29 | - name: Upload wheel
30 | uses: scientific-python/upload-nightly-action@b36e8c0c10dbcfd2e05bf95f17ef8c14fd708dbf # 0.6.2
31 | with:
32 | artifacts_path: dist
33 | anaconda_nightly_upload_token: ${{secrets.UPLOAD_TOKEN}}
34 |
--------------------------------------------------------------------------------
/ipykernel/thread.py:
--------------------------------------------------------------------------------
1 | """Base class for threads."""
2 |
3 | from threading import Thread
4 |
5 | from tornado.ioloop import IOLoop
6 |
7 | CONTROL_THREAD_NAME = "Control"
8 | SHELL_CHANNEL_THREAD_NAME = "Shell channel"
9 |
10 |
11 | class BaseThread(Thread):
12 | """Base class for threads."""
13 |
14 | def __init__(self, **kwargs):
15 | """Initialize the thread."""
16 | super().__init__(**kwargs)
17 | self.io_loop = IOLoop(make_current=False)
18 | self.pydev_do_not_trace = True
19 | self.is_pydev_daemon_thread = True
20 |
21 | def run(self) -> None:
22 | """Run the thread."""
23 | try:
24 | self.io_loop.start()
25 | finally:
26 | self.io_loop.close()
27 |
28 | def stop(self) -> None:
29 | """Stop the thread.
30 |
31 | This method is threadsafe.
32 | """
33 | self.io_loop.add_callback(self.io_loop.stop)
34 |
--------------------------------------------------------------------------------
/ipykernel/log.py:
--------------------------------------------------------------------------------
1 | """A PUB log handler."""
2 |
3 | import warnings
4 |
5 | from zmq.log.handlers import PUBHandler
6 |
7 | warnings.warn(
8 | "ipykernel.log is deprecated since ipykernel 4.3.0 (2016). It has moved to ipyparallel.engine.log",
9 | DeprecationWarning,
10 | stacklevel=2,
11 | )
12 |
13 |
14 | class EnginePUBHandler(PUBHandler):
15 | """A simple PUBHandler subclass that sets root_topic"""
16 |
17 | engine = None
18 |
19 | def __init__(self, engine, *args, **kwargs):
20 | """Initialize the handler."""
21 | PUBHandler.__init__(self, *args, **kwargs)
22 | self.engine = engine
23 |
24 | @property # type:ignore[misc]
25 | def root_topic(self):
26 | """this is a property, in case the handler is created
27 | before the engine gets registered with an id"""
28 | if isinstance(getattr(self.engine, "id", None), int):
29 | return "engine.%i" % self.engine.id # type:ignore[union-attr]
30 | return "engine"
31 |
--------------------------------------------------------------------------------
/.github/workflows/publish-changelog.yml:
--------------------------------------------------------------------------------
1 | name: "Publish Changelog"
2 | on:
3 | release:
4 | types: [published]
5 |
6 | workflow_dispatch:
7 | inputs:
8 | branch:
9 | description: "The branch to target"
10 | required: false
11 |
12 | jobs:
13 | publish_changelog:
14 | runs-on: ubuntu-latest
15 | environment: release
16 | steps:
17 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
18 |
19 | - uses: actions/create-github-app-token@v2
20 | id: app-token
21 | with:
22 | app-id: ${{ vars.APP_ID }}
23 | private-key: ${{ secrets.APP_PRIVATE_KEY }}
24 |
25 | - name: Publish changelog
26 | id: publish-changelog
27 | uses: jupyter-server/jupyter_releaser/.github/actions/publish-changelog@v2
28 | with:
29 | token: ${{ steps.app-token.outputs.token }}
30 | branch: ${{ github.event.inputs.branch }}
31 |
32 | - name: "** Next Step **"
33 | run: |
34 | echo "Merge the changelog update PR: ${{ steps.publish-changelog.outputs.pr_url }}"
35 |
--------------------------------------------------------------------------------
/docs/api/ipykernel.inprocess.rst:
--------------------------------------------------------------------------------
1 | ipykernel.inprocess package
2 | ===========================
3 |
4 | Submodules
5 | ----------
6 |
7 |
8 | .. automodule:: ipykernel.inprocess.blocking
9 | :members:
10 | :undoc-members:
11 | :show-inheritance:
12 |
13 |
14 | .. automodule:: ipykernel.inprocess.channels
15 | :members:
16 | :undoc-members:
17 | :show-inheritance:
18 |
19 |
20 | .. automodule:: ipykernel.inprocess.client
21 | :members:
22 | :undoc-members:
23 | :show-inheritance:
24 |
25 |
26 | .. automodule:: ipykernel.inprocess.constants
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 |
32 | .. automodule:: ipykernel.inprocess.ipkernel
33 | :members:
34 | :undoc-members:
35 | :show-inheritance:
36 |
37 |
38 | .. automodule:: ipykernel.inprocess.manager
39 | :members:
40 | :undoc-members:
41 | :show-inheritance:
42 |
43 |
44 | .. automodule:: ipykernel.inprocess.socket
45 | :members:
46 | :undoc-members:
47 | :show-inheritance:
48 |
49 | Module contents
50 | ---------------
51 |
52 | .. automodule:: ipykernel.inprocess
53 | :members:
54 | :undoc-members:
55 | :show-inheritance:
56 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) IPython Development Team.
2 | # Distributed under the terms of the Modified BSD License.
3 |
4 | import os
5 | import shutil
6 | import sys
7 | import tempfile
8 | from unittest.mock import patch
9 |
10 | import pytest
11 |
12 | from ipykernel.kernelspec import install
13 |
14 | pjoin = os.path.join
15 |
16 | tmp = None
17 | patchers: list = []
18 |
19 |
20 | @pytest.fixture(autouse=True)
21 | def _global_setup():
22 | """setup temporary env for tests"""
23 | global tmp
24 | tmp = tempfile.mkdtemp()
25 | patchers[:] = [
26 | patch.dict(
27 | os.environ,
28 | {
29 | "HOME": tmp,
30 | # Let tests work with --user install when HOME is changed:
31 | "PYTHONPATH": os.pathsep.join(sys.path),
32 | },
33 | ),
34 | ]
35 | for p in patchers:
36 | p.start()
37 |
38 | # install IPython in the temp home:
39 | install(user=True)
40 | yield
41 | for p in patchers:
42 | p.stop()
43 |
44 | try:
45 | shutil.rmtree(tmp) # type:ignore
46 | except OSError:
47 | # no such file
48 | pass
49 |
--------------------------------------------------------------------------------
/ipykernel/subshell.py:
--------------------------------------------------------------------------------
1 | """A thread for a subshell."""
2 |
3 | import asyncio
4 | from typing import Any
5 |
6 | import zmq
7 |
8 | from .socket_pair import SocketPair
9 | from .thread import BaseThread
10 |
11 |
12 | class SubshellThread(BaseThread):
13 | """A thread for a subshell.
14 |
15 | .. versionadded:: 7
16 | """
17 |
18 | def __init__(
19 | self,
20 | subshell_id: str,
21 | context: zmq.Context[Any],
22 | **kwargs,
23 | ):
24 | """Initialize the thread."""
25 | super().__init__(name=f"subshell-{subshell_id}", **kwargs)
26 |
27 | self.shell_channel_to_subshell = SocketPair(context, subshell_id)
28 | self.subshell_to_shell_channel = SocketPair(context, subshell_id + "-reverse")
29 |
30 | # When aborting flag is set, execute_request messages to this subshell will be aborted.
31 | self.aborting = False
32 |
33 | self.asyncio_lock = asyncio.Lock()
34 |
35 | def run(self) -> None:
36 | """Run the thread."""
37 | try:
38 | super().run()
39 | finally:
40 | self.shell_channel_to_subshell.close()
41 | self.subshell_to_shell_channel.close()
42 |
--------------------------------------------------------------------------------
/hatch_build.py:
--------------------------------------------------------------------------------
1 | """A custom hatch build hook for ipykernel."""
2 |
3 | import shutil
4 | import sys
5 | from pathlib import Path
6 |
7 | from hatchling.builders.hooks.plugin.interface import BuildHookInterface
8 |
9 |
10 | class CustomHook(BuildHookInterface):
11 | """The IPykernel build hook."""
12 |
13 | def initialize(self, version, build_data):
14 | """Initialize the hook."""
15 | here = Path(__file__).parent.resolve()
16 | sys.path.insert(0, str(here))
17 | from ipykernel.kernelspec import make_ipkernel_cmd, write_kernel_spec
18 |
19 | overrides = {}
20 |
21 | # When building a standard wheel, the executable specified in the kernelspec is simply 'python'.
22 | if version == "standard":
23 | overrides["metadata"] = dict(debugger=True)
24 | argv = make_ipkernel_cmd(executable="python")
25 |
26 | # When installing an editable wheel, the full `sys.executable` can be used.
27 | else:
28 | argv = make_ipkernel_cmd()
29 |
30 | overrides["argv"] = argv
31 |
32 | dest = Path(here) / "data_kernelspec"
33 | if Path(dest).exists():
34 | shutil.rmtree(dest)
35 |
36 | write_kernel_spec(dest, overrides=overrides)
37 |
--------------------------------------------------------------------------------
/examples/embedding/inprocess_qtconsole.py:
--------------------------------------------------------------------------------
1 | """An in-process qt console app."""
2 |
3 | import os
4 |
5 | import tornado
6 | from IPython.lib import guisupport
7 | from qtconsole.inprocess import QtInProcessKernelManager
8 | from qtconsole.rich_ipython_widget import RichIPythonWidget
9 |
10 | assert tornado.version_info >= (6, 1)
11 |
12 |
13 | def print_process_id():
14 | """Print the process id."""
15 | print("Process ID is:", os.getpid())
16 |
17 |
18 | def main():
19 | """The main entry point."""
20 | # Print the ID of the main process
21 | print_process_id()
22 |
23 | app = guisupport.get_app_qt4()
24 |
25 | # Create an in-process kernel
26 | # >>> print_process_id()
27 | # will print the same process ID as the main process
28 | kernel_manager = QtInProcessKernelManager()
29 | kernel_manager.start_kernel()
30 | kernel = kernel_manager.kernel
31 | kernel.gui = "qt4"
32 | kernel.shell.push({"foo": 43, "print_process_id": print_process_id})
33 |
34 | kernel_client = kernel_manager.client()
35 | kernel_client.start_channels()
36 |
37 | def stop():
38 | kernel_client.stop_channels()
39 | kernel_manager.shutdown_kernel()
40 | app.exit()
41 |
42 | control = RichIPythonWidget()
43 | control.kernel_manager = kernel_manager
44 | control.kernel_client = kernel_client
45 | control.exit_requested.connect(stop)
46 | control.show()
47 |
48 | guisupport.start_event_loop_qt4(app)
49 |
50 |
51 | if __name__ == "__main__":
52 | main()
53 |
--------------------------------------------------------------------------------
/ipykernel/inprocess/socket.py:
--------------------------------------------------------------------------------
1 | """Defines a dummy socket implementing (part of) the zmq.Socket interface."""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 |
6 | from queue import Queue
7 |
8 | import zmq
9 | from traitlets import HasTraits, Instance, Int
10 |
11 | # -----------------------------------------------------------------------------
12 | # Dummy socket class
13 | # -----------------------------------------------------------------------------
14 |
15 |
16 | class DummySocket(HasTraits):
17 | """A dummy socket implementing (part of) the zmq.Socket interface."""
18 |
19 | queue = Instance(Queue, ())
20 | message_sent = Int(0) # Should be an Event
21 | context = Instance(zmq.Context)
22 |
23 | def _context_default(self):
24 | return zmq.Context()
25 |
26 | # -------------------------------------------------------------------------
27 | # Socket interface
28 | # -------------------------------------------------------------------------
29 |
30 | def recv_multipart(self, flags=0, copy=True, track=False):
31 | """Recv a multipart message."""
32 | return self.queue.get_nowait()
33 |
34 | def send_multipart(self, msg_parts, flags=0, copy=True, track=False):
35 | """Send a multipart message."""
36 | msg_parts = list(map(zmq.Message, msg_parts))
37 | self.queue.put_nowait(msg_parts)
38 | self.message_sent += 1
39 |
40 | def flush(self, timeout=1.0):
41 | """no-op to comply with stream API"""
42 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2015, IPython Development Team
4 |
5 | All rights reserved.
6 |
7 | Redistribution and use in source and binary forms, with or without
8 | modification, are permitted provided that the following conditions are met:
9 |
10 | 1. Redistributions of source code must retain the above copyright notice, this
11 | list of conditions and the following disclaimer.
12 |
13 | 2. Redistributions in binary form must reproduce the above copyright notice,
14 | this list of conditions and the following disclaimer in the documentation
15 | and/or other materials provided with the distribution.
16 |
17 | 3. Neither the name of the copyright holder nor the names of its
18 | contributors may be used to endorse or promote products derived from
19 | this software without specific prior written permission.
20 |
21 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
22 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
23 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
24 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
25 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
26 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
28 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
29 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 |
--------------------------------------------------------------------------------
/ipykernel/shellchannel.py:
--------------------------------------------------------------------------------
1 | """A thread for a shell channel."""
2 |
3 | from __future__ import annotations
4 |
5 | import asyncio
6 | from threading import current_thread
7 | from typing import Any
8 |
9 | import zmq
10 |
11 | from .subshell_manager import SubshellManager
12 | from .thread import SHELL_CHANNEL_THREAD_NAME, BaseThread
13 |
14 |
15 | class ShellChannelThread(BaseThread):
16 | """A thread for a shell channel.
17 |
18 | Communicates with shell/subshell threads via pairs of ZMQ inproc sockets.
19 | """
20 |
21 | def __init__(
22 | self,
23 | context: zmq.Context[Any],
24 | shell_socket: zmq.Socket[Any],
25 | **kwargs,
26 | ):
27 | """Initialize the thread."""
28 | super().__init__(name=SHELL_CHANNEL_THREAD_NAME, **kwargs)
29 | self._manager: SubshellManager | None = None
30 | self._zmq_context = context # Avoid use of self._context
31 | self._shell_socket = shell_socket
32 | # Record the parent thread - the thread that started the app (usually the main thread)
33 | self.parent_thread = current_thread()
34 |
35 | self.asyncio_lock = asyncio.Lock()
36 |
37 | @property
38 | def manager(self) -> SubshellManager:
39 | # Lazy initialisation.
40 | if self._manager is None:
41 | assert current_thread() == self.parent_thread
42 | self._manager = SubshellManager(
43 | self._zmq_context,
44 | self.io_loop,
45 | self._shell_socket,
46 | )
47 | return self._manager
48 |
49 | def run(self) -> None:
50 | """Run the thread."""
51 | try:
52 | super().run()
53 | finally:
54 | if self._manager:
55 | self._manager.close()
56 |
--------------------------------------------------------------------------------
/tests/test_parentpoller.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import warnings
4 | from unittest import mock
5 |
6 | import pytest
7 |
8 | from ipykernel.parentpoller import ParentPollerUnix, ParentPollerWindows
9 |
10 |
11 | @pytest.mark.skipif(os.name == "nt", reason="only works on posix")
12 | def test_parent_poller_unix_to_pid1():
13 | poller = ParentPollerUnix()
14 | with mock.patch("os.getppid", lambda: 1): # noqa: PT008
15 |
16 | def exit_mock(*args):
17 | sys.exit(1)
18 |
19 | with mock.patch("os._exit", exit_mock), pytest.raises(SystemExit):
20 | poller.run()
21 |
22 | def mock_getppid():
23 | msg = "hi"
24 | raise ValueError(msg)
25 |
26 | with mock.patch("os.getppid", mock_getppid), pytest.raises(ValueError): # noqa: PT011
27 | poller.run()
28 |
29 |
30 | @pytest.mark.skipif(os.name == "nt", reason="only works on posix")
31 | def test_parent_poller_unix_reparent_not_pid1():
32 | parent_pid = 221
33 | parent_pids = iter([parent_pid, parent_pid - 1])
34 |
35 | poller = ParentPollerUnix(parent_pid=parent_pid)
36 |
37 | with mock.patch("os.getppid", lambda: next(parent_pids)): # noqa: PT008
38 |
39 | def exit_mock(*args):
40 | sys.exit(1)
41 |
42 | with mock.patch("os._exit", exit_mock), pytest.raises(SystemExit):
43 | poller.run()
44 |
45 |
46 | @pytest.mark.skipif(os.name != "nt", reason="only works on windows")
47 | def test_parent_poller_windows():
48 | poller = ParentPollerWindows(interrupt_handle=1)
49 |
50 | def mock_wait(*args, **kwargs):
51 | return -1
52 |
53 | with mock.patch("ctypes.windll.kernel32.WaitForMultipleObjects", mock_wait): # noqa
54 | with warnings.catch_warnings():
55 | warnings.simplefilter("ignore")
56 | poller.run()
57 |
--------------------------------------------------------------------------------
/.github/workflows/prep-release.yml:
--------------------------------------------------------------------------------
1 | name: "Step 1: Prep Release"
2 | on:
3 | workflow_dispatch:
4 | inputs:
5 | version_spec:
6 | description: "New Version Specifier"
7 | default: "next"
8 | required: false
9 | branch:
10 | description: "The branch to target"
11 | required: false
12 | post_version_spec:
13 | description: "Post Version Specifier"
14 | required: false
15 | silent:
16 | description: "Set a placeholder in the changelog and don't publish the release."
17 | required: false
18 | type: boolean
19 | since:
20 | description: "Use PRs with activity since this date or git reference"
21 | required: false
22 | since_last_stable:
23 | description: "Use PRs with activity since the last stable git tag"
24 | required: false
25 | type: boolean
26 | jobs:
27 | prep_release:
28 | runs-on: ubuntu-latest
29 | permissions:
30 | contents: write
31 | steps:
32 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
33 |
34 | - name: Prep Release
35 | id: prep-release
36 | uses: jupyter-server/jupyter_releaser/.github/actions/prep-release@v2
37 | with:
38 | token: ${{ secrets.GITHUB_TOKEN }}
39 | version_spec: ${{ github.event.inputs.version_spec }}
40 | silent: ${{ github.event.inputs.silent }}
41 | post_version_spec: ${{ github.event.inputs.post_version_spec }}
42 | target: ${{ github.event.inputs.target }}
43 | branch: ${{ github.event.inputs.branch }}
44 | since: ${{ github.event.inputs.since }}
45 | since_last_stable: ${{ github.event.inputs.since_last_stable }}
46 |
47 | - name: "** Next Step **"
48 | run: |
        echo "(Optional) Review Draft Release: ${{ steps.prep-release.outputs.release_url }}"
50 |
--------------------------------------------------------------------------------
/ipykernel/socket_pair.py:
--------------------------------------------------------------------------------
1 | """Pair of ZMQ inproc sockets used for communication between threads."""
2 |
3 | from __future__ import annotations
4 |
5 | from typing import Any
6 |
7 | import zmq
8 | from tornado.ioloop import IOLoop
9 | from zmq.eventloop.zmqstream import ZMQStream
10 |
11 |
class SocketPair:
    """Pair of ZMQ inproc sockets for one-direction communication between 2 threads.

    One of the threads is always the shell_channel_thread, the other may be the control
    thread, main thread or a subshell thread.

    .. versionadded:: 7
    """

    # Sending end of the pair (bound to the inproc address).
    from_socket: zmq.Socket[Any]
    # Receiving end of the pair (connected to the inproc address).
    to_socket: zmq.Socket[Any]
    # Lazily-created stream wrapping to_socket; see on_recv().
    to_stream: ZMQStream | None = None

    def __init__(self, context: zmq.Context[Any], name: str):
        """Initialize the inproc socket pair.

        Parameters
        ----------
        context : the ZMQ context to create both sockets in.
        name : unique suffix used to build the inproc address.
        """
        self.from_socket = context.socket(zmq.PAIR)
        self.to_socket = context.socket(zmq.PAIR)
        address = self._address(name)
        # Bind before connect so the address exists when to_socket connects.
        self.from_socket.bind(address)
        self.to_socket.connect(address)

    def close(self):
        """Close the inproc socket pair and its stream, if one was created."""
        self.from_socket.close()

        if self.to_stream is not None:
            self.to_stream.close()
        self.to_socket.close()

    def on_recv(self, io_loop: IOLoop, on_recv_callback, copy: bool = False):
        """Set the callback used when a message is received on the to stream.

        The ZMQStream is created lazily on first use, attached to the event
        loop of the receiving ('to') thread.
        """
        if self.to_stream is None:
            self.to_stream = ZMQStream(self.to_socket, io_loop)
        self.to_stream.on_recv(on_recv_callback, copy=copy)

    def _address(self, name) -> str:
        """Return the inproc address used for this socket pair."""
        return f"inproc://subshell{name}"
51 |
--------------------------------------------------------------------------------
/.github/workflows/publish-release.yml:
--------------------------------------------------------------------------------
1 | name: "Step 2: Publish Release"
2 | on:
3 | workflow_dispatch:
4 | inputs:
5 | branch:
6 | description: "The target branch"
7 | required: false
8 | release_url:
9 | description: "The URL of the draft GitHub release"
10 | required: false
11 | steps_to_skip:
12 | description: "Comma separated list of steps to skip"
13 | required: false
14 |
15 | jobs:
16 | publish_release:
17 | runs-on: ubuntu-latest
18 | environment: release
19 | permissions:
20 | id-token: write
21 | steps:
22 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
23 |
24 | - uses: actions/create-github-app-token@v2
25 | id: app-token
26 | with:
27 | app-id: ${{ vars.APP_ID }}
28 | private-key: ${{ secrets.APP_PRIVATE_KEY }}
29 |
30 | - name: Populate Release
31 | id: populate-release
32 | uses: jupyter-server/jupyter_releaser/.github/actions/populate-release@v2
33 | with:
34 | token: ${{ steps.app-token.outputs.token }}
35 | branch: ${{ github.event.inputs.branch }}
36 | release_url: ${{ github.event.inputs.release_url }}
37 | steps_to_skip: ${{ github.event.inputs.steps_to_skip }}
38 |
39 | - name: Finalize Release
40 | id: finalize-release
41 | uses: jupyter-server/jupyter_releaser/.github/actions/finalize-release@v2
42 | with:
43 | token: ${{ steps.app-token.outputs.token }}
44 | release_url: ${{ steps.populate-release.outputs.release_url }}
45 |
46 | - name: "** Next Step **"
47 | if: ${{ success() }}
48 | run: |
49 | echo "Verify the final release"
50 | echo ${{ steps.finalize-release.outputs.release_url }}
51 |
52 | - name: "** Failure Message **"
53 | if: ${{ failure() }}
54 | run: |
55 | echo "Failed to Publish the Draft Release Url:"
56 | echo ${{ steps.populate-release.outputs.release_url }}
57 |
--------------------------------------------------------------------------------
/tests/test_async.py:
--------------------------------------------------------------------------------
1 | """Test async/await integration"""
2 |
3 | import pytest
4 |
5 | from .test_message_spec import validate_message
6 | from .utils import TIMEOUT, execute, flush_channels, start_new_kernel
7 |
8 | KC = KM = None
9 |
10 |
@pytest.fixture(autouse=True)
def _setup_env():
    """start the global kernel (if it isn't running) and return its client"""
    global KM, KC
    KM, KC = start_new_kernel()
    # Drain any pending messages so each test starts from a clean slate.
    flush_channels(KC)
    yield
    # Teardown: both globals are always set by this point; the asserts
    # document that (and keep type checkers happy) before shutting down.
    assert KC is not None
    assert KM is not None
    KC.stop_channels()
    KM.shutdown_kernel(now=True)
22 |
23 |
def test_async_await():
    """A top-level await in a cell should execute successfully."""
    flush_channels(KC)
    _msg_id, reply_content = execute("import asyncio; await asyncio.sleep(0.1)", KC)
    assert reply_content["status"] == "ok", reply_content
28 |
29 |
@pytest.mark.parametrize("asynclib", ["asyncio", "trio"])
def test_async_interrupt(asynclib, request):
    """Interrupting a long-running awaited coroutine should error the cell.

    Runs once per async library; skipped if the library is not installed.
    """
    assert KC is not None
    assert KM is not None
    try:
        __import__(asynclib)
    except ImportError:
        pytest.skip("Requires %s" % asynclib)
    # Restore the default autoawait loop runner when the test finishes.
    request.addfinalizer(lambda: execute("%autoawait asyncio", KC))

    flush_channels(KC)
    msg_id, content = execute("%autoawait " + asynclib, KC)
    assert content["status"] == "ok", content

    flush_channels(KC)
    msg_id = KC.execute(f"print('begin'); import {asynclib}; await {asynclib}.sleep(5)")
    # Expected iopub sequence: busy status, execute_input echo, then the
    # stream output from print('begin').
    busy = KC.get_iopub_msg(timeout=TIMEOUT)
    validate_message(busy, "status", msg_id)
    assert busy["content"]["execution_state"] == "busy"
    echo = KC.get_iopub_msg(timeout=TIMEOUT)
    validate_message(echo, "execute_input")
    stream = KC.get_iopub_msg(timeout=TIMEOUT)
    # wait for the stream output to be sure kernel is in the async block
    validate_message(stream, "stream")
    assert stream["content"]["text"] == "begin\n"

    KM.interrupt_kernel()
    reply = KC.get_shell_msg()["content"]
    assert reply["status"] == "error", reply
    # The interrupt surfaces as CancelledError or KeyboardInterrupt
    # depending on the async library in use.
    assert reply["ename"] in {"CancelledError", "KeyboardInterrupt"}

    flush_channels(KC)
62 |
--------------------------------------------------------------------------------
/tests/test_heartbeat.py:
--------------------------------------------------------------------------------
1 | """Tests for heartbeat thread"""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 |
6 | import errno
7 | from typing import no_type_check
8 | from unittest.mock import patch
9 |
10 | import pytest
11 | import zmq
12 |
13 | from ipykernel.heartbeat import Heartbeat
14 |
15 |
def test_port_bind_failure_raises():
    """An unrecognized ZMQError from binding must propagate to the caller."""
    heart = Heartbeat(None)
    with patch.object(heart, "_try_bind_socket") as try_bind:
        try_bind.side_effect = zmq.ZMQError(-100, "fails for unknown error types")
        with pytest.raises(zmq.ZMQError):
            heart._bind_socket()
        # No retry for unknown error codes: exactly one attempt.
        assert try_bind.call_count == 1
23 |
24 |
def test_port_bind_success():
    """A successful bind should attempt the socket bind exactly once."""
    heart = Heartbeat(None)
    with patch.object(heart, "_try_bind_socket") as try_bind:
        heart._bind_socket()
        assert try_bind.call_count == 1
30 |
31 |
@no_type_check
def test_port_bind_failure_recovery():
    """Address-in-use errors (unix and windows codes) should be retried."""
    if not hasattr(errno, "WSAEADDRINUSE"):
        # Fake windows address in-use code
        errno.WSAEADDRINUSE = 12345

    try:
        heart = Heartbeat(None)
        with patch.object(heart, "_try_bind_socket") as try_bind:
            failures = [
                zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind unix"),
                zmq.ZMQError(errno.WSAEADDRINUSE, "fails for non-bind windows"),
            ]
            try_bind.side_effect = failures + [0] * 100
            # Shouldn't raise anything as retries will kick in
            heart._bind_socket()
    finally:
        # Cleanup fake assignment
        if errno.WSAEADDRINUSE == 12345:
            del errno.WSAEADDRINUSE
53 |
54 |
def test_port_bind_failure_gives_up_retries():
    """Persistent address-in-use errors should stop after 100 attempts."""
    heart = Heartbeat(None)
    with patch.object(heart, "_try_bind_socket") as try_bind:
        try_bind.side_effect = zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind")
        with pytest.raises(zmq.ZMQError):
            heart._bind_socket()
        assert try_bind.call_count == 100
62 |
--------------------------------------------------------------------------------
/ipykernel/comm/manager.py:
--------------------------------------------------------------------------------
1 | """Base class to manage comms"""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 |
6 | import logging
7 |
8 | import comm.base_comm
9 | import traitlets
10 | import traitlets.config
11 |
12 | from .comm import Comm
13 |
14 | logger = logging.getLogger("ipykernel.comm")
15 |
16 |
class CommManager(comm.base_comm.CommManager, traitlets.config.LoggingConfigurable):  # type:ignore[misc]
    """A comm manager."""

    # The kernel this manager belongs to.
    kernel = traitlets.Instance("ipykernel.kernelbase.Kernel")
    # Open comms, keyed by comm_id.
    comms = traitlets.Dict()
    # Registered target handlers, keyed by target_name.
    targets = traitlets.Dict()

    def __init__(self, **kwargs):
        """Initialize the manager."""
        # CommManager doesn't take arguments, so we explicitly forward arguments
        comm.base_comm.CommManager.__init__(self)
        traitlets.config.LoggingConfigurable.__init__(self, **kwargs)

    def comm_open(self, stream, ident, msg):
        """Handler for comm_open messages.

        Creates and registers the comm, then invokes the handler registered
        for the message's target (if any); on failure the comm is closed.
        """
        # This is for backward compatibility, the comm_open creates a new ipykernel.comm.Comm
        # but we should let the base class create the comm with comm.create_comm in a major release
        content = msg["content"]
        comm_id = content["comm_id"]
        target_name = content["target_name"]
        f = self.targets.get(target_name, None)
        comm = Comm(
            comm_id=comm_id,
            primary=False,
            target_name=target_name,
            show_warning=False,
        )
        # Register before invoking the handler so the comm is routable.
        self.register_comm(comm)
        if f is None:
            logger.error("No such comm target registered: %s", target_name)
        else:
            try:
                f(comm, msg)
                return
            except Exception:
                logger.error("Exception opening comm with target: %s", target_name, exc_info=True)  # noqa: G201
        
        # Failure.
        try:
            comm.close()
        except Exception:
            logger.error(  # noqa: G201
                """Could not close comm during `comm_open` failure
                clean-up. The comm may not have been opened yet.""",
                exc_info=True,
            )
63 |
--------------------------------------------------------------------------------
/ipykernel/embed.py:
--------------------------------------------------------------------------------
1 | """Simple function for embedding an IPython kernel"""
2 | # -----------------------------------------------------------------------------
3 | # Imports
4 | # -----------------------------------------------------------------------------
5 |
6 | import sys
7 |
8 | from IPython.utils.frame import extract_module_locals
9 |
10 | from .kernelapp import IPKernelApp
11 |
12 | # -----------------------------------------------------------------------------
13 | # Code
14 | # -----------------------------------------------------------------------------
15 |
16 |
def embed_kernel(module=None, local_ns=None, **kwargs):
    """Embed and start an IPython kernel in a given scope.

    Parameters
    ----------
    module : ModuleType, optional
        The module to load into IPython globals (default: caller)
    local_ns : dict, optional
        The namespace to load into IPython user namespace (default: caller)
    kwargs : dict, optional
        Further keyword args are relayed to the IPKernelApp constructor,
        allowing configuration of the Kernel. Will only have an effect
        on the first embed_kernel call for a given process.

    """
    # get the app if it exists, or set it up if it doesn't
    if IPKernelApp.initialized():
        app = IPKernelApp.instance()
    else:
        app = IPKernelApp.instance(**kwargs)
        app.initialize([])
        # Undo unnecessary sys module mangling from init_sys_modules.
        # This would not be necessary if we could prevent it
        # in the first place by using a different InteractiveShell
        # subclass, as in the regular embed case.
        main = app.kernel.shell._orig_sys_modules_main_mod
        if main is not None:
            sys.modules[app.kernel.shell._orig_sys_modules_main_name] = main

    # load the calling scope if not given
    # NOTE: extract_module_locals(1) inspects the caller's stack frame, so
    # this call must remain directly inside embed_kernel.
    (caller_module, caller_locals) = extract_module_locals(1)
    if module is None:
        module = caller_module
    if local_ns is None:
        # Copy so later mutation of the kernel namespace does not clobber
        # the caller's locals mapping.
        local_ns = dict(**caller_locals)

    app.kernel.user_module = module
    assert isinstance(local_ns, dict)
    app.kernel.user_ns = local_ns
    app.shell.set_completer_frame()  # type:ignore[union-attr]
    # Blocks here, running the kernel event loop in this process.
    app.start()
58 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # IPython Kernel for Jupyter
2 |
[![Build Status](https://github.com/ipython/ipykernel/actions/workflows/ci.yml/badge.svg?query=branch%3Amain)](https://github.com/ipython/ipykernel/actions/workflows/ci.yml?query=branch%3Amain)
[![Documentation Status](http://readthedocs.org/projects/ipykernel/badge/?version=latest)](http://ipykernel.readthedocs.io/en/latest/?badge=latest)
5 |
6 | This package provides the IPython kernel for Jupyter.
7 |
8 | ## Installation from source
9 |
10 | 1. `git clone`
11 | 1. `cd ipykernel`
12 | 1. `pip install -e ".[test]"`
13 |
14 | After that, all normal `ipython` commands will use this newly-installed version of the kernel.
15 |
16 | ## Running tests
17 |
Follow the instructions from `Installation from source`.

Then, from the root directory, run:
21 |
22 | ```bash
23 | pytest
24 | ```
25 |
26 | ## Running tests with coverage
27 |
Follow the instructions from `Installation from source`.

Then, from the root directory, run:
31 |
32 | ```bash
33 | pytest -vv -s --cov ipykernel --cov-branch --cov-report term-missing:skip-covered --durations 10
34 | ```
35 |
36 | ## About the IPython Development Team
37 |
38 | The IPython Development Team is the set of all contributors to the IPython project.
39 | This includes all of the IPython subprojects.
40 |
41 | The core team that coordinates development on GitHub can be found here:
42 | https://github.com/ipython/.
43 |
44 | ## Our Copyright Policy
45 |
46 | IPython uses a shared copyright model. Each contributor maintains copyright
47 | over their contributions to IPython. But, it is important to note that these
48 | contributions are typically only changes to the repositories. Thus, the IPython
49 | source code, in its entirety is not the copyright of any single person or
50 | institution. Instead, it is the collective copyright of the entire IPython
51 | Development Team. If individual contributors want to maintain a record of what
52 | changes/contributions they have specific copyright on, they should indicate
53 | their copyright in the commit message of the change, when they commit the
54 | change to one of the IPython repositories.
55 |
56 | With this in mind, the following banner should be used in any source code file
57 | to indicate the copyright and license terms:
58 |
59 | ```
60 | # Copyright (c) IPython Development Team.
61 | # Distributed under the terms of the Modified BSD License.
62 | ```
63 |
--------------------------------------------------------------------------------
/examples/embedding/inprocess_terminal.py:
--------------------------------------------------------------------------------
1 | """An in-process terminal example."""
2 |
3 | import os
4 | import sys
5 |
6 | import tornado
7 | from jupyter_console.ptshell import ZMQTerminalInteractiveShell
8 |
9 | from ipykernel.inprocess.manager import InProcessKernelManager
10 |
11 |
def print_process_id():
    """Print the process id."""
    pid = os.getpid()
    print("Process ID is:", pid)
15 |
16 |
def init_asyncio_patch():
    """set default asyncio policy to be compatible with tornado
    Tornado 6 (at least) is not compatible with the default
    asyncio implementation on Windows
    Pick the older SelectorEventLoopPolicy on Windows
    if the known-incompatible default policy is in use.
    do this as early as possible to make it a low priority and overridable
    ref: https://github.com/tornadoweb/tornado/issues/2608
    FIXME: if/when tornado supports the defaults in asyncio,
    remove and bump tornado requirement for py38
    """
    affected = (
        sys.platform.startswith("win")
        and sys.version_info >= (3, 8)
        and tornado.version_info < (6, 1)
    )
    if affected:
        import asyncio

        try:
            from asyncio import WindowsProactorEventLoopPolicy, WindowsSelectorEventLoopPolicy
        except ImportError:
            # not affected: the Windows policies are unavailable here
            pass
        else:
            if type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy:
                # WindowsProactorEventLoopPolicy is not compatible with tornado 6
                # fallback to the pre-3.8 default of Selector
                asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())
45 |
46 |
def main():
    """Run the in-process terminal example."""
    print_process_id()

    # Create an in-process kernel; running
    # >>> print_process_id()
    # inside it will print the same process ID as the main process.
    init_asyncio_patch()
    manager = InProcessKernelManager()
    manager.start_kernel()
    kernel = manager.kernel
    kernel.gui = "qt4"
    kernel.shell.push({"foo": 43, "print_process_id": print_process_id})
    client = manager.client()
    client.start_channels()

    console = ZMQTerminalInteractiveShell(manager=manager, client=client)
    console.mainloop()
65 |
66 |
if __name__ == "__main__":
    # Script entry point: start the in-process terminal example.
    main()
69 |
--------------------------------------------------------------------------------
/examples/embedding/internal_ipkernel.py:
--------------------------------------------------------------------------------
1 | """An internal ipykernel example."""
2 | # -----------------------------------------------------------------------------
3 | # Imports
4 | # -----------------------------------------------------------------------------
5 |
6 | import sys
7 |
8 | from IPython.lib.kernel import connect_qtconsole
9 |
10 | from ipykernel.kernelapp import IPKernelApp
11 |
12 |
13 | # -----------------------------------------------------------------------------
14 | # Functions and classes
15 | # -----------------------------------------------------------------------------
def mpl_kernel(gui):
    """Launch and return an IPython kernel with matplotlib support for the desired gui"""
    kernel = IPKernelApp.instance()
    argv = [
        "python",
        "--matplotlib=%s" % gui,
        # "--log-level=10",  # uncomment for verbose kernel logging
    ]
    kernel.initialize(argv)
    return kernel
27 |
28 |
class InternalIPKernel:
    """Mixin that embeds an IPython kernel inside a GUI application."""

    def init_ipkernel(self, backend):
        """Start IPython kernel with GUI event loop and mpl support."""
        self.ipkernel = mpl_kernel(backend)
        # Track the qt consoles opened from the GUI so we can close them.
        self.consoles = []

        # This application will also act on the shell user namespace
        self.namespace = self.ipkernel.shell.user_ns

        # Example: a variable that will be seen by the user in the shell, and
        # that the GUI modifies (the 'Counter++' button increments it):
        self.namespace["app_counter"] = 0

    def print_namespace(self, evt=None):
        """Print the namespace."""
        print("\n***Variables in User namespace***")
        public_items = ((k, v) for k, v in self.namespace.items() if not k.startswith("_"))
        for name, value in public_items:
            print(f"{name} -> {value!r}")
        sys.stdout.flush()

    def new_qt_console(self, evt=None):
        """start a new qtconsole connected to our kernel"""
        return connect_qtconsole(self.ipkernel.abs_connection_file, profile=self.ipkernel.profile)

    def count(self, evt=None):
        """Increment the app counter value."""
        self.namespace["app_counter"] += 1

    def cleanup_consoles(self, evt=None):
        """Clean up the consoles."""
        for console in self.consoles:
            console.kill()
66 |
--------------------------------------------------------------------------------
/ipykernel/utils.py:
--------------------------------------------------------------------------------
1 | """Utilities"""
2 |
3 | from __future__ import annotations
4 |
5 | import asyncio
6 | import sys
7 | import typing as t
8 | from collections.abc import Mapping
9 | from contextvars import copy_context
10 | from functools import partial, wraps
11 |
12 | if t.TYPE_CHECKING:
13 | from collections.abc import Callable
14 | from contextvars import Context
15 |
16 |
class LazyDict(Mapping[str, t.Any]):
    """Lazy evaluated read-only dictionary.

    Initialised with a dictionary of key-value pairs where the values are either
    constants or callables. Callables are evaluated each time the respective item is
    read.
    """

    def __init__(self, dict):
        """Store the backing dict of constants and zero-argument callables."""
        self._dict = dict

    def __getitem__(self, key):
        # Index the backing dict directly so missing keys raise KeyError, as
        # the Mapping protocol requires.  The previous `.get(key)` silently
        # returned None for missing keys, which made `key in lazy_dict`
        # always True and `.get(key, default)` never return the default.
        item = self._dict[key]
        return item() if callable(item) else item

    def __len__(self):
        return len(self._dict)

    def __iter__(self):
        return iter(self._dict)
37 |
38 |
39 | T = t.TypeVar("T")
40 | U = t.TypeVar("U")
41 | V = t.TypeVar("V")
42 |
43 |
def _async_in_context(
    f: Callable[..., t.Coroutine[T, U, V]], context: Context | None = None
) -> Callable[..., t.Coroutine[T, U, V]]:
    """
    Wrapper to run a coroutine in a persistent ContextVar Context.

    Backports asyncio.create_task(context=...) behavior from Python 3.11
    """
    if context is None:
        # Snapshot the caller's context once; it is shared across all calls
        # of the returned wrapper.
        context = copy_context()

    if sys.version_info >= (3, 11):

        @wraps(f)
        async def run_in_context(*args, **kwargs):
            coro = f(*args, **kwargs)
            # create_task(context=...) runs the coroutine inside `context`.
            return await asyncio.create_task(coro, context=context)

        return run_in_context

    # don't need this backport when we require 3.11
    # context_holder so we have a modifiable container for later calls
    context_holder = [context]  # type: ignore[unreachable]

    async def preserve_context(f, *args, **kwargs):
        """call a coroutine, preserving the context after it is called"""
        try:
            return await f(*args, **kwargs)
        finally:
            # persist changes to the context for future calls
            context_holder[0] = copy_context()

    @wraps(f)
    async def run_in_context_pre311(*args, **kwargs):
        # Launch the task from inside the stored context so the coroutine
        # sees it, then await the resulting task from the caller's context.
        ctx = context_holder[0]
        return await ctx.run(partial(asyncio.create_task, preserve_context(f, *args, **kwargs)))

    return run_in_context_pre311
82 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ci:
2 | autoupdate_commit_msg: "chore: update pre-commit hooks"
3 | autoupdate_schedule: weekly
4 |
5 | repos:
6 | - repo: https://github.com/pre-commit/pre-commit-hooks
7 | rev: v6.0.0
8 | hooks:
9 | - id: check-case-conflict
10 | - id: check-ast
11 | - id: check-docstring-first
12 | - id: check-executables-have-shebangs
13 | - id: check-added-large-files
14 | - id: check-case-conflict
15 | - id: check-merge-conflict
16 | - id: check-json
17 | - id: check-toml
18 | - id: check-yaml
19 | - id: debug-statements
20 | exclude: ipykernel/kernelapp.py
21 | - id: end-of-file-fixer
22 | - id: trailing-whitespace
23 |
24 | - repo: https://github.com/python-jsonschema/check-jsonschema
25 | rev: 0.34.1
26 | hooks:
27 | - id: check-github-workflows
28 |
29 | - repo: https://github.com/hukkin/mdformat
30 | rev: 1.0.0
31 | hooks:
32 | - id: mdformat
33 | additional_dependencies: [mdformat-footnote]
34 |
35 | - repo: https://github.com/pre-commit/mirrors-prettier
36 | rev: "v4.0.0-alpha.8"
37 | hooks:
38 | - id: prettier
39 | types_or: [yaml, html, json]
40 |
41 | - repo: https://github.com/pre-commit/mirrors-mypy
42 | rev: "v1.18.2"
43 | hooks:
44 | - id: mypy
45 | files: ipykernel
46 | stages: [manual]
47 | args: ["--install-types", "--non-interactive"]
48 | additional_dependencies:
49 | [
50 | "traitlets>=5.13",
51 | "ipython>=8.16.1",
52 | "jupyter_client>=8.5",
53 | "appnope",
54 | ]
55 |
56 | - repo: https://github.com/adamchainz/blacken-docs
57 | rev: "1.20.0"
58 | hooks:
59 | - id: blacken-docs
60 | additional_dependencies: [black==23.7.0]
61 |
62 | - repo: https://github.com/codespell-project/codespell
63 | rev: "v2.4.1"
64 | hooks:
65 | - id: codespell
66 | args: ["-L", "sur,nd"]
67 |
68 | - repo: https://github.com/pre-commit/pygrep-hooks
69 | rev: "v1.10.0"
70 | hooks:
71 | - id: rst-backticks
72 | - id: rst-directive-colons
73 | - id: rst-inline-touching-normal
74 |
75 | - repo: https://github.com/astral-sh/ruff-pre-commit
76 | rev: v0.14.3
77 | hooks:
78 | - id: ruff-check
79 | types_or: [python, jupyter]
80 | args: ["--fix", "--show-fixes"]
81 | - id: ruff-format
82 | types_or: [python, jupyter]
83 |
84 | - repo: https://github.com/scientific-python/cookie
85 | rev: "2025.10.20"
86 | hooks:
87 | - id: sp-repo-review
88 | additional_dependencies: ["repo-review[cli]"]
89 |
--------------------------------------------------------------------------------
/ipykernel/trio_runner.py:
--------------------------------------------------------------------------------
1 | """A trio loop runner."""
2 |
3 | import builtins
4 | import logging
5 | import signal
6 | import threading
7 | import traceback
8 | import warnings
9 |
10 | import trio
11 |
12 |
class TrioRunner:
    """A trio loop runner.

    Runs a trio event loop and executes each cell's coroutine as a task in
    that loop, submitted from another thread via ``trio.from_thread``.
    """

    def __init__(self):
        """Initialize the runner."""
        # Cancel scope of the currently-running cell, or None when idle.
        self._cell_cancel_scope = None
        # Token used to submit work to the trio loop from other threads.
        self._trio_token = None

    def initialize(self, kernel, io_loop):
        """Attach to the kernel's shell and move the tornado loop to a thread."""
        kernel.shell.set_trio_runner(self)
        kernel.shell.run_line_magic("autoawait", "trio")
        # Switching the autoawait backend would conflict with the running
        # trio loop, so replace the magic with a warning.
        kernel.shell.magics_manager.magics["line"]["autoawait"] = lambda _: warnings.warn(
            "Autoawait isn't allowed in Trio background loop mode.", stacklevel=2
        )
        self._interrupted = False
        bg_thread = threading.Thread(target=io_loop.start, daemon=True, name="TornadoBackground")
        bg_thread.start()

    def interrupt(self, signum, frame):
        """Interrupt the runner by cancelling the currently running cell.

        Raises if no cell is running.
        """
        if self._cell_cancel_scope:
            self._cell_cancel_scope.cancel()
        else:
            msg = "Kernel interrupted but no cell is running"
            raise Exception(msg)

    def run(self):
        """Run the trio loop until it terminates, routing SIGINT to interrupt()."""
        old_sig = signal.signal(signal.SIGINT, self.interrupt)

        def log_nursery_exc(exc):
            exc = "\n".join(traceback.format_exception(type(exc), exc, exc.__traceback__))
            logging.error("An exception occurred in a global nursery task.\n%s", exc)

        async def trio_main():
            """Run the main loop."""
            self._trio_token = trio.lowlevel.current_trio_token()
            async with trio.open_nursery() as nursery:
                # TODO This hack prevents the nursery from cancelling all child
                # tasks when an uncaught exception occurs, but it's ugly.
                nursery._add_exc = log_nursery_exc
                builtins.GLOBAL_NURSERY = nursery  # type:ignore[attr-defined]
                await trio.sleep_forever()

        trio.run(trio_main)
        # Restore the previous SIGINT handler once the loop exits.
        signal.signal(signal.SIGINT, old_sig)

    def __call__(self, async_fn):
        """Run *async_fn* inside the trio loop and return its result."""

        async def loc(coro):
            """Await the coroutine inside a fresh cancel scope."""
            self._cell_cancel_scope = trio.CancelScope()
            try:
                with self._cell_cancel_scope:
                    return await coro
            finally:
                # BUGFIX: reset the scope once the cell finishes.  The
                # original reset statement was unreachable (placed after the
                # return), so interrupt() would cancel a stale scope instead
                # of reporting that no cell is running.
                self._cell_cancel_scope = None

        return trio.from_thread.run(loc, async_fn, trio_token=self._trio_token)
73 |
--------------------------------------------------------------------------------
/examples/embedding/ipkernel_qtapp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Example integrating an IPython kernel into a GUI App.
3 |
4 | This trivial GUI application internally starts an IPython kernel, to which Qt
5 | consoles can be connected either by the user at the command line or started
6 | from the GUI itself, via a button. The GUI can also manipulate one variable in
7 | the kernel's namespace, and print the namespace to the console.
8 |
9 | Play with it by running the script and then opening one or more consoles, and
10 | pushing the 'Counter++' and 'Namespace' buttons.
11 |
12 | Upon exit, it should automatically close all consoles opened from the GUI.
13 |
14 | Consoles attached separately from a terminal will not be terminated, though
15 | they will notice that their kernel died.
16 | """
17 | # -----------------------------------------------------------------------------
18 | # Imports
19 | # -----------------------------------------------------------------------------
20 |
21 | from internal_ipkernel import InternalIPKernel
22 | from PyQt4 import Qt
23 |
24 |
25 | # -----------------------------------------------------------------------------
26 | # Functions and classes
27 | # -----------------------------------------------------------------------------
class SimpleWindow(Qt.QWidget, InternalIPKernel):
    """A custom Qt widget for IPykernel."""

    def __init__(self, app):
        """Initialize the widget."""
        Qt.QWidget.__init__(self)
        self.app = app
        self.add_widgets()
        # Start the embedded kernel with Qt GUI event-loop integration.
        self.init_ipkernel("qt")

    def add_widgets(self):
        """Add the widget."""
        # NOTE: this uses the old-style PyQt4 signal/slot API
        # (self.connect / Qt.SIGNAL) throughout.
        self.setGeometry(300, 300, 400, 70)
        self.setWindowTitle("IPython in your app")

        # Add simple buttons:
        console = Qt.QPushButton("Qt Console", self)
        console.setGeometry(10, 10, 100, 35)
        self.connect(console, Qt.SIGNAL("clicked()"), self.new_qt_console)

        namespace = Qt.QPushButton("Namespace", self)
        namespace.setGeometry(120, 10, 100, 35)
        self.connect(namespace, Qt.SIGNAL("clicked()"), self.print_namespace)

        count = Qt.QPushButton("Count++", self)
        count.setGeometry(230, 10, 80, 35)
        self.connect(count, Qt.SIGNAL("clicked()"), self.count)

        # Quit and cleanup
        quit = Qt.QPushButton("Quit", self)
        quit.setGeometry(320, 10, 60, 35)
        self.connect(quit, Qt.SIGNAL("clicked()"), Qt.qApp, Qt.SLOT("quit()"))

        self.app.connect(self.app, Qt.SIGNAL("lastWindowClosed()"), self.app, Qt.SLOT("quit()"))

        # Kill any consoles opened from the GUI when the app exits.
        self.app.aboutToQuit.connect(self.cleanup_consoles)
64 |
65 |
66 | # -----------------------------------------------------------------------------
67 | # Main script
68 | # -----------------------------------------------------------------------------
69 |
if __name__ == "__main__":
    app = Qt.QApplication([])
    # Create our window
    win = SimpleWindow(app)
    win.show()

    # Very important, IPython-specific step: this gets GUI event loop
    # integration going, and it replaces calling app.exec_()
    # (the kernel's event loop drives Qt from here on).
    win.ipkernel.start()
79 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Welcome!
4 |
5 | For contributing tips, follow the [Jupyter Contributing Guide](https://jupyter.readthedocs.io/en/latest/contributing/content-contributor.html).
6 | Please make sure to follow the [Jupyter Code of Conduct](https://github.com/jupyter/governance/blob/master/conduct/code_of_conduct.md).
7 |
8 | ## Installing ipykernel for development
9 |
10 | ipykernel is a pure Python package, so setting up for development is the same as most other Python projects:
11 |
12 | ```bash
13 | # clone the repo
14 | git clone https://github.com/ipython/ipykernel
15 | cd ipykernel
16 | # do a 'development' or 'editable' install with pip:
17 | pip install -e .
18 | ```
19 |
20 | ## Code Styling
21 |
22 | `ipykernel` has adopted automatic code formatting so you shouldn't
23 | need to worry too much about your code style.
24 | As long as your code is valid,
25 | the pre-commit hook should take care of how it should look.
To install `pre-commit`, run the following:
27 |
28 | ```
29 | pip install pre-commit
30 | pre-commit install
31 | ```
32 |
You can invoke the pre-commit hook by hand at any time with:
34 |
35 | ```
36 | pre-commit run
37 | ```
38 |
39 | which should run any autoformatting on your code
40 | and tell you about any errors it couldn't fix automatically.
41 | You may also install [black integration](https://github.com/psf/black#editor-integration)
42 | into your text editor to format code automatically.
43 |
44 | If you have already committed files before setting up the pre-commit
45 | hook with `pre-commit install`, you can fix everything up using
46 | `pre-commit run --all-files`. You need to make the fixing commit
47 | yourself after that.
48 |
49 | Some of the hooks only run on CI by default, but you can invoke them by
50 | running with the `--hook-stage manual` argument.
51 |
52 | ## Releasing ipykernel
53 |
54 | Releasing ipykernel is _almost_ standard for a Python package:
55 |
56 | - set version for release
57 | - make and publish tag
58 | - publish release to PyPI
59 | - set version back to development
60 |
The one extra step for ipykernel is that we need to make separate wheels for Python 2 and 3
because the bundled kernelspec has different contents for Python 2 and 3. This
affects only the 4.x branch of ipykernel, as the 5+ version is only compatible
with Python 3.
65 |
66 | The full release process is available below:
67 |
68 | ```bash
69 | # make sure version is set in ipykernel/_version.py
70 | VERSION="4.9.0"
71 | # commit the version and make a release tag
72 | git add ipykernel/_version.py
73 | git commit -m "release $VERSION"
74 | git tag -am "release $VERSION" $VERSION
75 |
76 | # push the changes to the repo
77 | git push
78 | git push --tags
79 |
80 | # publish the release to PyPI
81 | # note the extra `python2 setup.py bdist_wheel` for creating
82 | # the wheel for Python 2
83 | pip install --upgrade twine
84 | git clean -xfd
85 | python3 setup.py sdist bdist_wheel
86 | python2 setup.py bdist_wheel # the extra step for the 4.x branch.
87 | twine upload dist/*
88 |
89 | # set the version back to '.dev' in ipykernel/_version.py
90 | # e.g. 4.10.0.dev if we just released 4.9.0
91 | git add ipykernel/_version.py
92 | git commit -m "back to dev"
93 | git push
94 | ```
95 |
--------------------------------------------------------------------------------
/docs/api/ipykernel.rst:
--------------------------------------------------------------------------------
1 | ipykernel package
2 | =================
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 | :maxdepth: 4
9 |
10 | ipykernel.comm
11 | ipykernel.inprocess
12 |
13 | Submodules
14 | ----------
15 |
16 |
17 | .. automodule:: ipykernel.compiler
18 | :members:
19 | :undoc-members:
20 | :show-inheritance:
21 |
22 |
23 | .. automodule:: ipykernel.connect
24 | :members:
25 | :undoc-members:
26 | :show-inheritance:
27 |
28 |
29 | .. automodule:: ipykernel.control
30 | :members:
31 | :undoc-members:
32 | :show-inheritance:
33 |
34 |
35 | .. automodule:: ipykernel.debugger
36 | :members:
37 | :undoc-members:
38 | :show-inheritance:
39 |
40 |
41 | .. automodule:: ipykernel.displayhook
42 | :members:
43 | :undoc-members:
44 | :show-inheritance:
45 |
46 |
47 | .. automodule:: ipykernel.embed
48 | :members:
49 | :undoc-members:
50 | :show-inheritance:
51 |
52 |
53 | .. automodule:: ipykernel.eventloops
54 | :members:
55 | :undoc-members:
56 | :show-inheritance:
57 |
58 |
59 | .. automodule:: ipykernel.heartbeat
60 | :members:
61 | :undoc-members:
62 | :show-inheritance:
63 |
64 |
65 | .. automodule:: ipykernel.iostream
66 | :members:
67 | :undoc-members:
68 | :show-inheritance:
69 |
70 |
71 | .. automodule:: ipykernel.ipkernel
72 | :members:
73 | :undoc-members:
74 | :show-inheritance:
75 |
76 |
77 | .. automodule:: ipykernel.jsonutil
78 | :members:
79 | :undoc-members:
80 | :show-inheritance:
81 |
82 |
83 | .. automodule:: ipykernel.kernelapp
84 | :members:
85 | :undoc-members:
86 | :show-inheritance:
87 |
88 |
89 | .. automodule:: ipykernel.kernelbase
90 | :members:
91 | :undoc-members:
92 | :show-inheritance:
93 |
94 |
95 | .. automodule:: ipykernel.kernelspec
96 | :members:
97 | :undoc-members:
98 | :show-inheritance:
99 |
100 |
101 | .. automodule:: ipykernel.log
102 | :members:
103 | :undoc-members:
104 | :show-inheritance:
105 |
106 |
107 | .. automodule:: ipykernel.parentpoller
108 | :members:
109 | :undoc-members:
110 | :show-inheritance:
111 |
112 |
113 | .. automodule:: ipykernel.shellchannel
114 | :members:
115 | :undoc-members:
116 | :show-inheritance:
117 |
118 |
119 | .. automodule:: ipykernel.socket_pair
120 | :members:
121 | :undoc-members:
122 | :show-inheritance:
123 |
124 |
125 | .. automodule:: ipykernel.subshell
126 | :members:
127 | :undoc-members:
128 | :show-inheritance:
129 |
130 |
131 | .. automodule:: ipykernel.subshell_manager
132 | :members:
133 | :undoc-members:
134 | :show-inheritance:
135 |
136 |
137 | .. automodule:: ipykernel.thread
138 | :members:
139 | :undoc-members:
140 | :show-inheritance:
141 |
142 |
143 | .. automodule:: ipykernel.trio_runner
144 | :members:
145 | :undoc-members:
146 | :show-inheritance:
147 |
148 |
149 | .. automodule:: ipykernel.zmqshell
150 | :members:
151 | :undoc-members:
152 | :show-inheritance:
153 |
154 | Module contents
155 | ---------------
156 |
157 | .. automodule:: ipykernel
158 | :members:
159 | :undoc-members:
160 | :show-inheritance:
161 |
--------------------------------------------------------------------------------
/tests/test_matplotlib_eventloops.py:
--------------------------------------------------------------------------------
import os
import sys
import time

import pytest
from jupyter_client.blocking.client import BlockingKernelClient

from .test_eventloop import qt_guis_avail
from .utils import assemble_output

# these tests don't seem to work with xvfb yet
# these tests seem to be a problem on CI in general
pytestmark = pytest.mark.skipif(
    bool(os.getenv("CI")),
    reason="tests not working yet reliably on CI",
)

# GUI toolkits to exercise, selected from what this platform supports.
guis = []
# NOTE(review): sys.platform never starts with "tk" (values are "linux",
# "darwin", "win32", ...), so this condition is always true and "tk" is
# always appended — the intended platform check looks garbled; confirm
# which platform tk was meant to be excluded on.
if not sys.platform.startswith("tk"):
    guis.append("tk")
if qt_guis_avail:
    guis.append("qt")
if sys.platform == "darwin":
    guis.append("osx")

# Maps each %matplotlib gui name to the backend name matplotlib reports.
backends = {
    "tk": "tkagg",
    "qt": "qtagg",
    "osx": "macosx",
}
32 |
def execute(
    kc: BlockingKernelClient,
    code: str,
    timeout=120,
):
    """Run *code* on the kernel and return its stripped (stdout, stderr).

    Fails the calling test if anything was printed to stderr.
    """
    parent_id = kc.execute(code)
    out, err = assemble_output(kc.get_iopub_msg, timeout=timeout, parent_msg_id=parent_id)
    out, err = out.strip(), err.strip()
    assert not err
    return out, err
42 |
43 |
@pytest.mark.parametrize("gui", guis)
@pytest.mark.timeout(300)
def test_matplotlib_gui(kc, gui):
    """Make sure matplotlib activates and its eventloop runs while the kernel is also responsive"""
    pytest.importorskip("matplotlib", reason="this test requires matplotlib")
    stdout, stderr = execute(kc, f"%matplotlib {gui}")
    assert not stderr
    # debug: show output from invoking the matplotlib magic
    print(stdout)
    execute(
        kc,
        """
from concurrent.futures import Future
import matplotlib as mpl
import matplotlib.pyplot as plt
""",
    )
    # The magic must have selected the backend expected for this gui.
    stdout, _ = execute(kc, "print(mpl.get_backend())")
    assert stdout == backends[gui]
    # Start a 10 ms canvas timer in the kernel: each tick bumps call_count
    # and the first tick resolves the future.
    execute(
        kc,
        """
fig, ax = plt.subplots()
timer = fig.canvas.new_timer(interval=10)
f = Future()

call_count = 0
def add_call():
    global call_count
    call_count += 1
    if not f.done():
        f.set_result(None)

timer.add_callback(add_call)
timer.start()
""",
    )
    # wait for the first call (up to 60 seconds)
    deadline = time.monotonic() + 60
    done = False
    while time.monotonic() <= deadline:
        stdout, _ = execute(kc, "print(f.done())")
        if stdout.strip() == "True":
            done = True
            break
        if stdout == "False":
            time.sleep(0.1)
        else:
            pytest.fail(f"Unexpected output {stdout}")
    if not done:
        pytest.fail("future never finished...")

    # The timer should keep firing while the kernel remains responsive.
    time.sleep(0.25)
    stdout, _ = execute(kc, "print(call_count)")
    call_count = int(stdout)
    assert call_count > 0
    time.sleep(0.25)
    stdout, _ = execute(kc, "timer.stop()\nprint(call_count)")
    call_count_2 = int(stdout)
    assert call_count_2 > call_count
    stdout, _ = execute(kc, "print(call_count)")
    call_count_3 = int(stdout)
    # A few stray ticks may still land right after stop(); allow some slack.
    assert call_count_3 <= call_count_2 + 5
107 |
--------------------------------------------------------------------------------
/tests/test_comm.py:
--------------------------------------------------------------------------------
1 | import unittest.mock
2 |
3 | import pytest
4 |
5 | from ipykernel.comm import Comm, CommManager
6 | from ipykernel.ipkernel import IPythonKernel
7 | from ipykernel.kernelbase import Kernel
8 |
9 |
def test_comm(kernel: Kernel) -> None:
    """Exercise the deprecated Comm class end to end against a kernel."""
    kernel.comm_manager = CommManager(kernel=kernel)  # type:ignore

    # Instantiating Comm must emit a DeprecationWarning.
    with pytest.deprecated_call():
        c = Comm(kernel=kernel, target_name="bar")

    received = []

    assert c.kernel is kernel  # type:ignore

    c.publish_msg("foo")
    c.open({})
    c.on_msg(received.append)
    c.on_close(received.append)
    c.handle_msg({})
    c.handle_close({})
    c.close()

    # one handled message plus one handled close
    assert len(received) == 2
    assert c.target_name == "bar"
35 |
36 |
def test_comm_manager(kernel: Kernel) -> None:
    """Exercise CommManager target registration and open/msg/close routing."""
    manager = CommManager(kernel=kernel)
    msgs = []

    # Target handler: record the message and close the comm immediately.
    def foo(comm, msg):
        msgs.append(msg)
        comm.close()

    # Target handler that raises; nothing should be recorded for it.
    def fizz(comm, msg):
        raise RuntimeError("hi")

    def on_close(msg):
        msgs.append(msg)

    def on_msg(msg):
        msgs.append(msg)

    manager.register_target("foo", foo)
    manager.register_target("fizz", fizz)

    kernel.comm_manager = manager  # type:ignore
    # Creating/registering the comm publishes exactly one message (comm_open).
    with unittest.mock.patch.object(Comm, "publish_msg") as publish_msg:
        with pytest.deprecated_call():
            comm = Comm()
        comm.on_msg(on_msg)
        comm.on_close(on_close)
        manager.register_comm(comm)
        assert publish_msg.call_count == 1

    # make sure that when we don't pass a kernel, the 'default' kernel is taken
    Kernel._instance = kernel  # type:ignore
    assert comm.kernel is kernel  # type:ignore
    Kernel.clear_instance()

    assert manager.get_comm(comm.comm_id) == comm
    # Unknown comm ids yield None rather than raising.
    assert manager.get_comm("foo") is None

    # comm_open with a registered target invokes its handler once.
    msg = dict(content=dict(comm_id=comm.comm_id, target_name="foo"))
    manager.comm_open(None, None, msg)
    assert len(msgs) == 1
    # Unregistered target: no handler runs, count unchanged.
    msg["content"]["target_name"] = "bar"
    manager.comm_open(None, None, msg)
    assert len(msgs) == 1
    # Raising target: nothing is recorded either.
    msg = dict(content=dict(comm_id=comm.comm_id, target_name="fizz"))
    manager.comm_open(None, None, msg)
    assert len(msgs) == 1

    # comm_msg routes to the registered comm's on_msg handler.
    manager.register_comm(comm)
    assert manager.get_comm(comm.comm_id) == comm
    msg = dict(content=dict(comm_id=comm.comm_id))
    manager.comm_msg(None, None, msg)
    assert len(msgs) == 2
    # Messages addressed to unknown comm ids are dropped.
    msg["content"]["comm_id"] = "foo"
    manager.comm_msg(None, None, msg)
    assert len(msgs) == 2

    # comm_close triggers on_close and marks the comm closed.
    manager.register_comm(comm)
    assert manager.get_comm(comm.comm_id) == comm
    msg = dict(content=dict(comm_id=comm.comm_id))
    manager.comm_close(None, None, msg)
    assert len(msgs) == 3

    assert comm._closed
100 |
101 |
def test_comm_in_manager(ipkernel: IPythonKernel) -> None:
    """A freshly created Comm registers itself with the kernel's comm manager."""
    with pytest.deprecated_call():
        opened = Comm()
    assert opened.comm_id in ipkernel.comm_manager.comms
107 |
--------------------------------------------------------------------------------
/ipykernel/compiler.py:
--------------------------------------------------------------------------------
1 | """Compiler helpers for the debugger."""
2 |
3 | import os
4 | import sys
5 | import tempfile
6 |
7 | from IPython.core.compilerop import CachingCompiler
8 |
9 |
def murmur2_x86(data, seed):
    """Return the 32-bit MurmurHash2 of the str *data* for the given *seed*.

    The string is hashed over its UTF-8 encoding, four bytes (one
    little-endian word) at a time, with the trailing 0-3 bytes folded in
    separately before the final avalanche.
    """
    MULT = 0x5BD1E995
    buf = data.encode("utf8")
    length = len(buf)
    h = seed ^ length

    # Body: mix complete 4-byte little-endian words.
    tail_start = length & 0xFFFFFFFC
    for i in range(0, tail_start, 4):
        k = buf[i] | (buf[i + 1] << 8) | (buf[i + 2] << 16) | (buf[i + 3] << 24)
        k = (k * MULT) & 0xFFFFFFFF
        k ^= k >> 24
        k = (k * MULT) & 0xFFFFFFFF

        h = (h * MULT) & 0xFFFFFFFF
        h ^= k

    # Tail: fold in the remaining 0-3 bytes.
    remaining = length & 0x03
    k = 0
    if remaining >= 3:
        k = buf[tail_start + 2] << 16
    if remaining >= 2:
        k |= buf[tail_start + 1] << 8
    if remaining >= 1:
        k |= buf[tail_start]
    h ^= k
    h = (h * MULT) & 0xFFFFFFFF

    # Final avalanche.
    h ^= h >> 13
    h = (h * MULT) & 0xFFFFFFFF
    h ^= h >> 15

    return h
47 |
48 |
# Default: on non-Windows platforms path names need no conversion.
convert_to_long_pathname = lambda filename: filename  # noqa: E731

if sys.platform == "win32":
    try:
        import ctypes
        from ctypes.wintypes import DWORD, LPCWSTR, LPWSTR, MAX_PATH

        _GetLongPathName = ctypes.windll.kernel32.GetLongPathNameW
        _GetLongPathName.argtypes = [LPCWSTR, LPWSTR, DWORD]
        _GetLongPathName.restype = DWORD

        def _convert_to_long_pathname(filename):
            """Best-effort expansion of a Windows short (8.3) path via Win32."""
            buf = ctypes.create_unicode_buffer(MAX_PATH)
            rv = _GetLongPathName(filename, buf, MAX_PATH)
            # A zero return or an over-long result means the call failed;
            # keep the original name in that case.
            if rv != 0 and rv <= MAX_PATH:
                filename = buf.value
            return filename

        # test that it works so if there are any issues we fail just once here
        _convert_to_long_pathname(__file__)
    except Exception:
        # Any failure leaves the identity fallback in place.
        pass
    else:
        convert_to_long_pathname = _convert_to_long_pathname
73 |
74 |
def get_tmp_directory():
    """Return this process's private ipykernel temp directory path."""
    base = convert_to_long_pathname(tempfile.gettempdir())
    return f"{base}{os.sep}ipykernel_{os.getpid()}"
80 |
81 |
def get_tmp_hash_seed():
    """Return the fixed seed used when hashing cell source into file names."""
    hash_seed = 0xC70F6907
    return hash_seed
85 |
86 |
def get_file_name(code):
    """Return the pseudo file name under which *code* is identified.

    The IPYKERNEL_CELL_NAME environment variable, when set, overrides the
    generated name; otherwise the name is derived from a hash of the source
    placed inside this process's temp directory.
    """
    override = os.environ.get("IPYKERNEL_CELL_NAME")
    if override is not None:
        return override
    digest = murmur2_x86(code, get_tmp_hash_seed())
    return get_tmp_directory() + os.sep + str(digest) + ".py"
94 |
95 |
class XCachingCompiler(CachingCompiler):
    """A caching compiler that names code cells with stable hash-based paths."""

    def __init__(self, *args, **kwargs):
        """Initialize the compiler."""
        super().__init__(*args, **kwargs)
        # NOTE(review): presumably replaced with a real logger by the owner
        # after construction; stays None until then — confirm against callers.
        self.log = None

    def get_code_name(self, raw_code, code, number):
        """Return the pseudo file name used to identify *raw_code*."""
        return get_file_name(raw_code)
107 |
--------------------------------------------------------------------------------
/ipykernel/inprocess/manager.py:
--------------------------------------------------------------------------------
1 | """A kernel manager for in-process kernels."""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 |
6 | from jupyter_client.manager import KernelManager
7 | from jupyter_client.managerabc import KernelManagerABC
8 | from jupyter_client.session import Session
9 | from traitlets import DottedObjectName, Instance, default
10 |
11 | from .constants import INPROCESS_KEY
12 |
13 |
class InProcessKernelManager(KernelManager):
    """A manager for an in-process kernel.

    This class implements the interface of
    `jupyter_client.kernelmanagerabc.KernelManagerABC` and allows
    (asynchronous) frontends to be used seamlessly with an in-process kernel.

    See `jupyter_client.kernelmanager.KernelManager` for docstrings.
    """

    # The kernel process with which the KernelManager is communicating.
    kernel = Instance("ipykernel.inprocess.ipkernel.InProcessKernel", allow_none=True)
    # the client class for KM.client() shortcut
    client_class = DottedObjectName("ipykernel.inprocess.BlockingInProcessKernelClient")

    @default("blocking_class")
    def _default_blocking_class(self):
        # Local import — presumably avoids an import cycle at module load time.
        from .blocking import BlockingInProcessKernelClient

        return BlockingInProcessKernelClient

    @default("session")
    def _default_session(self):
        # don't sign in-process messages
        return Session(key=INPROCESS_KEY, parent=self)

    # --------------------------------------------------------------------------
    # Kernel management methods
    # --------------------------------------------------------------------------

    def start_kernel(self, **kwds):
        """Start the kernel."""
        from ipykernel.inprocess.ipkernel import InProcessKernel

        # Extra kwds are accepted for interface compatibility but unused here.
        self.kernel = InProcessKernel(parent=self, session=self.session)

    def shutdown_kernel(self):
        """Shutdown the kernel."""
        if self.kernel:
            # Stop the IOPub forwarding thread before dropping the kernel.
            self.kernel.iopub_thread.stop()
        self._kill_kernel()

    def restart_kernel(self, now=False, **kwds):
        """Restart the kernel."""
        self.shutdown_kernel()
        self.start_kernel(**kwds)

    @property
    def has_kernel(self):
        # True while a kernel object is attached.
        return self.kernel is not None

    def _kill_kernel(self):
        # Dropping the reference is all "killing" means for in-process kernels.
        self.kernel = None

    def interrupt_kernel(self):
        """Interrupt the kernel."""
        msg = "Cannot interrupt in-process kernel."
        raise NotImplementedError(msg)

    def signal_kernel(self, signum):
        """Send a signal to the kernel."""
        msg = "Cannot signal in-process kernel."
        raise NotImplementedError(msg)

    def is_alive(self):
        """Test if the kernel is alive."""
        return self.kernel is not None

    def client(self, **kwargs):
        """Get a client for the kernel."""
        # Hand the in-process kernel object directly to the client.
        kwargs["kernel"] = self.kernel
        return super().client(**kwargs)
86 |
87 |
# -----------------------------------------------------------------------------
# ABC Registration
# -----------------------------------------------------------------------------

# Register as a virtual subclass so isinstance/issubclass checks against
# jupyter_client's KernelManagerABC accept InProcessKernelManager.
KernelManagerABC.register(InProcessKernelManager)
93 |
--------------------------------------------------------------------------------
/ipykernel/inprocess/channels.py:
--------------------------------------------------------------------------------
1 | """A kernel client for in-process kernels."""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 |
6 | from jupyter_client.channelsabc import HBChannelABC
7 |
8 | # -----------------------------------------------------------------------------
9 | # Channel classes
10 | # -----------------------------------------------------------------------------
11 |
12 |
class InProcessChannel:
    """Base class for in-process channels."""

    # Method names proxied to the kernel side; none by default.
    proxy_methods: list[object] = []

    def __init__(self, client=None):
        """Create the channel, initially stopped."""
        super().__init__()
        self.client = client
        self._is_alive = False

    def is_alive(self):
        """Return True when the channel has been started and not stopped."""
        return self._is_alive

    def start(self):
        """Mark the channel as running."""
        self._is_alive = True

    def stop(self):
        """Mark the channel as stopped."""
        self._is_alive = False

    def call_handlers(self, msg):
        """Dispatch an incoming message in the main thread.

        Subclasses must override this to do anything useful.
        """
        err = "call_handlers must be defined in a subclass."
        raise NotImplementedError(err)

    def flush(self, timeout=1.0):
        """No-op flush kept for interface compatibility."""

    def call_handlers_later(self, *args, **kwds):
        """Schedule the message handlers.

        The default implementation just calls the handlers immediately, but this
        method exists so that GUI toolkits can defer calling the handlers until
        after the event loop has run, as expected by GUI frontends.
        """
        self.call_handlers(*args, **kwds)

    def process_events(self):
        """Process any pending GUI events.

        This method will be never be called from a frontend without an event
        loop (e.g., a terminal frontend).
        """
        raise NotImplementedError
63 |
64 |
class InProcessHBChannel:
    """A dummy heartbeat channel interface for in-process kernels.

    Normally we use the heartbeat to check that the kernel process is alive.
    When the kernel is in-process, that doesn't make sense, but clients still
    expect this interface.
    """

    # Nominal heartbeat timeout in seconds, kept for interface compatibility.
    time_to_dead = 3.0

    def __init__(self, client=None):
        """Create the channel: stopped and paused."""
        super().__init__()
        self.client = client
        self._is_alive = False
        self._pause = True

    def is_alive(self):
        """Return True when the channel has been started and not stopped."""
        return self._is_alive

    def start(self):
        """Mark the channel as running."""
        self._is_alive = True

    def stop(self):
        """Mark the channel as stopped."""
        self._is_alive = False

    def pause(self):
        """Suspend the (dummy) heartbeat."""
        self._pause = True

    def unpause(self):
        """Resume the (dummy) heartbeat."""
        self._pause = False

    def is_beating(self):
        """Return True while the heartbeat is not paused."""
        return not self._pause
105 |
106 |
107 | HBChannelABC.register(InProcessHBChannel)
108 |
--------------------------------------------------------------------------------
/ipykernel/comm/comm.py:
--------------------------------------------------------------------------------
1 | """Base class for a Comm"""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 |
6 | import uuid
7 | from typing import Optional
8 | from warnings import warn
9 |
10 | import comm.base_comm
11 | import traitlets.config
12 | from traitlets import Bool, Bytes, Instance, Unicode, default
13 |
14 | from ipykernel.jsonutil import json_clean
15 | from ipykernel.kernelbase import Kernel
16 |
17 |
18 | # this is the class that will be created if we do comm.create_comm
class BaseComm(comm.base_comm.BaseComm):  # type:ignore[misc]
    """The base class for comms."""

    kernel: Optional["Kernel"] = None

    def publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys):
        """Helper for sending a comm message on IOPub"""
        if not Kernel.initialized():
            # No kernel singleton exists yet: nowhere to publish, drop silently.
            return

        # Lazily bind to the process-wide kernel on first use.
        if self.kernel is None:
            self.kernel = Kernel.instance()

        data = data if data is not None else {}
        metadata = metadata if metadata is not None else {}
        content = json_clean(dict(data=data, comm_id=self.comm_id, **keys))

        session = self.kernel.session
        assert session is not None
        session.send(
            self.kernel.iopub_socket,
            msg_type,
            content,
            metadata=json_clean(metadata),
            parent=self.kernel.get_parent(),
            ident=self.topic,
            buffers=buffers,
        )
46 |
47 |
48 | # but for backwards compatibility, we need to inherit from LoggingConfigurable
class Comm(BaseComm, traitlets.config.LoggingConfigurable):
    """Class for communicating between a Frontend and a Kernel"""

    # Kernel this comm publishes through; defaults to the current singleton.
    kernel = Instance("ipykernel.kernelbase.Kernel", allow_none=True)
    # Unique identifier for this comm; auto-generated when not supplied.
    comm_id = Unicode()
    primary = Bool(True, help="Am I the primary or secondary Comm?")

    target_name = Unicode("comm")
    target_module = Unicode(
        None,
        allow_none=True,
        help="""requirejs module from
        which to load comm target.""",
    )

    # IOPub topic used as the zmq identity when publishing messages.
    topic = Bytes()

    @default("kernel")
    def _default_kernel(self):
        # Fall back to the process-wide Kernel singleton when one exists.
        if Kernel.initialized():
            return Kernel.instance()
        return None

    @default("comm_id")
    def _default_comm_id(self):
        return uuid.uuid4().hex

    def __init__(
        self, target_name="", data=None, metadata=None, buffers=None, show_warning=True, **kwargs
    ):
        """Initialize a comm.

        ``show_warning=False`` suppresses the deprecation warning emitted on
        construction; the other parameters are forwarded to the base classes.
        """
        if show_warning:
            warn(
                "The `ipykernel.comm.Comm` class has been deprecated. Please use the `comm` module instead."
                "For creating comms, use the function `from comm import create_comm`.",
                DeprecationWarning,
                stacklevel=2,
            )

        # Handle differing arguments between base classes.
        # `kernel` is a traitlet consumed by LoggingConfigurable, not by
        # BaseComm.__init__ (which presumably rejects it) — pop it here and
        # restore it for the traitlets init below.
        had_kernel = "kernel" in kwargs
        kernel = kwargs.pop("kernel", None)
        if target_name:
            kwargs["target_name"] = target_name
        BaseComm.__init__(self, data=data, metadata=metadata, buffers=buffers, **kwargs)  # type:ignore[call-arg]
        # only re-add kernel if explicitly provided
        if had_kernel:
            kwargs["kernel"] = kernel
        traitlets.config.LoggingConfigurable.__init__(self, **kwargs)
98 |
99 |
100 | __all__ = ["Comm"]
101 |
--------------------------------------------------------------------------------
/ipykernel/gui/gtkembed.py:
--------------------------------------------------------------------------------
1 | """GUI support for the IPython ZeroMQ kernel - GTK toolkit support."""
2 | # -----------------------------------------------------------------------------
3 | # Copyright (C) 2010-2011 The IPython Development Team
4 | #
5 | # Distributed under the terms of the BSD License. The full license is in
6 | # the file LICENSE, distributed as part of this software.
7 | # -----------------------------------------------------------------------------
8 |
9 | # -----------------------------------------------------------------------------
10 | # Imports
11 | # -----------------------------------------------------------------------------
12 | # stdlib
13 | import sys
14 | import warnings
15 |
16 | # Third-party
17 | import gobject
18 | import gtk
19 |
# Emitted once at import: this legacy pygtk (GTK 2) integration is deprecated.
# Fixed the message: this module embeds GTK 2 via `gtk`/`gobject`; the GTK 3
# integration lives in gtk3embed.py, so warning about "Gtk3" here was a
# copy-paste error.
warnings.warn(
    "The Gtk2 event loop for ipykernel is deprecated", category=DeprecationWarning, stacklevel=2
)
23 |
24 | # -----------------------------------------------------------------------------
25 | # Classes and functions
26 | # -----------------------------------------------------------------------------
27 |
28 |
class GTKEmbed:
    """A class to embed a kernel into the GTK main event loop."""

    def __init__(self, kernel):
        """Initialize the embed."""
        self.kernel = kernel
        # These two will later store the real gtk functions when we hijack them
        self.gtk_main = None
        self.gtk_main_quit = None

    def start(self):
        """Starts the GTK main event loop and sets our kernel startup routine."""
        # Register our function to initiate the kernel and start gtk
        # (idle_add runs _wire_kernel once the GTK loop is spinning).
        gobject.idle_add(self._wire_kernel)
        gtk.main()

    def _wire_kernel(self):
        """Initializes the kernel inside GTK.

        This is meant to run only once at startup, so it does its job and
        returns False to ensure it doesn't get run again by GTK.
        """
        self.gtk_main, self.gtk_main_quit = self._hijack_gtk()
        # Poll the kernel on a periodic GTK timer (interval in milliseconds).
        gobject.timeout_add(int(1000 * self.kernel._poll_interval), self.iterate_kernel)
        return False

    def iterate_kernel(self):
        """Run one iteration of the kernel and return True.

        GTK timer functions must return True to be called again, so we make the
        call to :meth:`do_one_iteration` and then return True for GTK.
        """
        self.kernel.do_one_iteration()
        return True

    def stop(self):
        """Stop the embed."""
        # FIXME: this one isn't getting called because we have no reliable
        # kernel shutdown. We need to fix that: once the kernel has a
        # shutdown mechanism, it can call this.
        if self.gtk_main_quit:
            self.gtk_main_quit()
        sys.exit()

    def _hijack_gtk(self):
        """Hijack a few key functions in GTK for IPython integration.

        Modifies pyGTK's main and main_quit with a dummy so user code does not
        block IPython. This allows us to use %run to run arbitrary pygtk
        scripts from a long-lived IPython session, and when they attempt to
        start or stop

        Returns
        -------
        The original functions that have been hijacked:
        - gtk.main
        - gtk.main_quit
        """

        def dummy(*args, **kw):
            """No-op."""

        # save and trap main and main_quit from gtk
        orig_main, gtk.main = gtk.main, dummy
        orig_main_quit, gtk.main_quit = gtk.main_quit, dummy
        return orig_main, orig_main_quit
95 |
--------------------------------------------------------------------------------
/tests/test_start_kernel.py:
--------------------------------------------------------------------------------
1 | import os
2 | from textwrap import dedent
3 |
4 | import pytest
5 |
6 | from .test_embed_kernel import setup_kernel
7 |
# Max seconds to wait for any single shell reply from the spawned kernel.
TIMEOUT = 15

if os.name == "nt":
    # The whole module is skipped on Windows.
    pytest.skip("skipping tests on windows", allow_module_level=True)
12 |
13 |
@pytest.mark.flaky(max_runs=3)
def test_ipython_start_kernel_userns():
    """A user_ns dict passed to launch_new_instance is visible in the kernel."""
    import IPython

    # The repr the kernel reports for user_module changed with IPython 9.
    if IPython.version_info > (9, 0):  # noqa:SIM108
        EXPECTED = "IPythonMainModule"
    else:
        # not this since https://github.com/ipython/ipython/pull/14754
        EXPECTED = "DummyMod"

    cmd = dedent(
        """
        from ipykernel.kernelapp import launch_new_instance
        ns = {"custom": 123}
        launch_new_instance(user_ns=ns)
        """
    )

    with setup_kernel(cmd) as client:
        # The custom namespace entry must be resolvable in the kernel.
        client.inspect("custom")
        msg = client.get_shell_msg(timeout=TIMEOUT)
        content = msg["content"]
        assert content["found"]
        text = content["data"]["text/plain"]
        assert "123" in text

        # user_module should be an instance of DummyMod
        client.execute("usermod = get_ipython().user_module")
        msg = client.get_shell_msg(timeout=TIMEOUT)
        content = msg["content"]
        assert content["status"] == "ok"
        client.inspect("usermod")
        msg = client.get_shell_msg(timeout=TIMEOUT)
        content = msg["content"]
        assert content["found"]
        text = content["data"]["text/plain"]
        assert EXPECTED in text
51 |
52 |
def test_start_kernel_background_thread():
    """launch_new_instance works from a non-main thread with its own loop."""
    cmd = dedent(
        """
        import threading
        import asyncio
        from ipykernel.kernelapp import launch_new_instance

        def launch():
            # Threads don't always have a default event loop so we need to
            # create and set a default
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            launch_new_instance()

        thread = threading.Thread(target=launch)
        thread.start()
        thread.join()
        """
    )

    with setup_kernel(cmd) as client:
        # The kernel must accept and run code normally.
        client.execute("a = 1")
        msg = client.get_shell_msg(timeout=TIMEOUT)
        content = msg["content"]
        assert content["status"] == "ok"

        # And the assigned variable must be inspectable afterwards.
        client.inspect("a")
        msg = client.get_shell_msg(timeout=TIMEOUT)
        content = msg["content"]
        assert content["found"]
        text = content["data"]["text/plain"]
        assert "1" in text
85 |
86 |
@pytest.mark.flaky(max_runs=3)
def test_ipython_start_kernel_no_userns():
    # Issue #4188 - user_ns should be passed to shell as None, not {}
    code = dedent(
        """
        from ipykernel.kernelapp import launch_new_instance
        launch_new_instance()
        """
    )

    with setup_kernel(code) as kc:
        # Without a user_ns, the user_module must be a real module, not DummyMod.
        kc.execute("usermod = get_ipython().user_module")
        assert kc.get_shell_msg(timeout=TIMEOUT)["content"]["status"] == "ok"

        kc.inspect("usermod")
        reply = kc.get_shell_msg(timeout=TIMEOUT)["content"]
        assert reply["found"]
        assert "DummyMod" not in reply["data"]["text/plain"]
109 |
--------------------------------------------------------------------------------
/ipykernel/gui/gtk3embed.py:
--------------------------------------------------------------------------------
1 | """GUI support for the IPython ZeroMQ kernel - GTK toolkit support."""
2 | # -----------------------------------------------------------------------------
3 | # Copyright (C) 2010-2011 The IPython Development Team
4 | #
5 | # Distributed under the terms of the BSD License. The full license is in
6 | # the file LICENSE, distributed as part of this software.
7 | # -----------------------------------------------------------------------------
8 |
9 | # -----------------------------------------------------------------------------
10 | # Imports
11 | # -----------------------------------------------------------------------------
12 | # stdlib
13 | import sys
14 | import warnings
15 |
16 | # Third-party
17 | import gi
18 |
19 | gi.require_version("Gdk", "3.0")
20 | gi.require_version("Gtk", "3.0")
21 | from gi.repository import GObject, Gtk # noqa: E402
22 |
23 | warnings.warn(
24 | "The Gtk3 event loop for ipykernel is deprecated", category=DeprecationWarning, stacklevel=2
25 | )
26 |
27 | # -----------------------------------------------------------------------------
28 | # Classes and functions
29 | # -----------------------------------------------------------------------------
30 |
31 |
class GTKEmbed:
    """Embed an IPython kernel inside the GTK main event loop."""

    def __init__(self, kernel):
        """Initialize the embed."""
        self.kernel = kernel
        # Filled in by _hijack_gtk() with the real GTK entry points once we
        # replace them with no-ops.
        self.gtk_main = None
        self.gtk_main_quit = None

    def start(self):
        """Starts the GTK main event loop and sets our kernel startup routine."""
        # Wire the kernel up when GTK first goes idle, then hand control to GTK.
        GObject.idle_add(self._wire_kernel)
        Gtk.main()

    def _wire_kernel(self):
        """Initializes the kernel inside GTK.

        Meant to run exactly once at startup; returning False tells GTK not
        to schedule this callback again.
        """
        self.gtk_main, self.gtk_main_quit = self._hijack_gtk()
        poll_ms = int(1000 * self.kernel._poll_interval)
        GObject.timeout_add(poll_ms, self.iterate_kernel)
        return False

    def iterate_kernel(self):
        """Run one kernel iteration and return True.

        Returning True keeps the GTK timer alive so :meth:`do_one_iteration`
        is called again on the next tick.
        """
        self.kernel.do_one_iteration()
        return True

    def stop(self):
        """Stop the embed."""
        # FIXME: this one isn't getting called because we have no reliable
        # kernel shutdown. We need to fix that: once the kernel has a
        # shutdown mechanism, it can call this.
        if self.gtk_main_quit:
            self.gtk_main_quit()
        sys.exit()

    def _hijack_gtk(self):
        """Hijack a few key functions in GTK for IPython integration.

        Replaces Gtk.main and Gtk.main_quit with no-ops so user code run via
        %run cannot block or kill the long-lived IPython session when it
        tries to start or stop a GTK loop of its own.

        Returns
        -------
        The original functions that have been hijacked:
        - Gtk.main
        - Gtk.main_quit
        """

        def dummy(*args, **kw):
            """No-op."""

        # save and trap main and main_quit from gtk
        orig_main = Gtk.main
        orig_main_quit = Gtk.main_quit
        Gtk.main = dummy
        Gtk.main_quit = dummy
        return orig_main, orig_main_quit
98 |
--------------------------------------------------------------------------------
/tests/inprocess/test_kernelmanager.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) IPython Development Team.
2 | # Distributed under the terms of the Modified BSD License.
3 |
4 | import unittest
5 |
6 | import pytest
7 |
8 | from ipykernel.inprocess.manager import InProcessKernelManager
9 |
10 | # -----------------------------------------------------------------------------
11 | # Test case
12 | # -----------------------------------------------------------------------------
13 |
14 |
class InProcessKernelManagerTestCase(unittest.TestCase):
    """Exercise the in-process kernel manager through the standard KM API."""

    def setUp(self):
        self.km = InProcessKernelManager()

    def tearDown(self):
        # Only shut down when a test actually started a kernel.
        if self.km.has_kernel:
            self.km.shutdown_kernel()

    def _started_client(self):
        """Start the kernel and return a client with channels running and ready."""
        self.km.start_kernel()
        client = self.km.client()
        client.start_channels()
        client.wait_for_ready()
        return client

    @pytest.mark.flaky
    def test_interface(self):
        """Does the in-process kernel manager implement the basic KM interface?"""
        manager = self.km
        assert not manager.has_kernel

        manager.start_kernel()
        assert manager.has_kernel
        assert manager.kernel is not None

        client = manager.client()
        assert not client.channels_running

        client.start_channels()
        assert client.channels_running

        first_kernel = manager.kernel
        manager.restart_kernel()
        assert manager.kernel is not None
        assert manager.kernel != first_kernel

        manager.shutdown_kernel()
        assert not manager.has_kernel

        # Interrupt/signal are meaningless for an in-process kernel.
        with pytest.raises(NotImplementedError):
            manager.interrupt_kernel()

        with pytest.raises(NotImplementedError):
            manager.signal_kernel(9)

        client.stop_channels()
        assert not client.channels_running

    def test_execute(self):
        """Does executing code in an in-process kernel work?"""
        client = self._started_client()
        client.execute("foo = 1")
        assert self.km.kernel.shell.user_ns["foo"] == 1

    def test_complete(self):
        """Does requesting completion from an in-process kernel work?"""
        client = self._started_client()
        self.km.kernel.shell.push({"my_bar": 0, "my_baz": 1})
        client.complete("my_ba", 5)
        reply = client.get_shell_msg()
        assert reply["header"]["msg_type"] == "complete_reply"
        assert sorted(reply["content"]["matches"]) == ["my_bar", "my_baz"]

    def test_inspect(self):
        """Does requesting object information from an in-process kernel work?"""
        client = self._started_client()
        self.km.kernel.shell.user_ns["foo"] = 1
        client.inspect("foo")
        reply = client.get_shell_msg()
        assert reply["header"]["msg_type"] == "inspect_reply"
        content = reply["content"]
        assert content["found"]
        assert "int" in content["data"]["text/plain"]

    def test_history(self):
        """Does requesting history from an in-process kernel work?"""
        client = self._started_client()
        client.execute("1")
        client.history(hist_access_type="tail", n=1)
        reply = client.shell_channel.get_msgs()[-1]
        assert reply["header"]["msg_type"] == "history_reply"
        history = reply["content"]["history"]
        assert len(history) == 1
        assert history[0][2] == "1"
109 |
110 |
# Allow running this suite directly with `python`, outside of pytest.
if __name__ == "__main__":
    unittest.main()
113 |
--------------------------------------------------------------------------------
/tests/inprocess/test_kernel.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) IPython Development Team.
2 | # Distributed under the terms of the Modified BSD License.
3 |
4 | import sys
5 | from contextlib import contextmanager
6 | from io import StringIO
7 |
8 | import pytest
9 | from IPython.utils.io import capture_output # type:ignore[attr-defined]
10 | from jupyter_client.session import Session
11 |
12 | from ipykernel.inprocess.blocking import BlockingInProcessKernelClient
13 | from ipykernel.inprocess.ipkernel import InProcessKernel
14 | from ipykernel.inprocess.manager import InProcessKernelManager
15 |
16 | from ..utils import assemble_output
17 |
18 | orig_msg = Session.msg
19 |
20 |
def _inject_cell_id(_self, *args, **kwargs):
    """
    This patch jupyter_client.session:Session.msg to add a cell_id to the return message metadata
    """
    assert isinstance(_self, Session)
    # Delegate to the saved original, then stamp a fixed cell id on the result.
    message = orig_msg(_self, *args, **kwargs)
    assert "cellId" not in message["metadata"]
    message["metadata"]["cellId"] = "test_cell_id"
    return message
30 |
31 |
@contextmanager
def patch_cell_id():
    """Temporarily replace Session.msg so every message carries a cell id."""
    Session.msg = _inject_cell_id  # type:ignore
    try:
        yield
    finally:
        # Always restore the pristine method, even if the body raised.
        Session.msg = orig_msg  # type:ignore
39 |
40 |
@pytest.fixture()
def kc():
    """Return a ready in-process kernel client with channels running."""
    manager = InProcessKernelManager()
    manager.start_kernel()
    client = manager.client()
    client.start_channels()
    client.wait_for_ready()
    return client
49 |
50 |
def test_with_cell_id(kc):
    """Execution should succeed while Session.msg injects a cell id."""
    with patch_cell_id():
        kc.execute("1+1")
54 |
55 |
def test_pylab(kc):
    """Does %pylab work in the in-process kernel?"""
    _ = pytest.importorskip("matplotlib", reason="This test requires matplotlib")
    kc.execute("%pylab")
    stdout, _stderr = assemble_output(kc.get_iopub_msg)
    assert "matplotlib" in stdout
62 |
63 |
def test_raw_input(kc):
    """Does the in-process kernel handle raw_input correctly?"""
    fake_stdin = StringIO("foobar\n")
    real_stdin = sys.stdin
    sys.stdin = fake_stdin
    try:
        kc.execute("x = input()")
    finally:
        # Restore the real stdin no matter what the kernel did.
        sys.stdin = real_stdin
    assert kc.kernel.shell.user_ns.get("x") == "foobar"
74 |
75 |
@pytest.mark.skipif("__pypy__" in sys.builtin_module_names, reason="fails on pypy")
def test_stdout(kc):
    """Does the in-process kernel correctly capture IO?"""
    kernel = InProcessKernel()

    # Direct shell execution is captured locally.
    with capture_output() as captured:
        kernel.shell.run_cell('print("foo")')
    assert captured.stdout == "foo\n"

    # Execution through a frontend client is forwarded over IOPub.
    client = BlockingInProcessKernelClient(kernel=kernel, session=kernel.session)
    kernel.frontends.append(client)
    client.execute('print("bar")')
    stdout, _stderr = assemble_output(client.get_iopub_msg)
    assert stdout == "bar\n"
90 |
91 |
@pytest.mark.skip(reason="Currently don't capture during test as pytest does its own capturing")
def test_capfd(kc):
    """Does correctly capture fd"""
    kernel = InProcessKernel()

    with capture_output() as io:
        kernel.shell.run_cell('print("foo")')
    assert io.stdout == "foo\n"

    kc = BlockingInProcessKernelClient(kernel=kernel, session=kernel.session)
    kernel.frontends.append(kc)
    kc.execute("import os")
    kc.execute('os.system("echo capfd")')
    # assemble_output takes the get-message callable (see test_stdout above);
    # passing the channel object itself would fail once this test is unskipped.
    out, _err = assemble_output(kc.get_iopub_msg)
    assert out == "capfd\n"
107 |
108 |
def test_getpass_stream(kc):
    """Tests that kernel getpass accept the stream parameter"""
    kernel = InProcessKernel()
    kernel._allow_stdin = True
    # Stub out the wire-level input request; only the signature is under test.
    kernel._input_request = lambda *args, **kwargs: None  # type:ignore

    kernel.getpass(stream="non empty")
116 |
117 |
async def test_do_execute(kc):
    """do_execute should run the code in the kernel's user namespace."""
    kernel = InProcessKernel()
    await kernel.do_execute("a=1", True)
    assert kernel.shell.user_ns["a"] == 1
122 |
--------------------------------------------------------------------------------
/tests/test_jsonutil.py:
--------------------------------------------------------------------------------
1 | """Test suite for our JSON utilities."""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 |
6 | import json
7 | import numbers
8 | from binascii import a2b_base64
9 | from datetime import date, datetime
10 |
11 | import pytest
12 | from jupyter_client._version import version_info as jupyter_client_version
13 |
14 | from ipykernel import jsonutil
15 | from ipykernel.jsonutil import encode_images, json_clean
16 |
17 | JUPYTER_CLIENT_MAJOR_VERSION: int = jupyter_client_version[0] # type:ignore
18 |
19 |
class MyInt:
    """Minimal int-convertible class, registered as a virtual Integral."""

    def __int__(self):
        return 389


numbers.Integral.register(MyInt)
26 |
27 |
class MyFloat:
    """Minimal float-convertible class, registered as a virtual Real."""

    def __float__(self):
        return 3.14


numbers.Real.register(MyFloat)
34 |
35 |
@pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op")
def test():
    """json_clean should map common Python values to JSON-safe equivalents."""
    # (input, expected) pairs; None as expected means "same as the input".
    cases = [
        (1, None),  # start with scalars
        (1.0, None),
        ("a", None),
        (True, None),
        (False, None),
        (None, None),
        # Containers
        ([1, 2], None),
        ((1, 2), [1, 2]),
        ({1, 2}, [1, 2]),
        (dict(x=1), None),
        ({"x": 1, "y": [1, 2, 3], "1": "int"}, None),
        # More exotic objects
        ((x for x in range(3)), [0, 1, 2]),
        (iter([1, 2]), [1, 2]),
        (datetime(1991, 7, 3, 12, 00), "1991-07-03T12:00:00.000000"),
        (date(1991, 7, 3), "1991-07-03T00:00:00.000000"),
        (MyFloat(), 3.14),
        (MyInt(), 389),
    ]

    for value, expected in cases:
        if expected is None:
            expected = value  # type:ignore
        cleaned = json_clean(value)
        # validate our cleanup
        assert cleaned == expected
        # and ensure that what we return, indeed encodes cleanly
        json.loads(json.dumps(cleaned))
70 |
71 |
@pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op")
def test_encode_images():
    """encode_images should base64-encode binary payloads reversibly and idempotently."""
    # invalid data, but the header and footer are from real files
    pngdata = b"\x89PNG\r\n\x1a\nblahblahnotactuallyvalidIEND\xaeB`\x82"
    jpegdata = b"\xff\xd8\xff\xe0\x00\x10JFIFblahblahjpeg(\xa0\x0f\xff\xd9"
    pdfdata = b"%PDF-1.\ntrailer<>]>>>>>>"
    bindata = b"\xff\xff\xff\xff"

    fmt = {
        "image/png": pngdata,
        "image/jpeg": jpegdata,
        "application/pdf": pdfdata,
        "application/unrecognized": bindata,
    }
    encoded = json_clean(encode_images(fmt))
    for key, value in fmt.items():
        # encoded has unicode, want bytes
        decoded = a2b_base64(encoded[key])
        assert decoded == value

    # encoding already-encoded data must be a no-op
    encoded2 = json_clean(encode_images(encoded))
    assert encoded == encoded2
97 |
98 |
@pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op")
def test_lambda():
    """Functions are not JSON-serializable and must be rejected."""
    with pytest.raises(ValueError):  # noqa: PT011
        json_clean(lambda: 1)
103 |
104 |
@pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op")
def test_exception():
    """Dicts whose keys collide after stringification must be rejected."""
    colliding_dicts = (
        {1: "number", "1": "string"},
        {True: "bool", "True": "string"},
    )
    for bad in colliding_dicts:
        with pytest.raises(ValueError):  # noqa: PT011
            json_clean(bad)
114 |
115 |
@pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op")
def test_unicode_dict():
    """Unicode keys and values should pass through json_clean unchanged."""
    data = {"üniço∂e": "üniço∂e"}
    assert jsonutil.json_clean(data) == data
121 |
--------------------------------------------------------------------------------
/ipykernel/inprocess/blocking.py:
--------------------------------------------------------------------------------
1 | """Implements a fully blocking kernel client.
2 |
3 | Useful for test suites and blocking terminal interfaces.
4 | """
5 |
6 | import sys
7 |
8 | # -----------------------------------------------------------------------------
9 | # Copyright (C) 2012 The IPython Development Team
10 | #
11 | # Distributed under the terms of the BSD License. The full license is in
12 | # the file LICENSE, distributed as part of this software.
13 | # -----------------------------------------------------------------------------
14 | from queue import Empty, Queue
15 |
16 | # IPython imports
17 | from traitlets import Type
18 |
19 | # Local imports
20 | from .channels import InProcessChannel
21 | from .client import InProcessKernelClient
22 |
23 |
class BlockingInProcessChannel(InProcessChannel):
    """An in-process channel whose consumers block on an internal queue."""

    def __init__(self, *args, **kwds):
        """Initialize the channel."""
        super().__init__(*args, **kwds)
        self._in_queue: Queue[object] = Queue()

    def call_handlers(self, msg):
        """Queue the incoming message for a later (possibly blocking) get_msg."""
        self._in_queue.put(msg)

    def get_msg(self, block=True, timeout=None):
        """Gets a message if there is one that is ready."""
        if timeout is None:
            # Queue.get(timeout=None) has stupid uninteruptible
            # behavior, so wait for a week instead
            timeout = 604800
        return self._in_queue.get(block, timeout)

    def get_msgs(self):
        """Get all messages that are currently ready."""
        ready = []
        try:
            while True:
                ready.append(self.get_msg(block=False))
        except Empty:
            # Queue drained; return what we collected.
            pass
        return ready

    def msg_ready(self):
        """Is there a message that has been received?"""
        return not self._in_queue.empty()
57 |
58 |
class BlockingInProcessStdInChannel(BlockingInProcessChannel):
    """A blocking in-process stdin channel."""

    def call_handlers(self, msg):
        """Overridden for the in-process channel.

        Answers input requests immediately by reading via raw_input instead
        of queueing them.
        """
        if msg["header"]["msg_type"] != "input_request":
            return
        _raw_input = self.client.kernel._sys_raw_input
        prompt = msg["content"]["prompt"]
        print(prompt, end="", file=sys.__stdout__)
        assert sys.__stdout__ is not None
        sys.__stdout__.flush()
        self.client.input(_raw_input())
75 |
76 |
class BlockingInProcessKernelClient(InProcessKernelClient):
    """A blocking in-process kernel client."""

    # The classes to use for the various channels.
    shell_channel_class = Type(BlockingInProcessChannel)
    iopub_channel_class = Type(BlockingInProcessChannel)
    stdin_channel_class = Type(BlockingInProcessStdInChannel)

    def wait_for_ready(self):
        """Wait for kernel info reply on shell channel.

        Repeatedly sends kernel_info requests until a reply arrives on the
        shell channel and at least one message has been seen on IOPub
        (proving IOPub is connected), then drains the IOPub channel.
        """
        while True:
            self.kernel_info()
            try:
                msg = self.shell_channel.get_msg(block=True, timeout=1)
            except Empty:
                pass
            else:
                if msg["msg_type"] == "kernel_info_reply":
                    # Checking that IOPub is connected. If it is not connected, start over.
                    try:
                        self.iopub_channel.get_msg(block=True, timeout=0.2)
                    except Empty:
                        pass
                    else:
                        self._handle_kernel_info_reply(msg)
                        break

        # Flush IOPub channel; discard anything left over from startup.
        # (Previously each flushed message type was print()ed — debug leftover
        # that polluted stdout of any embedding application.)
        while True:
            try:
                self.iopub_channel.get_msg(block=True, timeout=0.2)
            except Empty:
                break
111 |
--------------------------------------------------------------------------------
/tests/test_kernelapp.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import threading
4 | import time
5 | from unittest.mock import patch
6 |
7 | import pytest
8 | from jupyter_core.paths import secure_write
9 | from traitlets.config.loader import Config
10 |
11 | from ipykernel.kernelapp import IPKernelApp
12 |
13 | from .conftest import MockKernel
14 | from .utils import TemporaryWorkingDirectory
15 |
# trio is an optional dependency; tests that need it are skipped when missing.
try:
    import trio
except ImportError:
    trio = None
20 |
21 |
@pytest.mark.skipif(os.name == "nt", reason="requires ipc")
def test_init_ipc_socket():
    """Sockets over the ipc transport should open and tear down cleanly."""
    ipc_app = IPKernelApp(transport="ipc")
    ipc_app.init_sockets()
    ipc_app.cleanup_connection_file()
    ipc_app.close()
28 |
29 |
def test_blackhole():
    """Enabling the stdout/stderr blackhole should not raise."""
    app = IPKernelApp()
    app.no_stderr = app.no_stdout = True
    app.init_blackhole()
35 |
36 |
def test_start_app():
    """The app's IO loop should start and stop cleanly with a mock kernel."""
    app = IPKernelApp()
    app.kernel = MockKernel()

    def stop_later():
        # Give the loop a moment to start, then ask it to stop from its own thread.
        time.sleep(1)
        app.io_loop.add_callback(app.io_loop.stop)

    stopper = threading.Thread(target=stop_later)
    stopper.start()
    app.init_sockets()
    app.start()
    app.cleanup_connection_file()
    app.kernel.destroy()
    app.close()
52 |
53 |
@pytest.mark.skipif(os.name == "nt", reason="permission errors on windows")
def test_merge_connection_file():
    """IPKernelApp should merge an existing connection file with real ports.

    Writes a connection file containing zero ports and a wildcard ip, lets
    the app initialize, and checks that only the zero ports and the ip were
    replaced while every other field survived untouched.
    """
    cfg = Config()
    with TemporaryWorkingDirectory() as d:
        cfg.ProfileDir.location = d
        cf = os.path.join(d, "kernel.json")
        initial_connection_info = {
            "ip": "*",
            "transport": "tcp",
            "shell_port": 0,
            "hb_port": 0,
            "iopub_port": 0,
            "stdin_port": 0,
            "control_port": 53555,
            "key": "abc123",
            "signature_scheme": "hmac-sha256",
            "kernel_name": "My Kernel",
        }
        # We cannot use connect.write_connection_file since
        # it replaces port number 0 with a random port
        # and we want IPKernelApp to do that replacement.
        with secure_write(cf) as f:
            json.dump(initial_connection_info, f)
        assert os.path.exists(cf)

        app = IPKernelApp(config=cfg, connection_file=cf)

        # Calling app.initialize() does not work in the test, so we call the relevant functions that initialize() calls
        # We must pass in an empty argv, otherwise the default is to try to parse the test runner's argv
        super(IPKernelApp, app).initialize(argv=[""])
        app.init_connection_file()
        app.init_sockets()
        app.init_heartbeat()
        app.write_connection_file()

        # Initialize should have merged the actual connection info
        # with the connection info in the file
        assert cf == app.abs_connection_file
        assert os.path.exists(cf)

        with open(cf) as f:
            new_connection_info = json.load(f)

        # ports originally set as 0 have been replaced
        for port in ("shell", "hb", "iopub", "stdin"):
            key = f"{port}_port"
            # We initially had the port as 0
            assert initial_connection_info[key] == 0
            # the port is not 0 now
            assert new_connection_info[key] > 0
            # the port matches the port the kernel actually used
            assert new_connection_info[key] == getattr(app, key), f"{key}"
            del new_connection_info[key]
            del initial_connection_info[key]

        # The wildcard ip address was also replaced
        assert new_connection_info["ip"] != "*"
        del new_connection_info["ip"]
        del initial_connection_info["ip"]

        # everything else in the connection file is the same
        assert initial_connection_info == new_connection_info

        app.close()
        os.remove(cf)
119 |
120 |
@pytest.mark.skipif(trio is None, reason="requires trio")
def test_trio_loop():
    """A trio-loop app should start and shut down with the runner stubbed out."""
    app = IPKernelApp(trio_loop=True)
    app.kernel = MockKernel()
    app.init_sockets()
    # Stub the trio runner so start() returns instead of blocking forever.
    with patch("ipykernel.trio_runner.TrioRunner.run", lambda _: None):
        app.start()
    app.cleanup_connection_file()
    app.io_loop.add_callback(app.io_loop.stop)
    app.kernel.destroy()
    app.close()
132 |
--------------------------------------------------------------------------------
/tests/test_eventloop.py:
--------------------------------------------------------------------------------
1 | """Test eventloop integration"""
2 |
3 | import asyncio
4 | import os
5 | import sys
6 | import threading
7 | import time
8 |
9 | import pytest
10 |
11 | from ipykernel.eventloops import (
12 | enable_gui,
13 | loop_asyncio,
14 | loop_cocoa,
15 | loop_tk,
16 | )
17 |
18 | from .utils import flush_channels, start_new_kernel
19 |
# Global kernel manager/client pair, (re)created for each test by _setup_env.
KC = KM = None

# Qt binding names ("qt6"/"qt5") that imported successfully; filled at import
# time by _get_qt_vers().
qt_guis_avail = []

# Maps the gui name used by enable_gui to its importable Qt binding module.
gui_to_module = {"qt6": "PySide6", "qt5": "PyQt5"}
25 |
26 |
def _get_qt_vers():
    """Probe which Qt bindings can be imported and record them in qt_guis_avail.

    Only one Qt version can be imported per session because of the import
    mechanism, so this runs exactly once at module import time.
    """
    for gui in ("qt6", "qt5"):
        print(f"Trying {gui}")
        try:
            __import__(gui_to_module[gui])
        except ImportError:
            continue  # that version of Qt isn't available.
        qt_guis_avail.append(gui)
        if "QT_API" in os.environ:
            del os.environ["QT_API"]


_get_qt_vers()
42 |
43 |
@pytest.fixture(autouse=True)
def _setup_env():
    """Start a fresh global kernel for every test and shut it down afterwards."""
    global KM, KC
    KM, KC = start_new_kernel()
    flush_channels(KC)
    yield
    # Teardown: the globals must still be set before we can clean them up.
    assert KM is not None
    assert KC is not None
    KC.stop_channels()
    KM.shutdown_kernel(now=True)
55 |
56 |
# Reusable skip marker for tests known to fail on Windows.
windows_skip = pytest.mark.skipif(os.name == "nt", reason="causing failures on windows")

# some part of this module seems to hang when run with xvfb
pytestmark = pytest.mark.skipif(
    sys.platform == "linux" and bool(os.getenv("CI")), reason="hangs on linux CI"
)
63 |
64 |
@windows_skip
@pytest.mark.skipif(sys.platform == "darwin", reason="hangs on macos")
def test_tk_loop(kernel):
    """loop_tk should run and return once the Tk app is told to quit."""

    def quit_soon():
        time.sleep(1)
        try:
            kernel.app_wrapper.app.quit()
        # guard for tk failing to start (if there is no display)
        except AttributeError:
            pass

    quitter = threading.Thread(target=quit_soon)
    quitter.start()
    # guard for tk failing to start (if there is no display)
    try:
        loop_tk(kernel)
    except Exception:
        pass
    quitter.join()
84 |
85 |
@windows_skip
def test_asyncio_loop(kernel):
    """loop_asyncio should run the kernel on the current asyncio loop."""

    def request_stop():
        # Queue a stop so loop_asyncio returns instead of running forever.
        loop.call_soon(loop.stop)

    loop = asyncio.get_event_loop()
    loop.call_soon(request_stop)
    loop_asyncio(kernel)
94 |
95 |
@windows_skip
def test_enable_gui(kernel):
    """Enabling the tk eventloop integration should not raise."""
    enable_gui("tk", kernel)
99 |
100 |
@pytest.mark.skipif(sys.platform != "darwin", reason="MacOS-only")
def test_cocoa_loop(kernel):
    """The Cocoa event loop integration should run on macOS."""
    loop_cocoa(kernel)
104 |
105 |
@pytest.mark.parametrize("gui", qt_guis_avail)
def test_qt_enable_gui(gui, kernel, capsys):
    """enable_gui should install exactly one Qt app and refuse to switch bindings."""
    if os.getenv("GITHUB_ACTIONS", None) == "true" and gui == "qt5":
        pytest.skip("Qt5 and GitHub action crash CPython")
    if gui == "qt6" and sys.version_info < (3, 10):
        pytest.skip(
            "qt6 fails on 3.9 with AttributeError: module 'PySide6.QtPrintSupport' has no attribute 'QApplication'"
        )
    if sys.platform == "linux" and gui == "qt6" and os.getenv("GITHUB_ACTIONS", None) == "true":
        pytest.skip("qt6 fails on github CI with missing libEGL.so.1")
    enable_gui(gui, kernel)

    # We store the `QApplication` instance in the kernel.
    assert hasattr(kernel, "app")

    # And the `QEventLoop` is added to `app`:`
    assert hasattr(kernel.app, "qt_event_loop")

    # Don't create another app even if `gui` is the same.
    app = kernel.app
    enable_gui(gui, kernel)
    assert app == kernel.app

    # Event loop integration can be turned off.
    enable_gui(None, kernel)
    assert not hasattr(kernel, "app")

    # But now we're stuck with this version of Qt for good; can't switch.
    # NOTE(review): this loop picks the first binding NOT in qt_guis_avail; if
    # both bindings are importable it falls through with not_gui == "qt5" —
    # presumably still a different binding than the active one. Verify.
    for not_gui in ["qt6", "qt5"]:
        if not_gui not in qt_guis_avail:
            break

    enable_gui(not_gui, kernel)
    captured = capsys.readouterr()
    assert captured.out == f"Cannot switch Qt versions for this session; you must use {gui}.\n"

    # Check 'qt' gui, which means "the best available"
    enable_gui(None, kernel)
    enable_gui("qt", kernel)
    assert gui_to_module[gui] in str(kernel.app)
146 |
--------------------------------------------------------------------------------
/ipykernel/displayhook.py:
--------------------------------------------------------------------------------
1 | """Replacements for sys.displayhook that publish over ZMQ."""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 | from __future__ import annotations
6 |
7 | import builtins
8 | import sys
9 | import typing as t
10 | from contextvars import ContextVar
11 |
12 | from IPython.core.displayhook import DisplayHook
13 | from jupyter_client.session import Session, extract_header
14 | from traitlets import Any, Instance
15 |
16 | from ipykernel.jsonutil import encode_images, json_clean
17 |
18 |
class ZMQDisplayHook:
    """A simple displayhook that publishes the object's repr over a ZeroMQ
    socket."""

    topic = b"execute_result"

    def __init__(self, session, pub_socket):
        """Initialize the hook."""
        self.session = session
        self.pub_socket = pub_socket

        # Per-context parent header, plus a global fallback for contexts in
        # which set_parent() was never called.
        self._parent_header: ContextVar[dict[str, Any]] = ContextVar("parent_header")
        self._parent_header.set({})
        self._parent_header_global = {}

    def get_execution_count(self):
        """This method is replaced in kernelapp"""
        return 0

    def __call__(self, obj):
        """Handle a hook call."""
        if obj is None:
            return

        builtins._ = obj  # type:ignore[attr-defined]
        sys.stdout.flush()
        sys.stderr.flush()
        payload = {
            "execution_count": self.get_execution_count(),
            "data": {"text/plain": repr(obj)},
            "metadata": {},
        }
        self.session.send(
            self.pub_socket,
            "execute_result",
            payload,
            parent=self.parent_header,
            ident=self.topic,
        )

    @property
    def parent_header(self):
        """The parent header for the current context, or the global fallback."""
        try:
            return self._parent_header.get()
        except LookupError:
            return self._parent_header_global

    def set_parent(self, parent):
        """Set the parent header."""
        header = extract_header(parent)
        self._parent_header.set(header)
        self._parent_header_global = header
71 |
72 |
class ZMQShellDisplayHook(DisplayHook):
    """A displayhook subclass that publishes data using ZeroMQ. This is intended
    to work with an InteractiveShell instance. It sends a dict of different
    representations of the object."""

    topic = None

    session = Instance(Session, allow_none=True)
    pub_socket = Any(allow_none=True)
    _parent_header: ContextVar[dict[str, Any]]
    msg: dict[str, t.Any] | None

    def __init__(self, *args, **kwargs):
        """Initialize the hook."""
        super().__init__(*args, **kwargs)
        self._parent_header = ContextVar("parent_header")
        self._parent_header.set({})
        # Fallback used by the parent_header property in contexts where the
        # ContextVar was never set.  ZMQDisplayHook initializes the same
        # attribute; without it, parent_header could raise AttributeError.
        self._parent_header_global: dict[str, Any] = {}

    @property
    def parent_header(self):
        """The parent header for the current context, or the global fallback."""
        try:
            return self._parent_header.get()
        except LookupError:
            return self._parent_header_global

    def set_parent(self, parent):
        """Set the parent header."""
        parent_header = extract_header(parent)
        self._parent_header.set(parent_header)
        self._parent_header_global = parent_header

    def start_displayhook(self):
        """Start the display hook: create a fresh execute_result message."""
        if self.session:
            self.msg = self.session.msg(
                "execute_result",
                {
                    "data": {},
                    "metadata": {},
                },
                parent=self.parent_header,
            )

    def write_output_prompt(self):
        """Write the output prompt (the execution count) into the message."""
        if self.msg:
            self.msg["content"]["execution_count"] = self.prompt_count

    def write_format_data(self, format_dict, md_dict=None):
        """Write format data to the message."""
        if self.msg:
            self.msg["content"]["data"] = json_clean(encode_images(format_dict))
            self.msg["content"]["metadata"] = md_dict

    def finish_displayhook(self):
        """Finish up all displayhook activities."""
        sys.stdout.flush()
        sys.stderr.flush()
        # Only publish when there is actual display data to send.
        if self.msg and self.msg["content"]["data"] and self.session:
            self.session.send(self.pub_socket, self.msg, ident=self.topic)
        self.msg = None
133 |
--------------------------------------------------------------------------------
/ipykernel/connect.py:
--------------------------------------------------------------------------------
1 | """Connection file-related utilities for the kernel"""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 | from __future__ import annotations
6 |
7 | import json
8 | import sys
9 | from subprocess import PIPE, Popen
10 | from typing import TYPE_CHECKING, Any
11 |
12 | import jupyter_client
13 | from jupyter_client import write_connection_file
14 |
15 | if TYPE_CHECKING:
16 | from ipykernel.kernelapp import IPKernelApp
17 |
18 |
def get_connection_file(app: IPKernelApp | None = None) -> str:
    """Return the path to the connection file of an app

    Parameters
    ----------
    app : IPKernelApp instance [optional]
        If unspecified, the currently running app will be used
    """
    from traitlets.utils import filefind

    if app is None:
        from ipykernel.kernelapp import IPKernelApp

        if not IPKernelApp.initialized():
            error = "app not specified, and not in a running Kernel"
            raise RuntimeError(error)
        app = IPKernelApp.instance()

    # Resolve relative to the cwd first, then the app's connection dir
    return filefind(app.connection_file, [".", app.connection_dir])
38 |
39 |
def _find_connection_file(connection_file):
    """Return the absolute path for a connection file

    - If nothing specified, return current Kernel's connection file
    - Otherwise, call jupyter_client.find_connection_file
    """
    if connection_file is not None:
        return jupyter_client.find_connection_file(connection_file)
    # nothing specified: fall back to the running kernel's connection file
    return get_connection_file()
50 |
51 |
def get_connection_info(
    connection_file: str | None = None, unpack: bool = False
) -> str | dict[str, Any]:
    """Return the connection information for the current Kernel.

    Parameters
    ----------
    connection_file : str [optional]
        The connection file to be used. Can be given by absolute path, or
        IPython will search in the security directory.
        If run from IPython,

        If unspecified, the connection file for the currently running
        IPython Kernel will be used, which is only allowed from inside a kernel.

    unpack : bool [default: False]
        if True, return the unpacked dict, otherwise just the string contents
        of the file.

    Returns
    -------
    The connection dictionary of the current kernel, as string or dict,
    depending on `unpack`.
    """
    cf = _find_connection_file(connection_file)

    with open(cf) as f:
        contents = f.read()

    if not unpack:
        return contents

    info = json.loads(contents)
    # consumers expect the signing key as bytes: ensure key is bytes
    info["key"] = info.get("key", "").encode()
    return info  # type:ignore[no-any-return]
88 |
89 |
def connect_qtconsole(
    connection_file: str | None = None, argv: list[str] | None = None
) -> Popen[Any]:
    """Connect a qtconsole to the current kernel.

    This is useful for connecting a second qtconsole to a kernel, or to a
    local notebook.

    Parameters
    ----------
    connection_file : str [optional]
        The connection file to be used. Can be given by absolute path, or
        IPython will search in the security directory.
        If run from IPython,

        If unspecified, the connection file for the currently running
        IPython Kernel will be used, which is only allowed from inside a kernel.

    argv : list [optional]
        Any extra args to be passed to the console.

    Returns
    -------
    :class:`subprocess.Popen` instance running the qtconsole frontend
    """
    extra_args = argv if argv is not None else []

    cf = _find_connection_file(connection_file)

    # Run the qtconsole entry point in a fresh interpreter
    cmd = ";".join(["from qtconsole import qtconsoleapp", "qtconsoleapp.main()"])

    # Launch the Qt console in a separate session & process group, so
    # interrupting the kernel doesn't kill it.
    kwargs: dict[str, Any] = {"start_new_session": True}

    return Popen(
        [sys.executable, "-c", cmd, "--existing", cf, *extra_args],
        stdout=PIPE,
        stderr=PIPE,
        close_fds=(sys.platform != "win32"),
        **kwargs,
    )
133 |
134 |
# Public API of this module (`write_connection_file` is re-exported
# from jupyter_client).
__all__ = [
    "connect_qtconsole",
    "get_connection_file",
    "get_connection_info",
    "write_connection_file",
]
141 |
--------------------------------------------------------------------------------
/examples/embedding/ipkernel_wxapp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Example integrating an IPython kernel into a GUI App.
3 |
4 | This trivial GUI application internally starts an IPython kernel, to which Qt
5 | consoles can be connected either by the user at the command line or started
6 | from the GUI itself, via a button. The GUI can also manipulate one variable in
7 | the kernel's namespace, and print the namespace to the console.
8 |
9 | Play with it by running the script and then opening one or more consoles, and
10 | pushing the 'Counter++' and 'Namespace' buttons.
11 |
12 | Upon exit, it should automatically close all consoles opened from the GUI.
13 |
14 | Consoles attached separately from a terminal will not be terminated, though
15 | they will notice that their kernel died.
16 |
17 | Ref: Modified from wxPython source code wxPython/samples/simple/simple.py
18 | """
19 |
20 | # -----------------------------------------------------------------------------
21 | # Imports
22 | # -----------------------------------------------------------------------------
23 | import sys
24 |
25 | import wx
26 | from internal_ipkernel import InternalIPKernel
27 |
28 | # -----------------------------------------------------------------------------
29 | # Functions and classes
30 | # -----------------------------------------------------------------------------
31 |
32 |
class MyFrame(wx.Frame, InternalIPKernel):
    """
    This is MyFrame. It just shows a few controls on a wxPanel,
    and has a simple menu.

    The kernel plumbing (init_ipkernel, new_qt_console, print_namespace,
    count, cleanup_consoles) is provided by the InternalIPKernel mixin.
    """

    def __init__(self, parent, title):
        """Initialize the frame.

        Parameters
        ----------
        parent : wx.Window or None
            Parent window (None for a top-level frame).
        title : str
            Window title.
        """
        wx.Frame.__init__(self, parent, -1, title, pos=(150, 150), size=(350, 285))

        # Create the menubar
        menuBar = wx.MenuBar()

        # and a menu
        menu = wx.Menu()

        # add an item to the menu, using \tKeyName automatically
        # creates an accelerator, the third param is some help text
        # that will show up in the statusbar
        menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit this simple sample")

        # bind the menu event to an event handler
        self.Bind(wx.EVT_MENU, self.OnTimeToClose, id=wx.ID_EXIT)

        # and put the menu on the menubar
        menuBar.Append(menu, "&File")
        self.SetMenuBar(menuBar)

        self.CreateStatusBar()

        # Now create the Panel to put the other controls on.
        panel = wx.Panel(self)

        # and a few controls
        text = wx.StaticText(panel, -1, "Hello World!")
        text.SetFont(wx.Font(14, wx.SWISS, wx.NORMAL, wx.BOLD))
        text.SetSize(text.GetBestSize())
        qtconsole_btn = wx.Button(panel, -1, "Qt Console")
        ns_btn = wx.Button(panel, -1, "Namespace")
        count_btn = wx.Button(panel, -1, "Count++")
        close_btn = wx.Button(panel, -1, "Quit")

        # bind the button events to handlers
        # (all but OnTimeToClose come from the InternalIPKernel mixin)
        self.Bind(wx.EVT_BUTTON, self.new_qt_console, qtconsole_btn)
        self.Bind(wx.EVT_BUTTON, self.print_namespace, ns_btn)
        self.Bind(wx.EVT_BUTTON, self.count, count_btn)
        self.Bind(wx.EVT_BUTTON, self.OnTimeToClose, close_btn)

        # Use a sizer to layout the controls, stacked vertically and with
        # a 10 pixel border around each
        sizer = wx.BoxSizer(wx.VERTICAL)
        for ctrl in [text, qtconsole_btn, ns_btn, count_btn, close_btn]:
            sizer.Add(ctrl, 0, wx.ALL, 10)
        panel.SetSizer(sizer)
        panel.Layout()

        # Start the IPython kernel with gui support
        self.init_ipkernel("wx")

    def OnTimeToClose(self, evt):
        """Event handler for the button click."""
        print("See ya later!")
        sys.stdout.flush()
        # Close any consoles this GUI opened before tearing down the frame
        self.cleanup_consoles(evt)
        self.Close()
        # Not sure why, but our IPython kernel seems to prevent normal WX
        # shutdown, so an explicit exit() call is needed.
        sys.exit()
101 |
102 |
class MyApp(wx.App):
    """A custom wx app."""

    def OnInit(self):
        """Create the main frame and expose its embedded kernel on the app."""
        main_frame = MyFrame(None, "Simple wxPython App")
        self.SetTopWindow(main_frame)
        main_frame.Show(True)
        # Make the kernel reachable as app.ipkernel for the __main__ block
        self.ipkernel = main_frame.ipkernel
        return True
113 |
114 |
115 | # -----------------------------------------------------------------------------
116 | # Main script
117 | # -----------------------------------------------------------------------------
118 |
119 | if __name__ == "__main__":
120 | app = MyApp(redirect=False, clearSigInt=False)
121 |
122 | # Very important, IPython-specific step: this gets GUI event loop
123 | # integration going, and it replaces calling app.MainLoop()
124 | app.ipkernel.start()
125 |
--------------------------------------------------------------------------------
/ipykernel/heartbeat.py:
--------------------------------------------------------------------------------
1 | """The client and server for a basic ping-pong style heartbeat."""
2 |
3 | # -----------------------------------------------------------------------------
4 | # Copyright (C) 2008-2011 The IPython Development Team
5 | #
6 | # Distributed under the terms of the BSD License. The full license is in
7 | # the file LICENSE, distributed as part of this software.
8 | # -----------------------------------------------------------------------------
9 |
10 | # -----------------------------------------------------------------------------
11 | # Imports
12 | # -----------------------------------------------------------------------------
13 |
14 | import errno
15 | import socket
16 | from pathlib import Path
17 | from threading import Thread
18 |
19 | import zmq
20 | from jupyter_client.localinterfaces import localhost
21 |
22 | # -----------------------------------------------------------------------------
23 | # Code
24 | # -----------------------------------------------------------------------------
25 |
26 |
class Heartbeat(Thread):
    """A simple ping-pong style heartbeat that runs in a thread."""

    def __init__(self, context, addr=None):
        """Initialize the heartbeat thread.

        Parameters
        ----------
        context : zmq Context
            Context used to create the heartbeat socket in run().
        addr : (transport, ip, port) tuple [optional]
            Endpoint to bind; defaults to an ephemeral tcp port on localhost.
        """
        if addr is None:
            addr = ("tcp", localhost(), 0)
        Thread.__init__(self, name="Heartbeat")
        self.context = context
        self.transport, self.ip, self.port = addr
        # Remember the requested port: 0 means "pick one for me", which is
        # also what lets _bind_socket retry on conflicts.
        self.original_port = self.port
        if self.original_port == 0:
            self.pick_port()
        self.addr = (self.ip, self.port)
        self.daemon = True
        # Flags honored by the pydev debugger so it skips this helper
        # thread (presumably -- confirm against pydevd).
        self.pydev_do_not_trace = True
        self.is_pydev_daemon_thread = True
        self.name = "Heartbeat"

    def pick_port(self):
        """Pick a port for the heartbeat."""
        if self.transport == "tcp":
            # Bind a throwaway TCP socket to port 0 so the OS chooses a free
            # port, then reuse that number for the zmq bind.
            # NOTE(review): small race window between close() and the zmq
            # bind; _bind_socket's retry loop covers conflicts.
            s = socket.socket()
            # '*' means all interfaces to 0MQ, which is '' to socket.socket
            s.bind(("" if self.ip == "*" else self.ip, 0))
            self.port = s.getsockname()[1]
            s.close()
        elif self.transport == "ipc":
            # For ipc, "port" is a numeric suffix on the endpoint path;
            # take the first suffix whose file does not exist yet.
            self.port = 1
            while Path(f"{self.ip}-{self.port}").exists():
                self.port = self.port + 1
        else:
            raise ValueError("Unrecognized zmq transport: %s" % self.transport)
        return self.port

    def _try_bind_socket(self):
        # Single bind attempt: tcp endpoints are ip:port, ipc are path-port
        c = ":" if self.transport == "tcp" else "-"
        return self.socket.bind(f"{self.transport}://{self.ip}" + c + str(self.port))

    def _bind_socket(self):
        # Bind the heartbeat socket, retrying with a fresh auto-picked port
        # on address-in-use errors; explicit ports fail immediately.
        try:
            win_in_use = errno.WSAEADDRINUSE  # type:ignore[attr-defined]
        except AttributeError:
            # WSAEADDRINUSE only exists on Windows
            win_in_use = None

        # Try up to 100 times to bind a port when in conflict to avoid
        # infinite attempts in bad setups
        max_attempts = 1 if self.original_port else 100
        for attempt in range(max_attempts):
            try:
                self._try_bind_socket()
            except zmq.ZMQError as ze:
                if attempt == max_attempts - 1:
                    raise
                # Raise if we have any error not related to socket binding
                if ze.errno != errno.EADDRINUSE and ze.errno != win_in_use:
                    raise
                if self.original_port == 0:
                    # Auto-picked port got taken meanwhile; pick another.
                    self.pick_port()
                else:
                    # An explicitly requested port is busy: don't retry.
                    raise
            else:
                return

    def run(self):
        """Run the heartbeat thread."""
        self.name = "Heartbeat"
        self.socket = self.context.socket(zmq.ROUTER)
        # allow up to 1s for pending messages to flush on close
        self.socket.linger = 1000
        try:
            self._bind_socket()
        except Exception:
            self.socket.close()
            raise

        while True:
            try:
                # QUEUE device with the same socket on both ends: every
                # incoming ping is forwarded straight back as the pong.
                zmq.device(zmq.QUEUE, self.socket, self.socket)  # type:ignore[attr-defined]
            except zmq.ZMQError as e:
                if e.errno == errno.EINTR:
                    # signal interrupt, resume heartbeat
                    continue
                if e.errno == zmq.ETERM:
                    # context terminated, close socket and exit
                    try:
                        self.socket.close()
                    except zmq.ZMQError:
                        # suppress further errors during cleanup
                        # this shouldn't happen, though
                        pass
                    break
                if e.errno == zmq.ENOTSOCK:
                    # socket closed elsewhere, exit
                    break
                raise
            else:
                break
125 |
--------------------------------------------------------------------------------
/tests/test_connect.py:
--------------------------------------------------------------------------------
1 | """Tests for kernel connection utilities"""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 | import errno
6 | import json
7 | import os
8 | from tempfile import TemporaryDirectory
9 | from typing import no_type_check
10 | from unittest.mock import patch
11 |
12 | import pytest
13 | import zmq
14 | from traitlets.config.loader import Config
15 |
16 | from ipykernel import connect
17 | from ipykernel.kernelapp import IPKernelApp
18 |
19 | from .utils import TemporaryWorkingDirectory
20 |
21 |
22 | @pytest.fixture(scope="module", autouse=True)
23 | def _enable_tracemalloc():
24 | try:
25 | import tracemalloc
26 | except ModuleNotFoundError:
27 | # pypy
28 | tracemalloc = None
29 | if tracemalloc is not None:
30 | tracemalloc.start()
31 | yield
32 | if tracemalloc is not None:
33 | tracemalloc.stop()
34 |
35 |
# A complete fake connection-info dict, round-tripped through
# write_connection_file / get_connection_info in the tests below.
sample_info: dict = {
    "ip": "1.2.3.4",
    "transport": "ipc",
    "shell_port": 1,
    "hb_port": 2,
    "iopub_port": 3,
    "stdin_port": 4,
    "control_port": 5,
    "key": b"abc123",
    "signature_scheme": "hmac-md5",
}
47 |
48 |
class DummyKernelApp(IPKernelApp):
    """IPKernelApp stripped down to connection-file handling for tests."""

    def _default_shell_port(self):
        # port 0 lets the OS pick a free port, avoiding collisions
        return 0

    def initialize(self, argv=None):
        """Only initialize the profile dir and connection file (no sockets)."""
        self.init_profile_dir()
        self.init_connection_file()
56 |
57 |
def test_get_connection_file():
    """get_connection_file resolves the app's file inside its connection dir."""
    config = Config()
    with TemporaryWorkingDirectory() as d:
        config.ProfileDir.location = d
        cf = "kernel.json"
        app = DummyKernelApp(config=config, connection_file=cf)
        app.initialize()

        expected = os.path.join(app.connection_dir, cf)
        assert expected == app.abs_connection_file
        with open(expected, "w") as f:
            f.write("{}")
        assert os.path.exists(expected)
        assert connect.get_connection_file(app) == expected

        # a relative connection_file resolves to the same absolute path
        app.connection_file = cf
        assert connect.get_connection_file(app) == expected
75 |
76 |
def test_get_connection_info():
    """Round-trip a connection file through get_connection_info.

    Verifies both the unpacked (dict) form and that the raw JSON string,
    once parsed and its key re-encoded to bytes, matches the original info.
    """
    with TemporaryDirectory() as d:
        cf = os.path.join(d, "kernel.json")
        connect.write_connection_file(cf, **sample_info)
        json_info = connect.get_connection_info(cf)
        info = connect.get_connection_info(cf, unpack=True)
        assert isinstance(json_info, str)

        sub_info = {k: v for k, v in info.items() if k in sample_info}
        assert sub_info == sample_info

        info2 = json.loads(json_info)
        info2["key"] = info2["key"].encode("utf-8")
        # BUG FIX: compare the dict parsed from the JSON string (info2);
        # the original iterated `info` again, re-running the previous
        # assertion and never actually checking the string form.
        sub_info2 = {k: v for k, v in info2.items() if k in sample_info}
        assert sub_info2 == sample_info
92 |
93 |
def test_port_bind_failure_raises(request):
    """An unrecognized ZMQError during bind propagates without any retry."""
    config = Config()
    with TemporaryWorkingDirectory() as d:
        config.ProfileDir.location = d
        app = DummyKernelApp(config=config, connection_file="kernel.json")
        request.addfinalizer(app.close)
        app.initialize()
        with patch.object(app, "_try_bind_socket") as mock_try_bind:
            mock_try_bind.side_effect = zmq.ZMQError(-100, "fails for unknown error types")
            with pytest.raises(zmq.ZMQError):
                app.init_sockets()
            # no retry for unknown errno values
            assert mock_try_bind.call_count == 1
107 |
108 |
@no_type_check
def test_port_bind_failure_recovery(request):
    """EADDRINUSE (unix or windows flavor) triggers a retry on a new port."""
    if not hasattr(errno, "WSAEADDRINUSE"):
        # Fake windows address in-use code
        patcher = patch.object(errno, "WSAEADDRINUSE", 12345, create=True)
        patcher.start()
        request.addfinalizer(patcher.stop)

    config = Config()
    with TemporaryWorkingDirectory() as d:
        config.ProfileDir.location = d
        app = DummyKernelApp(config=config, connection_file="kernel.json")
        request.addfinalizer(app.close)
        app.initialize()
        with patch.object(app, "_try_bind_socket") as mock_try_bind:
            mock_try_bind.side_effect = [
                zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind unix"),
                zmq.ZMQError(errno.WSAEADDRINUSE, "fails for non-bind windows"),
                *([0] * 100),
            ]
            # Shouldn't raise anything as retries will kick in
            app.init_sockets()
133 |
134 |
def test_port_bind_failure_gives_up_retries(request):
    """Persistent EADDRINUSE exhausts the 100-attempt retry budget, then raises."""
    config = Config()
    with TemporaryWorkingDirectory() as d:
        config.ProfileDir.location = d
        app = DummyKernelApp(config=config, connection_file="kernel.json")
        request.addfinalizer(app.close)
        app.initialize()
        with patch.object(app, "_try_bind_socket") as mock_try_bind:
            mock_try_bind.side_effect = zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind")
            with pytest.raises(zmq.ZMQError):
                app.init_sockets()
            assert mock_try_bind.call_count == 100
148 |
--------------------------------------------------------------------------------
/ipykernel/_eventloop_macos.py:
--------------------------------------------------------------------------------
1 | """Eventloop hook for OS X
2 |
3 | Calls NSApp / CoreFoundation APIs via ctypes.
4 | """
5 |
6 | # cribbed heavily from IPython.terminal.pt_inputhooks.osx
7 | # obj-c boilerplate from appnope, used under BSD 2-clause
8 |
9 | import ctypes
10 | import ctypes.util
11 | from threading import Event
12 |
# Load the Objective-C runtime so we can talk to Cocoa without PyObjC.
objc = ctypes.cdll.LoadLibrary(ctypes.util.find_library("objc"))  # type:ignore[arg-type]

void_p = ctypes.c_void_p

# These return ids/selectors (pointers); ctypes defaults to c_int, which
# would truncate them on 64-bit builds.
objc.objc_getClass.restype = void_p
objc.sel_registerName.restype = void_p
objc.objc_msgSend.restype = void_p

# shorthand for sending Objective-C messages
msg = objc.objc_msgSend
22 |
23 |
24 | def _utf8(s):
25 | """ensure utf8 bytes"""
26 | if not isinstance(s, bytes):
27 | s = s.encode("utf8")
28 | return s
29 |
30 |
def n(name):
    """create a selector name (for ObjC methods)"""
    selector_bytes = _utf8(name)
    return objc.sel_registerName(selector_bytes)
34 |
35 |
def C(classname):
    """get an ObjC Class by name"""
    class_bytes = _utf8(classname)
    return objc.objc_getClass(class_bytes)
39 |
40 |
41 | # end obj-c boilerplate from appnope
42 |
# CoreFoundation C-API calls we will use:
CoreFoundation = ctypes.cdll.LoadLibrary(
    ctypes.util.find_library("CoreFoundation")  # type:ignore[arg-type]
)

# Declare restype/argtypes for each CF function so ctypes marshals
# pointers and doubles correctly.
CFAbsoluteTimeGetCurrent = CoreFoundation.CFAbsoluteTimeGetCurrent
CFAbsoluteTimeGetCurrent.restype = ctypes.c_double

CFRunLoopGetCurrent = CoreFoundation.CFRunLoopGetCurrent
CFRunLoopGetCurrent.restype = void_p

CFRunLoopGetMain = CoreFoundation.CFRunLoopGetMain
CFRunLoopGetMain.restype = void_p

CFRunLoopStop = CoreFoundation.CFRunLoopStop
CFRunLoopStop.restype = None
CFRunLoopStop.argtypes = [void_p]

CFRunLoopTimerCreate = CoreFoundation.CFRunLoopTimerCreate
CFRunLoopTimerCreate.restype = void_p
CFRunLoopTimerCreate.argtypes = [
    void_p,  # allocator (NULL)
    ctypes.c_double,  # fireDate
    ctypes.c_double,  # interval
    ctypes.c_int,  # flags (0)
    ctypes.c_int,  # order (0)
    void_p,  # callout
    void_p,  # context
]

CFRunLoopAddTimer = CoreFoundation.CFRunLoopAddTimer
CFRunLoopAddTimer.restype = None
CFRunLoopAddTimer.argtypes = [void_p, void_p, void_p]

# constant read from the library's data segment, not a function
kCFRunLoopCommonModes = void_p.in_dll(CoreFoundation, "kCFRunLoopCommonModes")
78 |
79 |
def _NSApp():
    """Return the global NSApplication instance (NSApp)"""
    # argtypes must be reset before each objc_msgSend call because the
    # signature varies per message (here: receiver + selector only).
    objc.objc_msgSend.argtypes = [void_p, void_p]
    return msg(C("NSApplication"), n("sharedApplication"))
84 |
85 |
def _wake(NSApp):
    """Wake the Application"""
    # otherEventWithType:... takes receiver + selector + 9 arguments
    objc.objc_msgSend.argtypes = [
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
        void_p,
    ]
    # Build a dummy event; posting it makes the app's run loop cycle so a
    # preceding stop: request takes effect.
    event = msg(
        C("NSEvent"),
        n(
            "otherEventWithType:location:modifierFlags:"
            "timestamp:windowNumber:context:subtype:data1:data2:"
        ),
        15,  # Type (15 = NSApplicationDefined, presumably -- confirm vs AppKit)
        0,  # location
        0,  # flags
        0,  # timestamp
        0,  # window
        None,  # context
        0,  # subtype
        0,  # data1
        0,  # data2
    )
    objc.objc_msgSend.argtypes = [void_p, void_p, void_p, void_p]
    msg(NSApp, n("postEvent:atStart:"), void_p(event), True)
119 |
120 |
# Set when stop() fires; lets mainloop() detect whether the app exited via
# our timer callback or on its own.
_triggered = Event()


def stop(timer=None, loop=None):
    """Callback to fire when there's input to be read"""
    _triggered.set()
    NSApp = _NSApp()
    # if NSApp is not running, stop CFRunLoop directly,
    # otherwise stop and wake NSApp
    objc.objc_msgSend.argtypes = [void_p, void_p]
    if msg(NSApp, n("isRunning")):
        objc.objc_msgSend.argtypes = [void_p, void_p, void_p]
        msg(NSApp, n("stop:"), NSApp)
        # stop: needs an event to be processed before it takes effect
        _wake(NSApp)
    else:
        CFRunLoopStop(CFRunLoopGetCurrent())
137 |
138 |
# C-callable thunk wrapping stop(); kept referenced at module level so the
# ctypes callback isn't garbage-collected while a timer may still fire it.
_c_callback_func_type = ctypes.CFUNCTYPE(None, void_p, void_p)
_c_stop_callback = _c_callback_func_type(stop)


def _stop_after(delay):
    """Register callback to stop eventloop after a delay"""
    timer = CFRunLoopTimerCreate(
        None,  # allocator
        CFAbsoluteTimeGetCurrent() + delay,  # fireDate
        0,  # interval
        0,  # flags
        0,  # order
        _c_stop_callback,
        None,
    )
    # Schedule the one-shot timer on the main run loop
    CFRunLoopAddTimer(
        CFRunLoopGetMain(),
        timer,
        kCFRunLoopCommonModes,
    )
159 |
160 |
def mainloop(duration=1):
    """run the Cocoa eventloop for the specified duration (seconds)"""

    _triggered.clear()
    NSApp = _NSApp()
    # arrange for stop() to fire after `duration` seconds
    _stop_after(duration)
    objc.objc_msgSend.argtypes = [void_p, void_p]
    msg(NSApp, n("run"))
    if not _triggered.is_set():
        # app closed without firing callback,
        # probably due to last window being closed.
        # Run the loop manually in this case,
        # since there may be events still to process (ipython/ipython#9734)
        CoreFoundation.CFRunLoopRun()
175 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import logging
3 | import os
4 | from typing import no_type_check
5 | from unittest.mock import MagicMock
6 |
7 | import pytest
8 | import pytest_asyncio
9 | import zmq
10 | from jupyter_client.session import Session
11 | from tornado.ioloop import IOLoop
12 | from zmq.eventloop.zmqstream import ZMQStream
13 |
14 | from ipykernel.ipkernel import IPythonKernel
15 | from ipykernel.kernelbase import Kernel
16 | from ipykernel.zmqshell import ZMQInteractiveShell
17 |
18 | try:
19 | import resource
20 | except ImportError:
21 | # Windows
22 | resource = None # type:ignore
23 |
24 | from .utils import new_kernel
25 |
# Handle resource limit
# Ensure a minimal soft limit of DEFAULT_SOFT if the current hard limit is at least that much.
if resource is not None:
    soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)

    DEFAULT_SOFT = 4096
    if hard >= DEFAULT_SOFT:
        soft = DEFAULT_SOFT

    # never request a soft limit above the hard limit (setrlimit would fail)
    if hard < soft:
        hard = soft

    resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard))


# Enforce selector event loop on Windows.
# (the default proactor loop lacks the reader/writer APIs zmq needs --
# presumably the motivation here; confirm against asyncio docs)
if os.name == "nt":
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())  # type:ignore
44 |
45 |
class KernelMixin:
    """Test harness mixed into kernel classes.

    Replaces the real transport with in-process zmq sockets and captures
    outgoing replies via the streams' on_send hook, so requests can be
    dispatched and their replies inspected without a client process.
    """

    log = logging.getLogger()

    def _initialize(self):
        # Build the sockets a kernel expects; shell/control get ZMQStreams
        # whose outgoing sends are captured by _on_send below.
        self.context = context = zmq.Context()
        self.iopub_socket = context.socket(zmq.PUB)
        self.stdin_socket = context.socket(zmq.ROUTER)
        self.session = Session()
        self.test_sockets = [self.iopub_socket]
        self.test_streams = []

        for name in ["shell", "control"]:
            socket = context.socket(zmq.ROUTER)
            stream = ZMQStream(socket)
            stream.on_send(self._on_send)
            self.test_sockets.append(socket)
            self.test_streams.append(stream)
            setattr(self, f"{name}_stream", stream)

    async def do_debug_request(self, msg):
        # stub: debug requests succeed with an empty reply
        return {}

    def destroy(self):
        """Close all test streams and sockets and tear down the context."""
        for stream in self.test_streams:
            stream.close()
        for socket in self.test_sockets:
            socket.close()
        self.context.destroy()

    @no_type_check
    async def test_shell_message(self, *args, **kwargs):
        """Dispatch a shell-channel request and return the captured reply."""
        msg_list = self._prep_msg(*args, **kwargs)
        await self.dispatch_shell(msg_list)
        self.shell_stream.flush()
        return await self._wait_for_msg()

    @no_type_check
    async def test_control_message(self, *args, **kwargs):
        """Dispatch a control-channel request and return the captured reply."""
        msg_list = self._prep_msg(*args, **kwargs)
        await self.process_control(msg_list)
        self.control_stream.flush()
        return await self._wait_for_msg()

    def _on_send(self, msg, *args, **kwargs):
        # capture the serialized reply for _wait_for_msg
        self._reply = msg

    def _prep_msg(self, *args, **kwargs):
        # build and serialize a request, as it would arrive off the wire
        self._reply = None
        raw_msg = self.session.msg(*args, **kwargs)
        msg = self.session.serialize(raw_msg)
        return [zmq.Message(m) for m in msg]

    async def _wait_for_msg(self):
        # poll until _on_send has captured a reply, then deserialize it
        while not self._reply:
            await asyncio.sleep(0.1)
        _, msg = self.session.feed_identities(self._reply)
        return self.session.deserialize(msg)

    def _send_interrupt_children(self):
        # override to prevent deadlock
        pass
107 |
108 |
class MockKernel(KernelMixin, Kernel):  # type:ignore
    """Minimal concrete Kernel used by the `kernel` fixture.

    Echoes executed code back on the stdout stream instead of running it.
    """

    implementation = "test"
    implementation_version = "1.0"
    language = "no-op"
    language_version = "0.1"
    language_info = {
        "name": "test",
        "mimetype": "text/plain",
        "file_extension": ".txt",
    }
    banner = "test kernel"

    def __init__(self, *args, **kwargs):
        # set up the test sockets/streams before the base class needs them
        self._initialize()
        self.shell = MagicMock()
        super().__init__(*args, **kwargs)

    async def do_execute(
        self, code, silent, store_history=True, user_expressions=None, allow_stdin=False
    ):
        """Pretend to execute: stream the code back and report success."""
        if not silent:
            stream_content = {"name": "stdout", "text": code}
            self.send_response(self.iopub_socket, "stream", stream_content)

        return {
            "status": "ok",
            # The base class increments the execution count
            "execution_count": self.execution_count,
            "payload": [],
            "user_expressions": {},
        }
140 |
141 |
class MockIPyKernel(KernelMixin, IPythonKernel):  # type:ignore
    """IPythonKernel wired to the in-process test transport."""

    def __init__(self, *args, **kwargs):
        # set up the test sockets/streams before the base class needs them
        self._initialize()
        super().__init__(*args, **kwargs)
146 |
147 |
@pytest_asyncio.fixture()
def kernel():
    """Yield a MockKernel attached to the current IO loop; destroyed on teardown."""
    mock_kernel = MockKernel()
    mock_kernel.io_loop = IOLoop.current()
    yield mock_kernel
    mock_kernel.destroy()
154 |
155 |
@pytest_asyncio.fixture()
def ipkernel():
    """Yield a MockIPyKernel attached to the current IO loop.

    Tears down the kernel and clears the singleton shell instance so the
    next test starts from a clean slate.
    """
    mock_kernel = MockIPyKernel()
    mock_kernel.io_loop = IOLoop.current()
    yield mock_kernel
    mock_kernel.destroy()
    ZMQInteractiveShell.clear_instance()
163 |
164 |
@pytest.fixture
def kc():
    """Yield a client connected to a freshly started kernel (see utils.new_kernel)."""
    with new_kernel() as kc:
        yield kc
169 |
--------------------------------------------------------------------------------
/.github/workflows/downstream.yml:
--------------------------------------------------------------------------------
1 | name: Test downstream projects
2 |
3 | on:
4 | push:
5 | branches: ["main"]
6 | pull_request:
7 |
8 | concurrency:
9 | group: downstream-${{ github.ref }}
10 | cancel-in-progress: true
11 |
12 | jobs:
13 | nbclient:
14 | runs-on: ubuntu-latest
15 | steps:
16 | - name: Checkout
17 | uses: actions/checkout@v6
18 |
19 | - name: Base Setup
20 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
21 |
22 | - name: Run Test
23 | uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1
24 | with:
25 | package_name: nbclient
26 | env_values: IPYKERNEL_CELL_NAME=\
27 |
28 | ipywidgets:
29 | runs-on: ubuntu-latest
30 | steps:
31 | - name: Checkout
32 | uses: actions/checkout@v6
33 |
34 | - name: Base Setup
35 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
36 |
37 | - name: Run Test
38 | uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1
39 | with:
40 | package_name: ipywidgets
41 | test_command: pytest -vv -raXxs -W default --durations 10 --color=yes
42 |
43 | jupyter_client:
44 | runs-on: ubuntu-latest
45 | steps:
46 | - name: Checkout
47 | uses: actions/checkout@v6
48 |
49 | - name: Base Setup
50 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
51 |
52 | - name: Run Test
53 | uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1
54 | with:
55 | package_name: jupyter_client
56 | test_command: "pytest -vv -raXxs -W default --durations 10 --color=yes -k 'not (test_input_request or signal_kernel_subprocess)'"
57 |
58 | ipyparallel:
59 | runs-on: ubuntu-latest
60 | timeout-minutes: 20
61 | steps:
62 | - name: Checkout
63 | uses: actions/checkout@v6
64 |
65 | - name: Base Setup
66 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
67 |
68 | - name: Run Test
69 | uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1
70 | with:
71 | package_name: ipyparallel
72 | package_spec: '-e ".[test]"'
73 |
74 | jupyter_kernel_test:
75 | runs-on: ubuntu-latest
76 | steps:
77 | - name: Checkout
78 | uses: actions/checkout@v6
79 |
80 | - name: Base Setup
81 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
82 |
83 | - name: Run Test
84 | run: |
85 | git clone https://github.com/jupyter/jupyter_kernel_test.git
86 | cd jupyter_kernel_test
87 | pip install -e ".[test]"
88 | python test_ipykernel.py
89 |
90 | qtconsole:
91 | runs-on: ubuntu-latest
92 | timeout-minutes: 20
93 | steps:
94 | - name: Checkout
95 | uses: actions/checkout@v6
96 | - name: Setup Python
97 | uses: actions/setup-python@v6
98 | with:
99 | python-version: "3.10"
100 | architecture: "x64"
101 | - name: Install System Packages
102 | run: |
103 | sudo apt-get update
104 | sudo apt-get install -y --no-install-recommends '^libxcb.*-dev' libx11-xcb-dev libglu1-mesa-dev libxrender-dev libxi-dev libxkbcommon-dev libxkbcommon-x11-dev
105 | - name: Install qtconsole dependencies
106 | shell: bash -l {0}
107 | run: |
108 | cd ${GITHUB_WORKSPACE}/..
109 | git clone https://github.com/spyder-ide/qtconsole.git
110 | cd qtconsole
111 | ${pythonLocation}/bin/python -m pip install -e ".[test]"
112 | ${pythonLocation}/bin/python -m pip install pyqt5
      - name: Install ipykernel changes
114 | shell: bash -l {0}
115 | run: ${pythonLocation}/bin/python -m pip install -e .
116 | - name: Test qtconsole
117 | shell: bash -l {0}
118 | run: |
119 | cd ${GITHUB_WORKSPACE}/../qtconsole
120 | xvfb-run --auto-servernum ${pythonLocation}/bin/python -m pytest -x -vv -s --full-trace --color=yes qtconsole -k "not test_scroll"
121 |
122 | spyder_kernels:
123 | runs-on: ubuntu-latest
124 | if: false
125 | timeout-minutes: 20
126 | steps:
127 | - name: Checkout
128 | uses: actions/checkout@v6
129 | - name: Setup Python
130 | uses: actions/setup-python@v6
131 | with:
132 | python-version: "3.10"
133 | architecture: "x64"
134 | - name: Install System Packages
135 | run: |
136 | sudo apt-get update
137 | sudo apt-get install -y --no-install-recommends libgl1 libglx-mesa0
138 | - name: Install spyder-kernels dependencies
139 | shell: bash -l {0}
140 | run: |
141 | cd ${GITHUB_WORKSPACE}/..
142 | git clone https://github.com/spyder-ide/spyder-kernels.git
143 | cd spyder-kernels
144 | ${pythonLocation}/bin/python -m pip install -e ".[test]"
      - name: Install ipykernel changes
146 | shell: bash -l {0}
147 | run: ${pythonLocation}/bin/python -m pip install -e .
148 | - name: Test spyder-kernels
149 | shell: bash -l {0}
150 | run: |
151 | cd ${GITHUB_WORKSPACE}/../spyder-kernels
152 | xvfb-run --auto-servernum ${pythonLocation}/bin/python -m pytest -x -vv -s --full-trace --color=yes spyder_kernels
153 |
--------------------------------------------------------------------------------
/ipykernel/jsonutil.py:
--------------------------------------------------------------------------------
1 | """Utilities to manipulate JSON objects."""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 |
6 | import math
7 | import numbers
8 | import re
9 | import types
10 | from binascii import b2a_base64
11 | from datetime import date, datetime
12 |
13 | from jupyter_client._version import version_info as jupyter_client_version
14 |
15 | next_attr_name = "__next__"
16 |
17 | # -----------------------------------------------------------------------------
18 | # Globals and constants
19 | # -----------------------------------------------------------------------------
20 |
21 | # timestamp formats
22 | ISO8601 = "%Y-%m-%dT%H:%M:%S.%f"
23 | ISO8601_PAT = re.compile(
24 | r"^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})(\.\d{1,6})?Z?([\+\-]\d{2}:?\d{2})?$"
25 | )
26 |
27 | # holy crap, strptime is not threadsafe.
28 | # Calling it once at import seems to help.
29 | datetime.strptime("2000-01-01", "%Y-%m-%d")
30 |
31 | # -----------------------------------------------------------------------------
32 | # Classes and functions
33 | # -----------------------------------------------------------------------------
34 |
35 |
# constants for identifying png/jpeg data
# (raw magic-byte prefix of the binary format)
PNG = b"\x89PNG\r\n\x1a\n"
# front of PNG base64-encoded
PNG64 = b"iVBORw0KG"
JPEG = b"\xff\xd8"
# front of JPEG base64-encoded
JPEG64 = b"/9"
# constants for identifying gif data
# (base64-encoded prefixes of the GIF87a and GIF89a headers, respectively)
GIF_64 = b"R0lGODdh"
GIF89_64 = b"R0lGODlh"
# front of PDF base64-encoded
PDF64 = b"JVBER"

# Major version of the installed jupyter_client; json_clean below is a
# no-op when this is >= 7.
JUPYTER_CLIENT_MAJOR_VERSION = jupyter_client_version[0]
50 |
51 |
def encode_images(format_dict):
    """Return a displaypub format dict unchanged (no-op on Python 3).

    On Python 3, bytes objects always represent binary data and are
    therefore already base64-encoded upstream, so there is no ambiguous
    bytestring handling left to do here.

    Parameters
    ----------
    format_dict : dict
        Display data keyed by mime-type.

    Returns
    -------
    dict
        The input dictionary, untouched.
    """
    return format_dict
75 |
76 |
def json_clean(obj):  # pragma: no cover
    """Deprecated, this is a no-op for jupyter-client>=7.

    Clean an object to ensure it's safe to encode in JSON.

    Atomic, immutable objects are returned unmodified. Sets and tuples are
    converted to lists, lists are copied and dicts are also copied.

    Note: dicts whose keys could cause collisions upon encoding (such as a dict
    with both the number 1 and the string '1' as keys) will cause a ValueError
    to be raised.

    Parameters
    ----------
    obj : any python object

    Returns
    -------
    out : object
        A version of the input which will not cause an encoding error when
        encoded as JSON. Note that this function does not *encode* its inputs,
        it simply sanitizes it so that there will be no encoding errors later.

    """
    # jupyter-client >= 7 serializes everything itself; nothing to do.
    if int(JUPYTER_CLIENT_MAJOR_VERSION) >= 7:
        return obj

    # bool is a subclass of int, which is a subclass of numbers.Real,
    # so the checks below must run in exactly this order.
    if isinstance(obj, bool):
        return obj

    if isinstance(obj, numbers.Integral):
        # cast to a plain int in case a subclass overrides __str__
        # (e.g. boost enum, #4598)
        return int(obj)

    if isinstance(obj, numbers.Real):
        # NaN and +/-Inf are not representable in JSON: use their reprs
        if math.isnan(obj) or math.isinf(obj):
            return repr(obj)
        return float(obj)

    # strings and None are already JSON-safe
    if isinstance(obj, (str, type(None))):
        return obj

    if isinstance(obj, bytes):
        # unambiguous binary data is base64-encoded
        # (this probably should have happened upstream)
        return b2a_base64(obj).decode("ascii")

    # tuples, sets, generators, and arbitrary iterators all become lists
    iterable_types = (tuple, set, types.GeneratorType)
    if isinstance(obj, iterable_types) or (
        hasattr(obj, "__iter__") and hasattr(obj, next_attr_name)
    ):
        obj = list(obj)

    if isinstance(obj, list):
        return [json_clean(item) for item in obj]

    if isinstance(obj, dict):
        # First, validate that the dict won't lose data in conversion due to
        # key collisions after stringification. This can happen with keys like
        # True and 'true' or 1 and '1', which collide in JSON.
        if len(obj) != len(set(map(str, obj))):
            msg = (
                "dict cannot be safely converted to JSON: "
                "key collision would lead to dropped values"
            )
            raise ValueError(msg)
        # All clear: build a new dict with stringified keys and clean values.
        return {str(key): json_clean(value) for key, value in obj.items()}

    if isinstance(obj, datetime | date):
        return obj.strftime(ISO8601)

    # we don't understand it, it's probably an unserializable object
    raise ValueError("Can't clean for JSON: %r" % obj)
164 |
--------------------------------------------------------------------------------
/ipykernel/parentpoller.py:
--------------------------------------------------------------------------------
1 | """A parent poller for unix."""
2 | # Copyright (c) IPython Development Team.
3 | # Distributed under the terms of the Modified BSD License.
4 |
5 | try:
6 | import ctypes
7 | except ImportError:
8 | ctypes = None # type:ignore[assignment]
9 | import os
10 | import platform
11 | import signal
12 | import time
13 | import warnings
14 | from _thread import interrupt_main # Py 3
15 | from threading import Thread
16 |
17 | from traitlets.log import get_logger
18 |
19 |
class ParentPollerUnix(Thread):
    """A Unix-specific daemon thread that terminates the program immediately
    when the parent process no longer exists.
    """

    def __init__(self, parent_pid=0):
        """Initialize the poller.

        Parameters
        ----------
        parent_pid : int, optional
            If provided (non-zero), the program will terminate immediately
            when the process parent is no longer this original parent.
            When 0 (the default), the poller instead exits once the process
            has been re-parented to init (ppid == 1).
        """
        super().__init__()
        self.parent_pid = parent_pid
        # daemon thread: must not keep the interpreter alive on shutdown
        self.daemon = True

    def run(self):
        """Run the poller."""
        # We cannot use os.waitpid because it works only for child processes.
        from errno import EINTR

        # before start, check if the passed-in parent pid is valid;
        # if it doesn't match the real ppid, fall back to the
        # "re-parented to init" heuristic (parent_pid == 0) below
        original_ppid = os.getppid()
        if original_ppid != self.parent_pid:
            self.parent_pid = 0

        get_logger().debug(
            "%s: poll for parent change with original parent pid=%d",
            type(self).__name__,
            self.parent_pid,
        )

        # poll once per second until the parent disappears
        while True:
            try:
                ppid = os.getppid()
                # orphaned processes are adopted by pid 1 (init) on Unix
                parent_is_init = not self.parent_pid and ppid == 1
                parent_has_changed = self.parent_pid and ppid != self.parent_pid
                if parent_is_init or parent_has_changed:
                    get_logger().warning("Parent appears to have exited, shutting down.")
                    os._exit(1)
                time.sleep(1.0)
            except OSError as e:
                # interrupted system call: just retry the poll
                if e.errno == EINTR:
                    continue
                raise
67 |
68 |
class ParentPollerWindows(Thread):
    """A Windows-specific daemon thread that listens for a special event that
    signals an interrupt and, optionally, terminates the program immediately
    when the parent process no longer exists.
    """

    def __init__(self, interrupt_handle=None, parent_handle=None):
        """Create the poller. At least one of the optional parameters must be
        provided.

        Parameters
        ----------
        interrupt_handle : HANDLE (int), optional
            If provided, the program will generate a Ctrl+C event when this
            handle is signaled.
        parent_handle : HANDLE (int), optional
            If provided, the program will terminate immediately when this
            handle is signaled.
        """
        assert interrupt_handle or parent_handle
        super().__init__()
        # ctypes is required for the Win32 calls in run()
        if ctypes is None:
            msg = "ParentPollerWindows requires ctypes"  # type:ignore[unreachable]
            raise ImportError(msg)
        self.daemon = True
        self.interrupt_handle = interrupt_handle
        self.parent_handle = parent_handle

    def run(self):
        """Run the poll loop. This method never returns."""
        try:
            from _winapi import INFINITE, WAIT_OBJECT_0  # type:ignore[attr-defined]
        except ImportError:
            # fallback for interpreters that do not provide _winapi
            from _subprocess import INFINITE, WAIT_OBJECT_0

        # Build the list of handle to listen on.
        handles = []
        if self.interrupt_handle:
            handles.append(self.interrupt_handle)
        if self.parent_handle:
            handles.append(self.parent_handle)
        # HANDLEs are pointer-sized, so use 64-bit ints on 64-bit builds
        arch = platform.architecture()[0]
        c_int = ctypes.c_int64 if arch.startswith("64") else ctypes.c_int

        # Listen forever.
        while True:
            result = ctypes.windll.kernel32.WaitForMultipleObjects(  # type:ignore[attr-defined]
                len(handles),  # nCount
                (c_int * len(handles))(*handles),  # lpHandles
                False,  # bWaitAll
                INFINITE,
            )  # dwMilliseconds

            # WAIT_OBJECT_0..WAIT_OBJECT_0+n-1 means handle at that index fired
            if WAIT_OBJECT_0 <= result < len(handles):
                handle = handles[result - WAIT_OBJECT_0]
                # re-arm the event so future signals can be observed
                ctypes.windll.kernel32.ResetEvent(handle)  # type:ignore[attr-defined]

                if handle == self.interrupt_handle:
                    # check if signal handler is callable
                    # to avoid 'int not callable' error (Python issue #23395)
                    if callable(signal.getsignal(signal.SIGINT)):
                        interrupt_main()

                elif handle == self.parent_handle:
                    get_logger().warning("Parent appears to have exited, shutting down.")
                    os._exit(1)
            elif result < 0:
                # NOTE(review): WaitForMultipleObjects returns an unsigned
                # DWORD; this branch presumably relies on ctypes' default
                # signed c_int return type mapping WAIT_FAILED (0xFFFFFFFF)
                # to -1 — confirm.
                # wait failed, just give up and stop polling.
                warnings.warn(
                    """Parent poll failed. If the frontend dies,
                the kernel may be left running. Please let us know
                about your system (bitness, Python, etc.) at
                ipython-dev@scipy.org""",
                    stacklevel=2,
                )
                return
145 |
--------------------------------------------------------------------------------
/tests/test_kernel_direct.py:
--------------------------------------------------------------------------------
1 | """test the IPython Kernel"""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 |
6 | import os
7 | import warnings
8 |
9 | import pytest
10 |
11 | if os.name == "nt":
12 | pytest.skip("skipping tests on windows", allow_module_level=True)
13 |
14 |
async def test_direct_kernel_info_request(kernel):
    """A kernel_info_request on the shell channel yields a kernel_info_reply."""
    response = await kernel.test_shell_message("kernel_info_request", {})
    assert response["header"]["msg_type"] == "kernel_info_reply"
18 |
19 |
async def test_direct_execute_request(kernel):
    """An execute_request on the shell channel yields an execute_reply."""
    response = await kernel.test_shell_message(
        "execute_request", {"code": "hello", "silent": False}
    )
    assert response["header"]["msg_type"] == "execute_reply"
23 |
24 |
async def test_direct_execute_request_aborting(kernel):
    """While the kernel is aborting, execute replies carry status 'aborted'."""
    kernel._aborting = True
    response = await kernel.test_shell_message(
        "execute_request", {"code": "hello", "silent": False}
    )
    assert response["header"]["msg_type"] == "execute_reply"
    assert response["content"]["status"] == "aborted"
30 |
31 |
async def test_direct_execute_request_error(kernel):
    """Smoke test: execute_request called with None arguments must not raise."""
    await kernel.execute_request(None, None, None)
34 |
35 |
async def test_complete_request(kernel):
    """A complete_request on the shell channel yields a complete_reply."""
    response = await kernel.test_shell_message(
        "complete_request", {"code": "hello", "cursor_pos": 0}
    )
    assert response["header"]["msg_type"] == "complete_reply"
39 |
40 |
async def test_inspect_request(kernel):
    """An inspect_request on the shell channel yields an inspect_reply."""
    response = await kernel.test_shell_message(
        "inspect_request", {"code": "hello", "cursor_pos": 0}
    )
    assert response["header"]["msg_type"] == "inspect_reply"
44 |
45 |
async def test_history_request(kernel):
    """Every hist_access_type variant yields a history_reply."""
    for access_type in ("", "tail", "range", "search"):
        response = await kernel.test_shell_message(
            "history_request", {"hist_access_type": access_type, "output": "", "raw": ""}
        )
        assert response["header"]["msg_type"] == "history_reply"
63 |
64 |
async def test_comm_info_request(kernel):
    """A comm_info_request without content yields a comm_info_reply."""
    response = await kernel.test_shell_message("comm_info_request")
    assert response["header"]["msg_type"] == "comm_info_reply"
68 |
69 |
async def test_direct_interrupt_request(kernel):
    """interrupt_request succeeds normally and reports errors from children."""
    response = await kernel.test_control_message("interrupt_request", {})
    assert response["header"]["msg_type"] == "interrupt_reply"
    assert response["content"] == {"status": "ok"}

    # failure path: make interrupting the children raise an OSError
    def fail_with_os_error():
        msg = "evalue"
        raise OSError(msg)

    kernel._send_interrupt_children = fail_with_os_error
    response = await kernel.test_control_message("interrupt_request", {})
    assert response["header"]["msg_type"] == "interrupt_reply"
    content = response["content"]
    assert content["status"] == "error"
    assert content["ename"] == "OSError"
    assert content["evalue"] == "evalue"
    assert len(content["traceback"]) > 0
87 |
88 |
async def test_direct_shutdown_request(kernel):
    """shutdown_request yields a shutdown_reply with and without restart."""
    for restart in (False, True):
        response = await kernel.test_shell_message("shutdown_request", {"restart": restart})
        assert response["header"]["msg_type"] == "shutdown_reply"
94 |
95 |
async def test_is_complete_request(kernel):
    """An is_complete_request yields an is_complete_reply."""
    response = await kernel.test_shell_message("is_complete_request", {"code": "hello"})
    assert response["header"]["msg_type"] == "is_complete_reply"
99 |
100 |
async def test_direct_debug_request(kernel):
    """A debug_request on the control channel yields a debug_reply."""
    response = await kernel.test_control_message("debug_request", {})
    assert response["header"]["msg_type"] == "debug_reply"
104 |
105 |
async def test_deprecated_features(kernel):
    """Deprecated kernel accessors still work, with their warnings suppressed."""
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", DeprecationWarning)
        # _parent_header is the deprecated alias for the parent header dict
        assert isinstance(kernel._parent_header, dict)
        streams = kernel.shell_streams
        assert len(streams) == 1
        assert streams[0] == kernel.shell_stream
        # assigning shell_streams emits a RuntimeWarning; ignore it too
        warnings.simplefilter("ignore", RuntimeWarning)
        kernel.shell_streams = [kernel.shell_stream, kernel.shell_stream]
116 |
117 |
async def test_process_control(kernel):
    """process_control tolerates malformed frames and unknown message types."""
    from jupyter_client.session import DELIM

    class StubFrame:
        def __init__(self, raw):
            self.bytes = raw

    # a frame list that cannot be deserialized must not raise
    await kernel.process_control([StubFrame(DELIM), 1])
    # an unknown message type must not raise either
    await kernel.process_control(kernel._prep_msg("does_not_exist"))
128 |
129 |
def test_should_handle(kernel):
    """A fresh debug_request on the control stream is accepted for handling."""
    message = kernel.session.msg("debug_request", {})
    assert kernel.should_handle(kernel.control_stream, message, []) is True
133 |
134 |
async def test_dispatch_shell(kernel):
    """dispatch_shell tolerates malformed frames and unknown message types."""
    from jupyter_client.session import DELIM

    class StubFrame:
        def __init__(self, raw):
            self.bytes = raw

    # a frame list that cannot be deserialized must not raise
    await kernel.dispatch_shell([StubFrame(DELIM), 1])
    # an unknown message type must not raise either
    await kernel.dispatch_shell(kernel._prep_msg("does_not_exist"))
145 |
146 |
async def test_publish_debug_event(kernel):
    """Smoke test: publishing an empty debug event must not raise."""
    kernel._publish_debug_event({})
149 |
150 |
async def test_connect_request(kernel):
    """Smoke test: connect_request with a minimal ident/payload must not raise."""
    await kernel.connect_request(kernel.shell_stream, "foo", {})
153 |
154 |
async def test_send_interrupt_children(kernel):
    """Smoke test: interrupting child processes must not raise."""
    kernel._send_interrupt_children()
157 |
158 |
159 | # TODO: this causes deadlock
160 | # async def test_direct_usage_request(kernel):
161 | # reply = await kernel.test_control_message("usage_request", {})
162 | # assert reply['header']['msg_type'] == 'usage_reply'
163 |
--------------------------------------------------------------------------------
/tests/test_kernelspec.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) IPython Development Team.
2 | # Distributed under the terms of the Modified BSD License.
3 |
4 | import json
5 | import os
6 | import platform
7 | import shutil
8 | import sys
9 | import tempfile
10 | from unittest import mock
11 |
12 | import pytest
13 | from jupyter_core.paths import jupyter_data_dir
14 |
15 | from ipykernel.kernelspec import (
16 | KERNEL_NAME,
17 | RESOURCES,
18 | InstallIPythonKernelSpecApp,
19 | get_kernel_dict,
20 | install,
21 | make_ipkernel_cmd,
22 | write_kernel_spec,
23 | )
24 |
25 | pjoin = os.path.join
26 | is_cpython = platform.python_implementation() == "CPython"
27 |
28 |
def test_make_ipkernel_cmd():
    """The default command launches ipykernel_launcher with this interpreter."""
    expected = [sys.executable, "-m", "ipykernel_launcher", "-f", "{connection_file}"]
    assert make_ipkernel_cmd() == expected
32 |
33 |
def assert_kernel_dict(d):
    """Check the basic fields of a default generated kernel spec dict."""
    expected_display = "Python %i (ipykernel)" % sys.version_info[0]
    assert d["argv"] == make_ipkernel_cmd()
    assert d["display_name"] == expected_display
    assert d["language"] == "python"
39 |
def test_get_kernel_dict():
    """get_kernel_dict without arguments produces the default spec."""
    assert_kernel_dict(get_kernel_dict())
43 |
44 |
def assert_kernel_dict_with_profile(d):
    """Check a kernel spec dict generated with '--profile test' extra args."""
    expected_display = "Python %i (ipykernel)" % sys.version_info[0]
    assert d["argv"] == make_ipkernel_cmd(extra_arguments=["--profile", "test"])
    assert d["display_name"] == expected_display
    assert d["language"] == "python"
50 |
def test_get_kernel_dict_with_profile():
    """Extra arguments are forwarded into the generated spec's argv."""
    assert_kernel_dict_with_profile(get_kernel_dict(["--profile", "test"]))
54 |
55 |
def assert_is_spec(path):
    """Verify *path* contains every resource file plus a parseable kernel.json."""
    for fname in os.listdir(RESOURCES):
        assert os.path.exists(pjoin(path, fname))
    kernel_json = pjoin(path, "kernel.json")
    assert os.path.exists(kernel_json)
    # the spec file must be valid JSON
    with open(kernel_json, encoding="utf8") as f:
        json.load(f)
64 |
65 |
def test_write_kernel_spec():
    """write_kernel_spec with no target writes a valid spec somewhere temporary."""
    spec_dir = write_kernel_spec()
    assert_is_spec(spec_dir)
    shutil.rmtree(spec_dir)
70 |
71 |
def test_write_kernel_spec_path():
    """write_kernel_spec honors an explicit target path and returns it."""
    target = os.path.join(tempfile.mkdtemp(), KERNEL_NAME)
    written = write_kernel_spec(target)
    assert written == target
    assert_is_spec(target)
    shutil.rmtree(target)
78 |
79 |
def test_install_kernelspec():
    """The install app writes a spec under <prefix>/share/jupyter/kernels."""
    prefix = tempfile.mkdtemp()
    try:
        InstallIPythonKernelSpecApp.launch_instance(argv=["--prefix", prefix])
        assert_is_spec(os.path.join(prefix, "share", "jupyter", "kernels", KERNEL_NAME))
    finally:
        shutil.rmtree(prefix)
87 |
88 |
def test_install_user():
    """install(user=True) writes the spec into the per-user data dir."""
    fake_home = tempfile.mkdtemp()

    # point HOME at a scratch dir so the user data dir is isolated
    with mock.patch.dict(os.environ, {"HOME": fake_home}):
        install(user=True)
        user_data_dir = jupyter_data_dir()

    assert_is_spec(os.path.join(user_data_dir, "kernels", KERNEL_NAME))
97 |
98 |
def test_install():
    """A plain install() targets the system-wide jupyter path."""
    system_dir = tempfile.mkdtemp()

    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_dir]):
        install()

    assert_is_spec(os.path.join(system_dir, "kernels", KERNEL_NAME))
106 |
107 |
def test_install_profile():
    """Installing with a profile tags both the display name and argv."""
    system_dir = tempfile.mkdtemp()

    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_dir]):
        install(profile="Test")

    spec_path = os.path.join(system_dir, "kernels", KERNEL_NAME, "kernel.json")
    with open(spec_path) as f:
        spec = json.load(f)
    assert spec["display_name"].endswith(" [profile=Test]")
    assert spec["argv"][-2:] == ["--profile", "Test"]
119 |
120 |
def test_install_display_name_overrides_profile():
    """An explicit display_name wins over the profile-derived one."""
    system_dir = tempfile.mkdtemp()

    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_dir]):
        install(display_name="Display", profile="Test")

    spec_path = os.path.join(system_dir, "kernels", KERNEL_NAME, "kernel.json")
    with open(spec_path) as f:
        spec = json.load(f)
    assert spec["display_name"] == "Display"
131 |
132 |
@pytest.mark.parametrize("env", [None, dict(spam="spam"), dict(spam="spam", foo="bar")])
def test_install_env(tmp_path, env):
    """An explicit env mapping is stored verbatim in the spec; None omits it."""
    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [str(tmp_path)]):
        install(env=env)

    spec_file = tmp_path / "kernels" / KERNEL_NAME / "kernel.json"
    with spec_file.open() as f:
        spec = json.load(f)

    if env:
        assert spec["env"] == env
    else:
        assert "env" not in spec
149 |
150 |
@pytest.mark.skipif(sys.version_info < (3, 11) or not is_cpython, reason="requires cPython 3.11")
def test_install_frozen_modules_on():
    """frozen_modules=True sets the pydevd env var and adds no -X option."""
    system_dir = tempfile.mkdtemp()

    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_dir]):
        install(frozen_modules=True)

    spec_path = os.path.join(system_dir, "kernels", KERNEL_NAME, "kernel.json")
    with open(spec_path) as f:
        spec = json.load(f)
    assert spec["env"]["PYDEVD_DISABLE_FILE_VALIDATION"] == "1"
    assert "-Xfrozen_modules=off" not in spec["argv"]
163 |
164 |
@pytest.mark.skipif(sys.version_info < (3, 11) or not is_cpython, reason="requires cPython 3.11")
def test_install_frozen_modules_off():
    """frozen_modules=False adds -Xfrozen_modules=off and no env block."""
    system_dir = tempfile.mkdtemp()

    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_dir]):
        install(frozen_modules=False)

    spec_path = os.path.join(system_dir, "kernels", KERNEL_NAME, "kernel.json")
    with open(spec_path) as f:
        spec = json.load(f)
    assert "env" not in spec
    assert spec["argv"][1] == "-Xfrozen_modules=off"
177 |
178 |
@pytest.mark.skipif(
    sys.version_info >= (3, 11) or is_cpython,
    reason="checks versions older than 3.11 and other Python implementations",
)
def test_install_frozen_modules_no_op():
    """No -Xfrozen_modules option is added on older Pythons or non-cPython.

    (cPython does not error out on unknown X options as of 3.8, but there is
    no reason to pass one where it has no effect.)
    """
    system_dir = tempfile.mkdtemp()

    with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_dir]):
        install(frozen_modules=False)

    spec_path = os.path.join(system_dir, "kernels", KERNEL_NAME, "kernel.json")
    with open(spec_path) as f:
        spec = json.load(f)
    assert "-Xfrozen_modules=off" not in spec["argv"]
195 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: ipykernel tests
2 |
3 | on:
4 | push:
5 | branches: ["main"]
6 | pull_request:
7 | schedule:
8 | - cron: "0 0 * * *"
9 |
10 | concurrency:
11 | group: ci-${{ github.ref }}
12 | cancel-in-progress: true
13 |
14 | defaults:
15 | run:
16 | shell: bash -eux {0}
17 |
18 | jobs:
19 | build:
20 | runs-on: ${{ matrix.os }}
21 | strategy:
22 | fail-fast: false
23 | matrix:
24 | os: [ubuntu-latest, windows-latest, macos-latest]
25 | qt:
26 | - qt5
27 | - qt6
28 | python-version:
29 | - "3.10"
30 | - "3.11"
31 | - "3.12"
32 | - "3.13"
33 | - "3.14"
34 | # 3.14t needs a jupyter-core release
35 | # - "3.14t"
36 | - "pypy-3.11"
37 | exclude:
38 | # qt6 not supported on 3.14 yet
39 | - python-version: "3.14"
40 | qt: qt6
41 | - python-version: "3.13"
42 | qt: qt5
43 | - python-version: "3.12"
44 | qt: qt5
45 | - python-version: "3.11"
46 | qt: qt5
47 |
48 | steps:
49 | - name: Checkout
50 | uses: actions/checkout@v6
51 |
52 | - uses: actions/setup-python@v6
53 | with:
54 | python-version: ${{ matrix.python-version }}
55 |
56 | - name: set qt env
57 | run: |
58 | echo "QT=${{ matrix.qt }}" >> $GITHUB_ENV
59 | shell: bash
60 |
61 | - name: Install hatch
62 | run: |
63 | python --version
64 | python -m pip install hatch
65 |
66 | - name: Run the tests
67 | timeout-minutes: 15
68 | if: ${{ !startsWith( matrix.python-version, 'pypy' ) && !startsWith(matrix.os, 'windows') }}
69 | run: |
70 | hatch run cov:test --cov-fail-under 50
71 |
72 | - name: Run the tests on pypy
73 | timeout-minutes: 15
74 | if: ${{ startsWith( matrix.python-version, 'pypy' ) }}
75 | run: |
76 | hatch run test:nowarn --ignore=tests/test_debugger.py
77 |
78 | - name: Run the tests on Windows
79 | timeout-minutes: 15
80 | if: ${{ !startsWith( matrix.python-version, 'pypy' ) && startsWith(matrix.os, 'windows') }}
81 | run: |
82 | hatch run cov:nowarn
83 |
84 | - name: Check Launcher
85 | run: |
86 | pip install .
87 | cd $HOME
88 | python -m ipykernel_launcher --help
89 |
90 | - uses: jupyterlab/maintainer-tools/.github/actions/upload-coverage@v1
91 |
92 | coverage:
93 | runs-on: ubuntu-latest
94 | needs:
95 | - build
96 | steps:
97 | - uses: actions/checkout@v6
98 | - uses: jupyterlab/maintainer-tools/.github/actions/report-coverage@v1
99 | with:
100 | fail_under: 80
101 |
102 | test_lint:
103 | name: Test Lint
104 | runs-on: ubuntu-latest
105 | steps:
106 | - uses: actions/checkout@v6
107 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
108 | - name: Run Linters
109 | run: |
110 | hatch run typing:test
111 | hatch run lint:build
112 | pipx run interrogate -vv .
113 | pipx run doc8 --max-line-length=200
114 |
115 | check_release:
116 | runs-on: ubuntu-latest
117 | steps:
118 | - uses: actions/checkout@v6
119 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
120 | - uses: jupyter-server/jupyter_releaser/.github/actions/check-release@v2
121 | with:
122 | token: ${{ secrets.GITHUB_TOKEN }}
123 |
124 | test_docs:
125 | runs-on: ubuntu-latest
126 | steps:
127 | - uses: actions/checkout@v6
128 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
129 | - name: Build API docs
130 | run: |
131 | hatch run docs:api
132 | # If this fails run `hatch run docs:api` locally
133 | # and commit.
134 | git status --porcelain
135 | git status -s | grep "A" && exit 1
136 | git status -s | grep "M" && exit 1
137 | echo "API docs done"
138 | - run: hatch run docs:build
139 |
140 | test_without_debugpy:
141 | runs-on: ${{ matrix.os }}
142 | strategy:
143 | fail-fast: false
144 | matrix:
145 | os: [ubuntu-latest]
146 | python-version: ["3.10"]
147 | steps:
148 | - name: Checkout
149 | uses: actions/checkout@v6
150 |
151 | - name: Base Setup
152 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
153 |
154 | - name: Install the Python dependencies without debugpy
155 | run: |
156 | pip install .[test]
157 | pip uninstall --yes debugpy
158 |
159 | - name: List installed packages
160 | run: |
161 | pip freeze
162 |
163 | - name: Run the tests
164 | timeout-minutes: 15
165 | run: pytest -W default -vv
166 |
  test_minimum_versions:
168 | name: Test Minimum Versions
169 | timeout-minutes: 20
170 | runs-on: ubuntu-latest
171 | steps:
172 | - uses: actions/checkout@v6
173 | - name: Base Setup
174 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
175 | with:
176 | dependency_type: minimum
177 | python_version: "3.10"
178 |
179 | - name: List installed packages
180 | run: |
181 | hatch -v run test:list
182 |
183 | - name: Run the unit tests
184 | run: |
185 | hatch -v run test:nowarn
186 |
187 | test_prereleases:
188 | name: Test Prereleases
189 | runs-on: ubuntu-latest
190 | timeout-minutes: 20
191 | steps:
192 | - name: Checkout
193 | uses: actions/checkout@v6
194 | - name: Base Setup
195 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
196 | with:
197 | dependency_type: pre
198 | - name: Run the tests
199 | run: |
200 | hatch run test:nowarn
201 |
202 | make_sdist:
203 | name: Make SDist
204 | runs-on: ubuntu-latest
205 | timeout-minutes: 20
206 | steps:
207 | - uses: actions/checkout@v6
208 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
209 | - uses: jupyterlab/maintainer-tools/.github/actions/make-sdist@v1
210 |
211 | test_sdist:
212 | runs-on: ubuntu-latest
213 | needs: [make_sdist]
214 | name: Install from SDist and Test
215 | timeout-minutes: 20
216 | steps:
217 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
218 | - uses: jupyterlab/maintainer-tools/.github/actions/test-sdist@v1
219 |
220 | link_check:
221 | runs-on: ubuntu-latest
222 | steps:
223 | - uses: actions/checkout@v6
224 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
225 | - uses: jupyterlab/maintainer-tools/.github/actions/check-links@v1
226 |
--------------------------------------------------------------------------------
/tests/test_embed_kernel.py:
--------------------------------------------------------------------------------
1 | """test embed_kernel"""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 |
6 | import json
7 | import os
8 | import sys
9 | import threading
10 | import time
11 | from contextlib import contextmanager
12 | from subprocess import PIPE, Popen
13 |
14 | import pytest
15 | from jupyter_client.blocking.client import BlockingKernelClient
16 | from jupyter_core import paths
17 |
18 | from ipykernel.embed import IPKernelApp, embed_kernel # type:ignore[attr-defined]
19 |
20 | SETUP_TIMEOUT = 60
21 | TIMEOUT = 15
22 |
23 |
24 | if os.name == "nt":
25 | pytest.skip("skipping tests on windows", allow_module_level=True)
26 |
27 |
@contextmanager
def setup_kernel(cmd):
    """start an embedded kernel in a subprocess, and wait for it to be ready

    Parameters
    ----------
    cmd : str
        Python source, run via ``python -c``, expected to embed a kernel.

    Yields
    ------
    client : BlockingKernelClient
        A client connected to the subprocess kernel, with channels started.
    """

    def connection_file_ready(connection_file):
        """Check if connection_file is a readable json file."""
        if not os.path.exists(connection_file):
            return False
        try:
            with open(connection_file) as f:
                json.load(f)
            return True
        except ValueError:
            # file exists but is still being written / not yet valid JSON
            return False

    kernel = Popen([sys.executable, "-c", cmd], stdout=PIPE, stderr=PIPE, encoding="utf-8")
    try:
        # the embedded kernel names its connection file after its own pid
        connection_file = os.path.join(
            paths.jupyter_runtime_dir(),
            "kernel-%i.json" % kernel.pid,
        )
        # wait for connection file to exist, timeout after SETUP_TIMEOUT seconds
        tic = time.time()
        while (
            not connection_file_ready(connection_file)
            and kernel.poll() is None
            and time.time() < tic + SETUP_TIMEOUT
        ):
            time.sleep(0.1)

        # Wait 100ms for the writing to finish
        time.sleep(0.1)

        if kernel.poll() is not None:
            # the subprocess died before becoming ready: surface its stderr
            _o, e = kernel.communicate()
            raise OSError("Kernel failed to start:\n%s" % e)

        if not os.path.exists(connection_file):
            if kernel.poll() is None:
                kernel.terminate()
            raise OSError("Connection file %r never arrived" % connection_file)

        client = BlockingKernelClient(connection_file=connection_file)
        client.load_connection_file()
        client.start_channels()
        client.wait_for_ready()
        try:
            yield client
        finally:
            client.stop_channels()
    finally:
        kernel.terminate()
        kernel.wait()
        # Make sure all the fds get closed.
        for attr in ["stdout", "stderr", "stdin"]:
            fid = getattr(kernel, attr)
            if fid:
                fid.close()
91 |
92 |
@pytest.mark.flaky(max_runs=3)
def test_embed_kernel_basic():
    """IPython.embed_kernel() is basically functional"""
    cmd = (
        "from IPython import embed_kernel\n"
        "def go():\n"
        " a=5\n"
        ' b="hi there"\n'
        " embed_kernel()\n"
        "go()\n"
    )

    with setup_kernel(cmd) as client:
        # a is defined in the embedded namespace
        client.inspect("a")
        reply = client.get_shell_msg(timeout=TIMEOUT)
        assert reply["content"]["found"]

        # executing code in the embedded kernel succeeds
        client.execute("c=a*2")
        reply = client.get_shell_msg(timeout=TIMEOUT)
        assert reply["content"]["status"] == "ok"

        # the new variable c should be 10
        client.inspect("c")
        content = client.get_shell_msg(timeout=TIMEOUT)["content"]
        assert content["found"]
        assert "10" in content["data"]["text/plain"]
127 |
128 |
@pytest.mark.flaky(max_runs=3)
def test_embed_kernel_namespace():
    """IPython.embed_kernel() inherits calling namespace"""
    cmd = (
        "from IPython import embed_kernel\n"
        "def go():\n"
        " a=5\n"
        ' b="hi there"\n'
        " embed_kernel()\n"
        "go()\n"
    )

    with setup_kernel(cmd) as client:
        # local variable a is visible to the embedded kernel
        client.inspect("a")
        content = client.get_shell_msg(timeout=TIMEOUT)["content"]
        assert content["found"]
        assert "5" in content["data"]["text/plain"]

        # local variable b is visible as well
        client.inspect("b")
        content = client.get_shell_msg(timeout=TIMEOUT)["content"]
        assert content["found"]
        assert "hi there" in content["data"]["text/plain"]

        # a name that was never defined is reported as not found
        client.inspect("c")
        content = client.get_shell_msg(timeout=TIMEOUT)["content"]
        assert not content["found"]
166 |
167 |
@pytest.mark.flaky(max_runs=3)
def test_embed_kernel_reentrant():
    """IPython.embed_kernel() can be called multiple times"""
    cmd = (
        "from IPython import embed_kernel\n"
        "count = 0\n"
        "def go():\n"
        " global count\n"
        " embed_kernel()\n"
        " count = count + 1\n"
        "\n"
        "while True: go()\n"
    )

    with setup_kernel(cmd) as client:
        for expected in range(5):
            # count tracks how many times embed_kernel() has returned so far
            client.inspect("count")
            content = client.get_shell_msg(timeout=TIMEOUT)["content"]
            assert content["found"]
            assert str(expected) in content["data"]["text/plain"]

            # exit from embed_kernel
            client.execute("get_ipython().exit_now = True")
            client.get_shell_msg(timeout=TIMEOUT)
            time.sleep(0.2)
198 |
199 |
def test_embed_kernel_func():
    from types import ModuleType

    module = ModuleType("test")

    def trigger_stop():
        # Let embed_kernel() get its loop running, then stop it from outside.
        time.sleep(1)
        app = IPKernelApp.instance()
        app.io_loop.add_callback(app.io_loop.stop)
        IPKernelApp.clear_instance()

    stopper = threading.Thread(target=trigger_stop)
    stopper.start()

    embed_kernel(module, outstream_class=None)
215 |
--------------------------------------------------------------------------------
/tests/utils.py:
--------------------------------------------------------------------------------
1 | """utilities for testing IPython kernels"""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 | from __future__ import annotations
6 |
7 | import atexit
8 | import os
9 | import sys
10 | from contextlib import contextmanager
11 | from queue import Empty
12 | from subprocess import STDOUT
13 | from tempfile import TemporaryDirectory
14 | from time import time
15 |
16 | from jupyter_client import manager
17 | from jupyter_client.blocking.client import BlockingKernelClient
18 |
# Maximum seconds to wait for a kernel to finish starting.
STARTUP_TIMEOUT = 60
# Default timeout (seconds) when waiting for individual replies.
TIMEOUT = 100

# Shared global kernel manager/client; populated lazily by
# start_global_kernel() and torn down by stop_global_kernel().
KM: manager.KernelManager = None  # type:ignore
KC: BlockingKernelClient = None  # type:ignore
24 |
25 |
def start_new_kernel(**kwargs):
    """start a new kernel, and return its Manager and Client

    Integrates with our output capturing for tests.
    """
    # Merge the kernel's stderr into stdout so test capture sees everything.
    launch_args = dict(kwargs, stderr=STDOUT)
    return manager.start_new_kernel(startup_timeout=STARTUP_TIMEOUT, **launch_args)
33 |
34 |
def flush_channels(kc=None):
    """flush any messages waiting on the queue"""
    from .test_message_spec import validate_message

    client = KC if kc is None else kc
    # Drain both the shell and iopub queues, validating each message.
    for getter in (client.get_shell_msg, client.get_iopub_msg):
        while True:
            try:
                msg = getter(timeout=0.1)
            except Empty:
                break
            validate_message(msg)
49 |
50 |
51 | def get_reply(kc, msg_id, timeout=TIMEOUT, channel="shell"):
52 | t0 = time()
53 | while True:
54 | get_msg = getattr(kc, f"get_{channel}_msg")
55 | reply = get_msg(timeout=timeout)
56 | if reply["parent_header"]["msg_id"] == msg_id:
57 | break
58 | # Allow debugging ignored replies
59 | print(f"Ignoring reply not to {msg_id}: {reply}")
60 | t1 = time()
61 | timeout -= t1 - t0
62 | t0 = t1
63 | return reply
64 |
65 |
66 | def get_replies(kc, msg_ids: list[str], timeout=TIMEOUT, channel="shell"):
67 | # Get replies which may arrive in any order as they may be running on different subshells.
68 | # Replies are returned in the same order as the msg_ids, not in the order of arrival.
69 | count = 0
70 | replies = [None] * len(msg_ids)
71 | while count < len(msg_ids):
72 | get_msg = getattr(kc, f"get_{channel}_msg")
73 | reply = get_msg(timeout=timeout)
74 | try:
75 | msg_id = reply["parent_header"]["msg_id"]
76 | replies[msg_ids.index(msg_id)] = reply
77 | count += 1
78 | except ValueError:
79 | # Allow debugging ignored replies
80 | print(f"Ignoring reply not to any of {msg_ids}: {reply}")
81 | return replies
82 |
83 |
84 | def execute(code="", kc=None, **kwargs):
85 | """wrapper for doing common steps for validating an execution request"""
86 | from .test_message_spec import validate_message
87 |
88 | if kc is None:
89 | kc = KC
90 | msg_id = kc.execute(code=code, **kwargs)
91 | reply = get_reply(kc, msg_id, TIMEOUT)
92 | validate_message(reply, "execute_reply", msg_id)
93 | busy = kc.get_iopub_msg(timeout=TIMEOUT)
94 | validate_message(busy, "status", msg_id)
95 | assert busy["content"]["execution_state"] == "busy"
96 |
97 | if not kwargs.get("silent"):
98 | execute_input = kc.get_iopub_msg(timeout=TIMEOUT)
99 | validate_message(execute_input, "execute_input", msg_id)
100 | assert execute_input["content"]["code"] == code
101 |
102 | # show tracebacks if present for debugging
103 | if reply["content"].get("traceback"):
104 | print("\n".join(reply["content"]["traceback"]), file=sys.stderr)
105 |
106 | return msg_id, reply["content"]
107 |
108 |
def start_global_kernel():
    """start the global kernel (if it isn't running) and return its client"""
    global KM, KC
    if KM is not None:
        # Already running: just drain any stale messages before reuse.
        flush_channels(KC)
        return KC
    KM, KC = start_new_kernel()
    atexit.register(stop_global_kernel)
    return KC
118 |
119 |
@contextmanager
def kernel():
    """Context manager for the global kernel instance

    Should be used for most kernel tests

    Returns
    -------
    kernel_client: connected KernelClient instance
    """
    kc = start_global_kernel()
    yield kc
131 |
132 |
def uses_kernel(test_f):
    """Decorator for tests that use the global kernel.

    The decorated test is invoked with the global kernel's client as its
    single argument.
    """
    from functools import wraps

    # functools.wraps preserves __name__/__doc__ like the old manual copy did,
    # plus __module__, __qualname__, and __wrapped__ for better introspection.
    @wraps(test_f)
    def wrapped_test():
        with kernel() as kc:
            test_f(kc)

    return wrapped_test
143 |
144 |
def stop_global_kernel():
    """Stop the global shared kernel instance, if it exists.

    Safe to call when no global kernel was ever started, and safe to call
    more than once (a second call is a no-op).
    """
    global KM, KC
    # Guard: the old code called KC.stop_channels() unconditionally, which
    # raised AttributeError if the kernel was never started or this was
    # already called once (both globals are reset to None below).
    if KC is not None:
        KC.stop_channels()
        KC = None  # type:ignore
    if KM is None:
        return
    KM.shutdown_kernel(now=True)
    KM = None  # type:ignore
154 |
155 |
def new_kernel(argv=None):
    """Context manager for a new kernel in a subprocess

    Should only be used for tests where the kernel must not be reused.

    Returns
    -------
    kernel_client: connected KernelClient instance
    """
    # Fold stderr into stdout so test output capture sees everything.
    launch_kwargs = {"stderr": STDOUT}
    if argv is not None:
        launch_kwargs["extra_arguments"] = argv
    return manager.run_kernel(**launch_kwargs)
169 |
170 |
171 | def assemble_output(get_msg, timeout=1, parent_msg_id: str | None = None, raise_error=True):
172 | """assemble stdout/err from an execution"""
173 | stdout = ""
174 | stderr = ""
175 | while True:
176 | msg = get_msg(timeout=timeout)
177 | msg_type = msg["msg_type"]
178 | content = msg["content"]
179 |
180 | if parent_msg_id is not None and msg["parent_header"]["msg_id"] != parent_msg_id:
181 | # Ignore message for wrong parent message
182 | continue
183 |
184 | if msg_type == "status" and content["execution_state"] == "idle":
185 | # idle message signals end of output
186 | break
187 | elif msg["msg_type"] == "stream":
188 | if content["name"] == "stdout":
189 | stdout += content["text"]
190 | elif content["name"] == "stderr":
191 | stderr += content["text"]
192 | else:
193 | raise KeyError("bad stream: %r" % content["name"])
194 | elif raise_error and msg["msg_type"] == "error":
195 | tb = "\n".join(msg["content"]["traceback"])
196 | msg = f"Execution failed with:\n{tb}"
197 | if stderr:
198 | msg = f"{msg}\nstderr:\n{stderr}"
199 | raise RuntimeError(msg)
200 | else:
201 | # other output, ignored
202 | pass
203 | return stdout, stderr
204 |
205 |
206 | def wait_for_idle(kc, parent_msg_id: str | None = None):
207 | while True:
208 | msg = kc.get_iopub_msg(timeout=1)
209 | msg_type = msg["msg_type"]
210 | content = msg["content"]
211 | if (
212 | msg_type == "status"
213 | and content["execution_state"] == "idle"
214 | and (parent_msg_id is None or msg["parent_header"]["msg_id"] == parent_msg_id)
215 | ):
216 | break
217 |
218 |
class TemporaryWorkingDirectory(TemporaryDirectory):
    """A TemporaryDirectory that is also the current working directory.

    On entry the cwd is switched to the new temporary directory; on exit
    the previous cwd is restored before the directory is cleaned up.

        with TemporaryWorkingDirectory() as tmpdir:
            ...
    """

    def __enter__(self):
        # Remember the original cwd so __exit__ can restore it.
        self.old_wd = os.getcwd()
        os.chdir(self.name)
        return super().__enter__()

    def __exit__(self, exc, value, tb):
        # Restore the cwd before the temp dir is removed.
        os.chdir(self.old_wd)
        return super().__exit__(exc, value, tb)
237 |
--------------------------------------------------------------------------------
/ipykernel/inprocess/ipkernel.py:
--------------------------------------------------------------------------------
1 | """An in-process kernel"""
2 |
3 | # Copyright (c) IPython Development Team.
4 | # Distributed under the terms of the Modified BSD License.
5 | from __future__ import annotations
6 |
7 | import logging
8 | import sys
9 | from contextlib import contextmanager
10 |
11 | from IPython.core.interactiveshell import InteractiveShellABC
12 | from traitlets import Any, Enum, Instance, List, Type, default
13 |
14 | from ipykernel.ipkernel import IPythonKernel
15 | from ipykernel.jsonutil import json_clean
16 | from ipykernel.zmqshell import ZMQInteractiveShell
17 |
18 | from ..iostream import BackgroundSocket, IOPubThread, OutStream
19 | from .constants import INPROCESS_KEY
20 | from .socket import DummySocket
21 |
22 | # -----------------------------------------------------------------------------
23 | # Main kernel class
24 | # -----------------------------------------------------------------------------
25 |
26 |
class InProcessKernel(IPythonKernel):
    """An in-process kernel.

    The kernel runs inside the frontend's process: the usual ZMQ sockets
    are replaced by :class:`DummySocket` instances, and published messages
    are dispatched directly to the attached frontends' channels.
    """

    # -------------------------------------------------------------------------
    # InProcessKernel interface
    # -------------------------------------------------------------------------

    # The frontends connected to this kernel.
    frontends = List(Instance("ipykernel.inprocess.client.InProcessKernelClient", allow_none=True))

    # The GUI environment that the kernel is running under. This need not be
    # specified for the normal operation for the kernel, but is required for
    # IPython's GUI support (including pylab). The default is 'inline' because
    # it is safe under all GUI toolkits.
    gui = Enum(("tk", "gtk", "wx", "qt", "qt4", "inline"), default_value="inline")

    # Holds the reply to an input_request; None while a request is pending
    # (see _input_request below).
    raw_input_str = Any()
    # Kernel-side stream objects swapped into sys.stdout/sys.stderr during
    # execution (see _redirected_io).
    stdout = Any()
    stderr = Any()

    # -------------------------------------------------------------------------
    # Kernel interface
    # -------------------------------------------------------------------------

    shell_class = Type(allow_none=True)  # type:ignore[assignment]
    # In-process stand-in for the iopub ZMQ socket.
    _underlying_iopub_socket = Instance(DummySocket, ())
    iopub_thread: IOPubThread = Instance(IOPubThread)  # type:ignore[assignment]

    shell_stream = Instance(DummySocket, ())  # type:ignore[assignment]

    @default("iopub_thread")
    def _default_iopub_thread(self):
        # Start the IOPub thread eagerly so output can be published right away.
        thread = IOPubThread(self._underlying_iopub_socket)
        thread.start()
        return thread

    iopub_socket: BackgroundSocket = Instance(BackgroundSocket)  # type:ignore[assignment]

    @default("iopub_socket")
    def _default_iopub_socket(self):
        return self.iopub_thread.background_socket

    stdin_socket = Instance(DummySocket, ())

    def __init__(self, **traits):
        """Initialize the kernel."""
        super().__init__(**traits)

        # Forward every message published on iopub to the attached frontends.
        self._underlying_iopub_socket.observe(self._io_dispatch, names=["message_sent"])
        if self.shell:
            self.shell.kernel = self

    async def execute_request(self, stream, ident, parent):
        """Override for temporary IO redirection."""
        with self._redirected_io():
            await super().execute_request(stream, ident, parent)

    def start(self):
        """Override registration of dispatchers for streams."""
        # No real streams to register on; just reset the exit flag.
        if self.shell:
            self.shell.exit_now = False

    def _abort_queues(self, subshell_id: str | None = ...):
        """The in-process kernel doesn't abort requests."""

    def _input_request(self, prompt, ident, parent, password=False):
        # Flush output before making the request.
        self.raw_input_str = None
        if sys.stdout is not None:
            sys.stdout.flush()
        if sys.stderr is not None:
            sys.stderr.flush()

        # Send the input request.
        content = json_clean(dict(prompt=prompt, password=password))
        assert self.session is not None
        msg = self.session.msg("input_request", content, parent)
        # Deliver the request to the frontend whose session originated it.
        for frontend in self.frontends:
            assert frontend is not None
            if frontend.session.session == parent["header"]["session"]:
                frontend.stdin_channel.call_handlers(msg)
                break
        else:
            logging.error("No frontend found for raw_input request")
            return ""

        # Await a response: pump the matched frontend's stdin channel until
        # its handler stores the reply in self.raw_input_str.
        while self.raw_input_str is None:
            frontend.stdin_channel.process_events()
        return self.raw_input_str  # type:ignore[unreachable]

    # -------------------------------------------------------------------------
    # Protected interface
    # -------------------------------------------------------------------------

    @contextmanager
    def _redirected_io(self):
        """Temporarily redirect IO to the kernel."""
        sys_stdout, sys_stderr = sys.stdout, sys.stderr
        try:
            sys.stdout, sys.stderr = self.stdout, self.stderr
            yield
        finally:
            # Always restore the originals, even if execution raised.
            sys.stdout, sys.stderr = sys_stdout, sys_stderr

    # ------ Trait change handlers --------------------------------------------

    def _io_dispatch(self, change):
        """Called when a message is sent to the IO socket."""
        assert self.iopub_socket.io_thread is not None
        assert self.session is not None
        # Read the message back off the dummy socket and hand it to every
        # attached frontend's iopub channel.
        _ident, msg = self.session.recv(self.iopub_socket.io_thread.socket, copy=False)
        for frontend in self.frontends:
            assert frontend is not None
            frontend.iopub_channel.call_handlers(msg)

    # ------ Trait initializers -----------------------------------------------

    @default("log")
    def _default_log(self):
        return logging.getLogger(__name__)

    @default("session")
    def _default_session(self):
        from jupyter_client.session import Session

        # Use the shared in-process key so frontends and kernel agree.
        return Session(parent=self, key=INPROCESS_KEY)

    @default("shell_class")
    def _default_shell_class(self):
        return InProcessInteractiveShell

    @default("stdout")
    def _default_stdout(self):
        # watchfd=False: there is no real file descriptor to watch in-process.
        return OutStream(self.session, self.iopub_thread, "stdout", watchfd=False)

    @default("stderr")
    def _default_stderr(self):
        return OutStream(self.session, self.iopub_thread, "stderr", watchfd=False)
166 |
167 |
168 | # -----------------------------------------------------------------------------
169 | # Interactive shell subclass
170 | # -----------------------------------------------------------------------------
171 |
172 |
class InProcessInteractiveShell(ZMQInteractiveShell):
    """An in-process interactive shell."""

    kernel: InProcessKernel = Instance(
        "ipykernel.inprocess.ipkernel.InProcessKernel", allow_none=True
    )  # type:ignore[assignment]

    # -------------------------------------------------------------------------
    # InteractiveShell interface
    # -------------------------------------------------------------------------

    def enable_gui(self, gui=None):
        """Enable GUI integration for the kernel."""
        # Fall back to the kernel's configured toolkit when none is given.
        self.active_eventloop = gui or self.kernel.gui

    def enable_matplotlib(self, gui=None):
        """Enable matplotlib integration for the kernel."""
        return super().enable_matplotlib(gui or self.kernel.gui)

    def enable_pylab(self, gui=None, import_all=True):
        """Activate pylab support at runtime."""
        return super().enable_pylab(gui or self.kernel.gui, import_all)


InteractiveShellABC.register(InProcessInteractiveShell)
204 |
--------------------------------------------------------------------------------