├── .git-blame-ignore-revs ├── .github ├── dependabot.yml └── workflows │ ├── codeql-analysis.yml │ ├── downstream.yml │ ├── enforce-label.yml │ ├── prep-release.yml │ ├── publish-changelog.yml │ ├── publish-release.yml │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── RELEASE.md ├── SECURITY.md ├── docs ├── Makefile ├── api │ ├── jupyter_core.rst │ ├── jupyter_core.utils.rst │ └── modules.rst ├── conf.py ├── index.rst └── make.bat ├── examples ├── completions-zsh └── jupyter-completion.bash ├── jupyter.py ├── jupyter_core ├── __init__.py ├── __main__.py ├── application.py ├── command.py ├── migrate.py ├── paths.py ├── py.typed ├── troubleshoot.py ├── utils │ └── __init__.py └── version.py ├── pyproject.toml ├── scripts ├── jupyter └── jupyter-migrate └── tests ├── __init__.py ├── dotipython ├── nbextensions │ └── myext.js └── profile_default │ ├── ipython_config.py │ ├── ipython_console_config.py │ ├── ipython_kernel_config.py │ ├── ipython_nbconvert_config.py │ ├── ipython_notebook_config.py │ └── static │ └── custom │ ├── custom.css │ └── custom.js ├── dotipython_empty └── profile_default │ ├── ipython_config.py │ ├── ipython_console_config.py │ ├── ipython_kernel_config.py │ ├── ipython_nbconvert_config.py │ ├── ipython_notebook_config.py │ └── static │ └── custom │ ├── custom.css │ └── custom.js ├── mocking.py ├── test_application.py ├── test_command.py ├── test_migrate.py ├── test_paths.py ├── test_troubleshoot.py └── test_utils.py /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Initial pre-commit reformat 2 | d6a8168b9f6b8a28bba5f7cca3d6a9c31da041b6 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: 
"github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | groups: 8 | actions: 9 | patterns: 10 | - "*" 11 | - package-ecosystem: "pip" 12 | directory: "/" 13 | schedule: 14 | interval: "weekly" 15 | groups: 16 | actions: 17 | patterns: 18 | - "*" 19 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # ******** NOTE ******** 12 | 13 | name: "CodeQL" 14 | 15 | on: 16 | push: 17 | branches: [master] 18 | pull_request: 19 | # The branches below must be a subset of the branches above 20 | branches: [master] 21 | schedule: 22 | # Make a pass every Saturday at 06:41 UTC 23 | - cron: "41 6 * * 6" 24 | 25 | permissions: 26 | security-events: write 27 | 28 | jobs: 29 | analyze: 30 | name: Analyze 31 | runs-on: ubuntu-latest 32 | 33 | strategy: 34 | fail-fast: false 35 | matrix: 36 | language: ["python"] 37 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] 38 | # Learn more... 39 | # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection 40 | 41 | steps: 42 | - name: Checkout repository 43 | uses: actions/checkout@v4 44 | 45 | # Initializes the CodeQL tools for scanning. 
46 | - name: Initialize CodeQL 47 | uses: github/codeql-action/init@v3 48 | with: 49 | languages: ${{ matrix.language }} 50 | # If you wish to specify custom queries, you can do so here or in a config file. 51 | # By default, queries listed here will override any specified in a config file. 52 | # Prefix the list here with "+" to use these queries and those in the config file. 53 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 54 | queries: security-and-quality 55 | 56 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 57 | # If this step fails, then you should remove it and run the build manually (see below) 58 | - name: Autobuild 59 | uses: github/codeql-action/autobuild@v3 60 | 61 | # ℹ️ Command-line programs to run using the OS shell. 62 | # 📚 https://git.io/JvXDl 63 | 64 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 65 | # and modify them (or add more) to build your code if your project 66 | # uses a compiled language 67 | 68 | #- run: | 69 | # make bootstrap 70 | # make release 71 | 72 | - name: Perform CodeQL Analysis 73 | uses: github/codeql-action/analyze@v3 74 | -------------------------------------------------------------------------------- /.github/workflows/downstream.yml: -------------------------------------------------------------------------------- 1 | name: Test downstream projects 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | pull_request: 7 | 8 | concurrency: 9 | group: downstream-${{ github.ref }} 10 | cancel-in-progress: true 11 | 12 | jobs: 13 | ipykernel: 14 | runs-on: ubuntu-latest 15 | timeout-minutes: 15 16 | steps: 17 | - uses: actions/checkout@v4 18 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 19 | - uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 20 | with: 21 | package_name: ipykernel 22 | 23 | nbclient: 24 | runs-on: ubuntu-latest 25 | timeout-minutes: 15 26 | steps: 27 | - uses: actions/checkout@v4 28 
| - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 29 | - uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 30 | with: 31 | package_name: nbclient 32 | env_values: IPYKERNEL_CELL_NAME=\ 33 | 34 | nbconvert: 35 | runs-on: ubuntu-latest 36 | timeout-minutes: 15 37 | steps: 38 | - uses: actions/checkout@v4 39 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 40 | - uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 41 | with: 42 | package_name: nbconvert 43 | package_spec: -e ".[test]" 44 | 45 | jupyter_server: 46 | runs-on: ubuntu-latest 47 | timeout-minutes: 15 48 | steps: 49 | - uses: actions/checkout@v4 50 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 51 | - uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 52 | with: 53 | package_name: jupyter_server 54 | 55 | jupyter_client: 56 | runs-on: ubuntu-latest 57 | steps: 58 | - name: Checkout 59 | uses: actions/checkout@v4 60 | 61 | - name: Base Setup 62 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 63 | 64 | - name: Run Test 65 | uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 66 | with: 67 | package_name: jupyter_client 68 | 69 | pytest_jupyter: 70 | runs-on: ubuntu-latest 71 | steps: 72 | - name: Checkout 73 | uses: actions/checkout@v4 74 | 75 | - name: Base Setup 76 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 77 | 78 | - name: Run Test 79 | uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 80 | with: 81 | package_name: pytest_jupyter 82 | package_spec: -e ".[test,client,server]" 83 | -------------------------------------------------------------------------------- /.github/workflows/enforce-label.yml: -------------------------------------------------------------------------------- 1 | name: Enforce PR label 2 | 3 | on: 4 | pull_request: 5 | types: [labeled, unlabeled, opened, edited, synchronize] 6 | jobs: 7 | enforce-label: 
8 | runs-on: ubuntu-latest 9 | permissions: 10 | pull-requests: write 11 | steps: 12 | - name: enforce-triage-label 13 | uses: jupyterlab/maintainer-tools/.github/actions/enforce-label@v1 14 | -------------------------------------------------------------------------------- /.github/workflows/prep-release.yml: -------------------------------------------------------------------------------- 1 | name: "Step 1: Prep Release" 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | version_spec: 6 | description: "New Version Specifier" 7 | default: "next" 8 | required: false 9 | branch: 10 | description: "The branch to target" 11 | required: false 12 | post_version_spec: 13 | description: "Post Version Specifier" 14 | required: false 15 | silent: 16 | description: "Set a placeholder in the changelog and don't publish the release." 17 | required: false 18 | type: boolean 19 | since: 20 | description: "Use PRs with activity since this date or git reference" 21 | required: false 22 | since_last_stable: 23 | description: "Use PRs with activity since the last stable git tag" 24 | required: false 25 | type: boolean 26 | jobs: 27 | prep_release: 28 | runs-on: ubuntu-latest 29 | permissions: 30 | contents: write 31 | steps: 32 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 33 | 34 | - name: Prep Release 35 | id: prep-release 36 | uses: jupyter-server/jupyter_releaser/.github/actions/prep-release@v2 37 | with: 38 | token: ${{ secrets.GITHUB_TOKEN }} 39 | version_spec: ${{ github.event.inputs.version_spec }} 40 | silent: ${{ github.event.inputs.silent }} 41 | post_version_spec: ${{ github.event.inputs.post_version_spec }} 42 | target: ${{ github.event.inputs.target }} 43 | branch: ${{ github.event.inputs.branch }} 44 | since: ${{ github.event.inputs.since }} 45 | since_last_stable: ${{ github.event.inputs.since_last_stable }} 46 | 47 | - name: "** Next Step **" 48 | run: | 49 | echo "(Optional) Review Draft Release: ${{ steps.prep-release.outputs.release_url }}" 50 | 
-------------------------------------------------------------------------------- /.github/workflows/publish-changelog.yml: -------------------------------------------------------------------------------- 1 | name: "Publish Changelog" 2 | on: 3 | release: 4 | types: [published] 5 | 6 | workflow_dispatch: 7 | inputs: 8 | branch: 9 | description: "The branch to target" 10 | required: false 11 | 12 | jobs: 13 | publish_changelog: 14 | runs-on: ubuntu-latest 15 | environment: release 16 | steps: 17 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 18 | 19 | - uses: actions/create-github-app-token@v2 20 | id: app-token 21 | with: 22 | app-id: ${{ vars.APP_ID }} 23 | private-key: ${{ secrets.APP_PRIVATE_KEY }} 24 | 25 | - name: Publish changelog 26 | id: publish-changelog 27 | uses: jupyter-server/jupyter_releaser/.github/actions/publish-changelog@v2 28 | with: 29 | token: ${{ steps.app-token.outputs.token }} 30 | branch: ${{ github.event.inputs.branch }} 31 | 32 | - name: "** Next Step **" 33 | run: | 34 | echo "Merge the changelog update PR: ${{ steps.publish-changelog.outputs.pr_url }}" 35 | -------------------------------------------------------------------------------- /.github/workflows/publish-release.yml: -------------------------------------------------------------------------------- 1 | name: "Step 2: Publish Release" 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | branch: 6 | description: "The target branch" 7 | required: false 8 | release_url: 9 | description: "The URL of the draft GitHub release" 10 | required: false 11 | steps_to_skip: 12 | description: "Comma separated list of steps to skip" 13 | required: false 14 | 15 | jobs: 16 | publish_release: 17 | runs-on: ubuntu-latest 18 | environment: release 19 | permissions: 20 | id-token: write 21 | steps: 22 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 23 | 24 | - uses: actions/create-github-app-token@v2 25 | id: app-token 26 | with: 27 | app-id: ${{ vars.APP_ID }} 28 | 
private-key: ${{ secrets.APP_PRIVATE_KEY }} 29 | 30 | - name: Populate Release 31 | id: populate-release 32 | uses: jupyter-server/jupyter_releaser/.github/actions/populate-release@v2 33 | with: 34 | token: ${{ steps.app-token.outputs.token }} 35 | branch: ${{ github.event.inputs.branch }} 36 | release_url: ${{ github.event.inputs.release_url }} 37 | steps_to_skip: ${{ github.event.inputs.steps_to_skip }} 38 | 39 | - name: Finalize Release 40 | id: finalize-release 41 | uses: jupyter-server/jupyter_releaser/.github/actions/finalize-release@v2 42 | with: 43 | token: ${{ steps.app-token.outputs.token }} 44 | release_url: ${{ steps.populate-release.outputs.release_url }} 45 | 46 | - name: "** Next Step **" 47 | if: ${{ success() }} 48 | run: | 49 | echo "Verify the final release" 50 | echo ${{ steps.finalize-release.outputs.release_url }} 51 | 52 | - name: "** Failure Message **" 53 | if: ${{ failure() }} 54 | run: | 55 | echo "Failed to Publish the Draft Release Url:" 56 | echo ${{ steps.populate-release.outputs.release_url }} 57 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Python package 5 | 6 | on: 7 | push: 8 | branches: ["main"] 9 | pull_request: 10 | schedule: 11 | - cron: "0 8 * * *" 12 | 13 | concurrency: 14 | group: >- 15 | ${{ github.workflow }}- 16 | ${{ github.ref_type }}- 17 | ${{ github.event.pull_request.number || github.sha }} 18 | cancel-in-progress: true 19 | 20 | defaults: 21 | run: 22 | shell: bash -eux {0} 23 | 24 | jobs: 25 | build: 26 | runs-on: ${{ matrix.os }} 27 | strategy: 28 | fail-fast: false 29 | matrix: 30 | os: [ubuntu-latest, windows-latest, 
macos-latest] 31 | python-version: ["3.8", "3.12", "3.13"] 32 | include: 33 | - os: windows-latest 34 | python-version: "3.9" 35 | - os: ubuntu-latest 36 | python-version: "3.11" 37 | - os: ubuntu-latest 38 | python-version: "pypy-3.9" 39 | - os: macos-latest 40 | python-version: "3.10" 41 | steps: 42 | - uses: actions/checkout@v4 43 | - uses: actions/setup-python@v5 44 | with: 45 | python-version: ${{ matrix.python-version }} 46 | cache: pip 47 | 48 | - name: install hatch 49 | run: | 50 | pip install --upgrade pip pipx 51 | if [[ "${{ matrix.python-version }}" == "3.8" ]]; then 52 | PIPX_HOME=$HOME/.pipx_home 53 | mkdir $PIPX_HOME 54 | fi 55 | pipx install hatch 56 | 57 | - name: Test 58 | run: | 59 | hatch run cov:test 60 | - name: Check CLI 61 | run: | 62 | pip install . 63 | cd $HOME 64 | jupyter troubleshoot 65 | - uses: jupyterlab/maintainer-tools/.github/actions/upload-coverage@v1 66 | 67 | coverage: 68 | runs-on: ubuntu-latest 69 | if: always() 70 | needs: 71 | - build 72 | steps: 73 | - uses: actions/checkout@v4 74 | - uses: jupyterlab/maintainer-tools/.github/actions/report-coverage@v1 75 | 76 | test_minimum_versions: 77 | name: Test Minimum Versions 78 | timeout-minutes: 20 79 | runs-on: ubuntu-latest 80 | steps: 81 | - uses: actions/checkout@v4 82 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 83 | with: 84 | dependency_type: minimum 85 | - name: Run the unit tests 86 | run: | 87 | hatch run test:nowarn || hatch run test:nowarn --lf 88 | 89 | test_prereleases: 90 | name: Test Prereleases 91 | runs-on: ubuntu-latest 92 | timeout-minutes: 20 93 | steps: 94 | - uses: actions/checkout@v4 95 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 96 | with: 97 | dependency_type: pre 98 | - name: Run the tests 99 | run: | 100 | hatch run test:nowarn || hatch run test:nowarn --lf 101 | 102 | test_docs: 103 | name: Test Docs 104 | runs-on: ubuntu-latest 105 | steps: 106 | - uses: actions/checkout@v4 107 | - uses: 
jupyterlab/maintainer-tools/.github/actions/base-setup@v1 108 | - name: Build API docs 109 | run: | 110 | hatch run docs:api 111 | # If this fails run `hatch run docs:api` locally 112 | # and commit. 113 | git status --porcelain 114 | git status -s | grep "A" && exit 1 115 | git status -s | grep "M" && exit 1 116 | echo "API docs done" 117 | - run: hatch run docs:build 118 | 119 | test_lint: 120 | name: Test Lint 121 | runs-on: ubuntu-latest 122 | steps: 123 | - uses: actions/checkout@v4 124 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 125 | - name: Run Linters 126 | run: | 127 | hatch run typing:test 128 | hatch run lint:build 129 | hatch run typing:test 130 | pipx run interrogate -v . 131 | pipx run doc8 --max-line-length=200 132 | 133 | make_sdist: 134 | name: Make SDist 135 | runs-on: ubuntu-latest 136 | timeout-minutes: 10 137 | steps: 138 | - uses: actions/checkout@v4 139 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 140 | - uses: jupyterlab/maintainer-tools/.github/actions/make-sdist@v1 141 | 142 | test_sdist: 143 | runs-on: ubuntu-latest 144 | needs: [make_sdist] 145 | name: Install from SDist and Test 146 | timeout-minutes: 20 147 | steps: 148 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 149 | - uses: jupyterlab/maintainer-tools/.github/actions/test-sdist@v1 150 | 151 | check_release: 152 | runs-on: ubuntu-latest 153 | steps: 154 | - name: Checkout 155 | uses: actions/checkout@v4 156 | - name: Base Setup 157 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 158 | - name: Install Dependencies 159 | run: | 160 | pip install -e . 
161 | - name: Check Release 162 | uses: jupyter-server/jupyter_releaser/.github/actions/check-release@v2 163 | with: 164 | token: ${{ secrets.GITHUB_TOKEN }} 165 | 166 | check_links: 167 | name: Check Links 168 | runs-on: ubuntu-latest 169 | timeout-minutes: 15 170 | steps: 171 | - uses: actions/checkout@v4 172 | - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 173 | - uses: jupyterlab/maintainer-tools/.github/actions/check-links@v1 174 | 175 | tests_check: # This job does nothing and is only used for the branch protection 176 | if: always() 177 | needs: 178 | - coverage 179 | - test_lint 180 | - test_docs 181 | - test_minimum_versions 182 | - test_prereleases 183 | - check_links 184 | - check_release 185 | - test_sdist 186 | runs-on: ubuntu-latest 187 | steps: 188 | - name: Decide whether the needed jobs succeeded or failed 189 | uses: re-actors/alls-green@release/v1 190 | with: 191 | jobs: ${{ toJSON(needs) }} 192 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | MANIFEST 2 | build 3 | dist 4 | _build 5 | docs/gh-pages 6 | *.py[co] 7 | __pycache__ 8 | *.egg-info 9 | *~ 10 | *.bak 11 | .ipynb_checkpoints 12 | .tox 13 | .DS_Store 14 | \#*# 15 | .#* 16 | .coverage 17 | htmlcov 18 | .cache 19 | .idea 20 | docs/changelog.md 21 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | ci: 2 | autoupdate_schedule: monthly 3 | autoupdate_commit_msg: "chore: update pre-commit hooks" 4 | 5 | repos: 6 | - repo: https://github.com/pre-commit/pre-commit-hooks 7 | rev: v5.0.0 8 | hooks: 9 | - id: check-case-conflict 10 | - id: check-ast 11 | - id: check-docstring-first 12 | - id: check-executables-have-shebangs 13 | - id: check-added-large-files 14 | - id: check-case-conflict 15 | - id: 
check-merge-conflict 16 | - id: check-json 17 | - id: check-toml 18 | - id: check-yaml 19 | - id: debug-statements 20 | - id: end-of-file-fixer 21 | - id: trailing-whitespace 22 | 23 | - repo: https://github.com/python-jsonschema/check-jsonschema 24 | rev: 0.33.0 25 | hooks: 26 | - id: check-github-workflows 27 | 28 | - repo: https://github.com/executablebooks/mdformat 29 | rev: 0.7.22 30 | hooks: 31 | - id: mdformat 32 | additional_dependencies: 33 | [mdformat-gfm, mdformat-frontmatter, mdformat-footnote] 34 | 35 | - repo: https://github.com/pre-commit/mirrors-prettier 36 | rev: "v4.0.0-alpha.8" 37 | hooks: 38 | - id: prettier 39 | types_or: [yaml, html, json] 40 | 41 | - repo: https://github.com/adamchainz/blacken-docs 42 | rev: "1.19.1" 43 | hooks: 44 | - id: blacken-docs 45 | additional_dependencies: [black==23.7.0] 46 | 47 | - repo: https://github.com/pre-commit/mirrors-mypy 48 | rev: "v1.15.0" 49 | hooks: 50 | - id: mypy 51 | files: jupyter_core 52 | stages: [manual] 53 | additional_dependencies: 54 | ["traitlets>=5.13", "platformdirs>=3.11", "argcomplete"] 55 | 56 | - repo: https://github.com/codespell-project/codespell 57 | rev: "v2.4.1" 58 | hooks: 59 | - id: codespell 60 | args: ["-L", "re-use"] 61 | 62 | - repo: https://github.com/pre-commit/pygrep-hooks 63 | rev: "v1.10.0" 64 | hooks: 65 | - id: rst-backticks 66 | - id: rst-directive-colons 67 | - id: rst-inline-touching-normal 68 | 69 | - repo: https://github.com/astral-sh/ruff-pre-commit 70 | rev: v0.11.11 71 | hooks: 72 | - id: ruff 73 | types_or: [python, jupyter] 74 | args: ["--fix", "--show-fixes"] 75 | - id: ruff-format 76 | types_or: [python, jupyter] 77 | 78 | - repo: https://github.com/scientific-python/cookie 79 | rev: "2025.05.02" 80 | hooks: 81 | - id: sp-repo-review 82 | additional_dependencies: ["repo-review[cli]"] 83 | -------------------------------------------------------------------------------- /.readthedocs.yaml: 
-------------------------------------------------------------------------------- 1 | version: 2 2 | python: 3 | install: 4 | # install itself with pip install . 5 | - method: pip 6 | path: . 7 | extra_requirements: 8 | - docs 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.12" 13 | sphinx: 14 | configuration: docs/conf.py 15 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | We follow the [Jupyter Contributing Guide](https://docs.jupyter.org/en/latest/contributing/content-contributor.html). 4 | 5 | See the [README](https://github.com/jupyter/jupyter_core/blob/main/README.md) on how to set up a development environment. 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | - Copyright (c) 2015-, Jupyter Development Team 4 | 5 | All rights reserved. 6 | 7 | Redistribution and use in source and binary forms, with or without 8 | modification, are permitted provided that the following conditions are met: 9 | 10 | 1. Redistributions of source code must retain the above copyright notice, this 11 | list of conditions and the following disclaimer. 12 | 13 | 2. Redistributions in binary form must reproduce the above copyright notice, 14 | this list of conditions and the following disclaimer in the documentation 15 | and/or other materials provided with the distribution. 16 | 17 | 3. Neither the name of the copyright holder nor the names of its 18 | contributors may be used to endorse or promote products derived from 19 | this software without specific prior written permission. 
20 | 21 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 22 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 23 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 24 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 25 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 26 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 27 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 28 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 29 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 31 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Jupyter Core 2 | 3 | [![Build Status](https://github.com/jupyter/jupyter_core/actions/workflows/test.yml/badge.svg?query=branch%3Amain++)](https://github.com/jupyter/jupyter_core/actions/workflows/test.yml/badge.svg?query=branch%3Amain++) 4 | [![Documentation Status](https://readthedocs.org/projects/jupyter-core/badge/?version=latest)](http://jupyter-core.readthedocs.io/en/latest/?badge=latest) 5 | 6 | Core common functionality of Jupyter projects. 7 | 8 | This package contains base application classes and configuration inherited by other projects. 9 | It doesn't do much on its own. 10 | 11 | # Development Setup 12 | 13 | The [Jupyter Contributor Guides](https://docs.jupyter.org/en/latest/contributing/content-contributor.html) provide extensive information on contributing code or documentation to Jupyter projects. The limited instructions below for setting up a development environment are for your convenience. 14 | 15 | ## Coding 16 | 17 | You'll need Python and `pip` on the search path. 
Clone the Jupyter Core git repository to your computer, for example in `/my/projects/jupyter_core`. 18 | Now create an [editable install](https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs) 19 | and download the dependencies of code and test suite by executing: 20 | 21 | ``` 22 | cd /my/projects/jupyter_core/ 23 | pip install -e ".[test]" 24 | py.test 25 | ``` 26 | 27 | The last command runs the test suite to verify the setup. During development, you can pass filenames to `py.test`, and it will execute only those tests. 28 | 29 | ## Code Styling 30 | 31 | `jupyter_core` has adopted automatic code formatting so you shouldn't 32 | need to worry too much about your code style. 33 | As long as your code is valid, 34 | the pre-commit hook should take care of how it should look. 35 | `pre-commit` and its associated hooks will automatically be installed when 36 | you run `pip install -e ".[test]"` 37 | 38 | To install `pre-commit` manually, run the following: 39 | 40 | ```bash 41 | pip install pre-commit 42 | pre-commit install 43 | ``` 44 | 45 | You can invoke the pre-commit hook by hand at any time with: 46 | 47 | ```bash 48 | pre-commit run 49 | ``` 50 | 51 | which should run any autoformatting on your code 52 | and tell you about any errors it couldn't fix automatically. 53 | You may also install [black integration](https://github.com/psf/black#editor-integration) 54 | into your text editor to format code automatically. 55 | 56 | If you have already committed files before setting up the pre-commit 57 | hook with `pre-commit install`, you can fix everything up using 58 | `pre-commit run --all-files`. You need to make the fixing commit 59 | yourself after that. 60 | 61 | ## Documentation 62 | 63 | The documentation of Jupyter Core is generated from the files in `docs/` using Sphinx. 
Instructions for setting up Sphinx with a selection of optional modules are in the [Documentation Guide](https://docs.jupyter.org/en/latest/contributing/content-contributor.html). You'll also need the `make` command. 64 | For a minimal Sphinx installation to process the Jupyter Core docs, execute: 65 | 66 | ``` 67 | pip install sphinx 68 | ``` 69 | 70 | The following commands build the documentation in HTML format and check for broken links: 71 | 72 | ``` 73 | cd /my/projects/jupyter_core/docs/ 74 | make html linkcheck 75 | ``` 76 | 77 | Point your browser to the following URL to access the generated documentation: 78 | 79 | _file:///my/projects/jupyter_core/docs/\_build/html/index.html_ 80 | 81 | ## About the Jupyter Development Team 82 | 83 | The Jupyter Development Team is the set of all contributors to the Jupyter 84 | project. This includes all of the Jupyter subprojects. A full list with 85 | details is kept in the documentation directory, in the file 86 | `about/credits.txt`. 87 | 88 | The core team that coordinates development on GitHub can be found here: 89 | https://github.com/ipython/. 90 | 91 | ## Our Copyright Policy 92 | 93 | Jupyter uses a shared copyright model. Each contributor maintains copyright 94 | over their contributions to Jupyter. It is important to note that these 95 | contributions are typically only changes to the repositories. Thus, the Jupyter 96 | source code in its entirety is not the copyright of any single person or 97 | institution. Instead, it is the collective copyright of the entire Jupyter 98 | Development Team. If individual contributors want to maintain a record of what 99 | changes/contributions they have specific copyright on, they should indicate 100 | their copyright in the commit message of the change, when they commit the 101 | change to one of the Jupyter repositories. 
102 | 103 | With this in mind, the following banner should be used in any source code file 104 | to indicate the copyright and license terms: 105 | 106 | ``` 107 | # Copyright (c) Jupyter Development Team. 108 | # Distributed under the terms of the Modified BSD License. 109 | ``` 110 | -------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | # Making a Release 2 | 3 | The recommended way to make a release is to use [`jupyter_releaser`](https://jupyter-releaser.readthedocs.io/en/latest/get_started/making_release_from_repo.html). 4 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Reporting a Vulnerability 4 | 5 | All IPython and Jupyter security issues are handled via security@ipython.org. 6 | You can find more information on the Jupyter website: https://jupyter.org/security 7 | 8 | ## Tidelift 9 | 10 | You can report security concerns for Jupyter-Core via the [Tidelift platform](https://tidelift.com/security). 11 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. 
If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext 23 | 24 | help: 25 | @echo "Please use \`make <target>' where <target> is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " applehelp to make an Apple Help Book" 34 | @echo " devhelp to make HTML files and a Devhelp project" 35 | @echo " epub to make an epub" 36 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 37 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 38 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 39 | @echo " text to make text files" 40 | @echo " man to make manual pages" 41 | @echo " texinfo to make Texinfo files" 42 | @echo " info to make Texinfo files and run them through makeinfo" 43 | @echo " gettext to make PO message catalogs" 44 | @echo " changes to make an overview of all changed/added/deprecated items" 45 | @echo " xml to make Docutils-native XML files" 46 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 47 | @echo " linkcheck to check all external links for integrity" 48 | @echo " doctest to run all doctests embedded in the 
documentation (if enabled)" 49 | @echo " coverage to run coverage check of the documentation (if enabled)" 50 | 51 | clean: 52 | rm -rf $(BUILDDIR)/* 53 | 54 | html: 55 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 56 | @echo 57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 58 | 59 | dirhtml: 60 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 61 | @echo 62 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 63 | 64 | singlehtml: 65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 66 | @echo 67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 68 | 69 | pickle: 70 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 71 | @echo 72 | @echo "Build finished; now you can process the pickle files." 73 | 74 | json: 75 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 76 | @echo 77 | @echo "Build finished; now you can process the JSON files." 78 | 79 | htmlhelp: 80 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 81 | @echo 82 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 83 | ".hhp project file in $(BUILDDIR)/htmlhelp." 84 | 85 | qthelp: 86 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 87 | @echo 88 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 89 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 90 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/jupyter_core.qhcp" 91 | @echo "To view the help file:" 92 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/jupyter_core.qhc" 93 | 94 | applehelp: 95 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 96 | @echo 97 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 98 | @echo "N.B. You won't be able to view it unless you put it in" \ 99 | "~/Library/Documentation/Help or install it in your application" \ 100 | "bundle." 
101 | 102 | devhelp: 103 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 104 | @echo 105 | @echo "Build finished." 106 | @echo "To view the help file:" 107 | @echo "# mkdir -p $$HOME/.local/share/devhelp/jupyter_core" 108 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/jupyter_core" 109 | @echo "# devhelp" 110 | 111 | epub: 112 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 113 | @echo 114 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 115 | 116 | latex: 117 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 118 | @echo 119 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 120 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 121 | "(use \`make latexpdf' here to do that automatically)." 122 | 123 | latexpdf: 124 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 125 | @echo "Running LaTeX files through pdflatex..." 126 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 127 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 128 | 129 | latexpdfja: 130 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 131 | @echo "Running LaTeX files through platex and dvipdfmx..." 132 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 133 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 134 | 135 | text: 136 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 137 | @echo 138 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 139 | 140 | man: 141 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 142 | @echo 143 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 144 | 145 | texinfo: 146 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 147 | @echo 148 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 149 | @echo "Run \`make' in that directory to run these through makeinfo" \ 150 | "(use \`make info' here to do that automatically)." 
151 | 152 | info: 153 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 154 | @echo "Running Texinfo files through makeinfo..." 155 | make -C $(BUILDDIR)/texinfo info 156 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 157 | 158 | gettext: 159 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 160 | @echo 161 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 162 | 163 | changes: 164 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 165 | @echo 166 | @echo "The overview file is in $(BUILDDIR)/changes." 167 | 168 | linkcheck: 169 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 170 | @echo 171 | @echo "Link check complete; look for any errors in the above output " \ 172 | "or in $(BUILDDIR)/linkcheck/output.txt." 173 | 174 | doctest: 175 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 176 | @echo "Testing of doctests in the sources finished, look at the " \ 177 | "results in $(BUILDDIR)/doctest/output.txt." 178 | 179 | coverage: 180 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 181 | @echo "Testing of coverage in the sources finished, look at the " \ 182 | "results in $(BUILDDIR)/coverage/python.txt." 183 | 184 | xml: 185 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 186 | @echo 187 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 188 | 189 | pseudoxml: 190 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 191 | @echo 192 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 193 | -------------------------------------------------------------------------------- /docs/api/jupyter_core.rst: -------------------------------------------------------------------------------- 1 | jupyter\_core package 2 | ===================== 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. 
toctree:: 8 | :maxdepth: 4 9 | 10 | jupyter_core.utils 11 | 12 | Submodules 13 | ---------- 14 | 15 | 16 | .. automodule:: jupyter_core.application 17 | :members: 18 | :show-inheritance: 19 | :undoc-members: 20 | 21 | 22 | .. automodule:: jupyter_core.command 23 | :members: 24 | :show-inheritance: 25 | :undoc-members: 26 | 27 | 28 | .. automodule:: jupyter_core.migrate 29 | :members: 30 | :show-inheritance: 31 | :undoc-members: 32 | 33 | 34 | .. automodule:: jupyter_core.paths 35 | :members: 36 | :show-inheritance: 37 | :undoc-members: 38 | 39 | 40 | .. automodule:: jupyter_core.troubleshoot 41 | :members: 42 | :show-inheritance: 43 | :undoc-members: 44 | 45 | 46 | .. automodule:: jupyter_core.version 47 | :members: 48 | :show-inheritance: 49 | :undoc-members: 50 | 51 | Module contents 52 | --------------- 53 | 54 | .. automodule:: jupyter_core 55 | :members: 56 | :show-inheritance: 57 | :undoc-members: 58 | -------------------------------------------------------------------------------- /docs/api/jupyter_core.utils.rst: -------------------------------------------------------------------------------- 1 | jupyter\_core.utils package 2 | =========================== 3 | 4 | Module contents 5 | --------------- 6 | 7 | .. automodule:: jupyter_core.utils 8 | :members: 9 | :show-inheritance: 10 | :undoc-members: 11 | -------------------------------------------------------------------------------- /docs/api/modules.rst: -------------------------------------------------------------------------------- 1 | jupyter_core 2 | ============ 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | jupyter_core 8 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # jupyter_core documentation build configuration file, created by 2 | # sphinx-quickstart on Wed Jun 24 11:51:36 2015. 
3 | # 4 | # This file is execfile()d with the current directory set to its 5 | # containing dir. 6 | # 7 | # Note that not all possible configuration values are present in this 8 | # autogenerated file. 9 | # 10 | # All configuration values have a default; values that are commented out 11 | # serve to show the default. 12 | from __future__ import annotations 13 | 14 | import shutil 15 | from pathlib import Path 16 | 17 | from intersphinx_registry import get_intersphinx_mapping 18 | 19 | from jupyter_core.version import __version__, version_info 20 | 21 | # If extensions (or modules to document with autodoc) are in another directory, 22 | # add these directories to sys.path here. If the directory is relative to the 23 | # documentation root, use os.path.abspath to make it absolute, like shown here. 24 | # sys.path.insert(0, os.path.abspath('.')) 25 | 26 | # -- General configuration ------------------------------------------------ 27 | 28 | # If your documentation needs a minimal Sphinx version, state it here. 29 | # needs_sphinx = '1.0' 30 | 31 | # Add any Sphinx extension module names here, as strings. They can be 32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 33 | # ones. 34 | extensions = [ 35 | "myst_parser", 36 | "sphinx.ext.autodoc", 37 | "sphinx.ext.intersphinx", 38 | "sphinx_autodoc_typehints", 39 | ] 40 | 41 | try: 42 | import enchant # noqa: F401 43 | 44 | extensions += ["sphinxcontrib.spelling"] 45 | except ModuleNotFoundError: 46 | pass 47 | 48 | # Add any paths that contain templates here, relative to this directory. 49 | templates_path = ["_templates"] 50 | 51 | # The suffix(es) of source filenames. 52 | # You can specify multiple suffix as a list of string: 53 | # source_suffix = ['.rst', '.md'] 54 | source_suffix = ".rst" 55 | 56 | # The encoding of source files. 57 | # source_encoding = 'utf-8-sig' 58 | 59 | # The master toctree document. 60 | master_doc = "index" 61 | 62 | # General information about the project. 
63 | project = "jupyter_core" 64 | copyright = "2015, Jupyter Development Team" 65 | author = "Jupyter Development Team" 66 | 67 | # The version info for the project you're documenting, acts as replacement for 68 | # |version| and |release|, also used in various other places throughout the 69 | # built documents. 70 | # 71 | # The short X.Y version. 72 | major, minor = version_info[:2] 73 | version = f"{major}.{minor}" 74 | # The full version, including alpha/beta/rc tags. 75 | release = __version__ 76 | 77 | # The language for content autogenerated by Sphinx. Refer to documentation 78 | # for a list of supported languages. 79 | # 80 | # This is also used if you do content translation via gettext catalogs. 81 | # Usually you set "language" from the command line for these cases. 82 | language = "en" 83 | 84 | # There are two options for replacing |today|: either, you set today to some 85 | # non-false value, then it is used: 86 | # today = '' 87 | # Else, today_fmt is used as the format for a strftime call. 88 | # today_fmt = '%B %d, %Y' 89 | 90 | # List of patterns, relative to source directory, that match files and 91 | # directories to ignore when looking for source files. 92 | exclude_patterns = ["_build"] 93 | 94 | # The reST default role (used for this markup: `text`) to use for all 95 | # documents. 96 | # default_role = None 97 | 98 | # If true, '()' will be appended to :func: etc. cross-reference text. 99 | # add_function_parentheses = True 100 | 101 | # If true, the current module name will be prepended to all description 102 | # unit titles (such as .. function::). 103 | # add_module_names = True 104 | 105 | # If true, sectionauthor and moduleauthor directives will be shown in the 106 | # output. They are ignored by default. 107 | # show_authors = False 108 | 109 | # The name of the Pygments (syntax highlighting) style to use. 110 | pygments_style = "sphinx" 111 | 112 | # A list of ignored prefixes for module index sorting. 
113 | # modindex_common_prefix = [] 114 | 115 | # If true, keep warnings as "system message" paragraphs in the built documents. 116 | # keep_warnings = False 117 | 118 | # If true, `todo` and `todoList` produce output, else they produce nothing. 119 | todo_include_todos = False 120 | 121 | 122 | # -- Options for HTML output ---------------------------------------------- 123 | 124 | # The theme to use for HTML and HTML Help pages. See the documentation for 125 | # a list of builtin themes. 126 | html_theme = "pydata_sphinx_theme" 127 | 128 | # Theme options are theme-specific and customize the look and feel of a theme 129 | # further. For a list of options available for each theme, see the 130 | # documentation. 131 | html_theme_options = {"navigation_with_keys": False} 132 | 133 | # Add any paths that contain custom themes here, relative to this directory. 134 | # html_theme_path = [] 135 | 136 | # The name for this set of Sphinx documents. If None, it defaults to 137 | # " v documentation". 138 | # html_title = None 139 | 140 | # A shorter title for the navigation bar. Default is the same as html_title. 141 | # html_short_title = None 142 | 143 | # The name of an image file (relative to this directory) to place at the top 144 | # of the sidebar. 145 | # html_logo = None 146 | 147 | # The name of an image file (within the static path) to use as favicon of the 148 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 149 | # pixels large. 150 | # html_favicon = None 151 | 152 | # Add any paths that contain custom static files (such as style sheets) here, 153 | # relative to this directory. They are copied after the builtin static files, 154 | # so a file named "default.css" will overwrite the builtin "default.css". 155 | # html_static_path = ["_static"] 156 | 157 | # Add any extra paths that contain custom files (such as robots.txt or 158 | # .htaccess) here, relative to this directory. 
These files are copied 159 | # directly to the root of the documentation. 160 | # html_extra_path = [] 161 | 162 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 163 | # using the given strftime format. 164 | # html_last_updated_fmt = '%b %d, %Y' 165 | 166 | # If true, SmartyPants will be used to convert quotes and dashes to 167 | # typographically correct entities. 168 | # html_use_smartypants = True 169 | 170 | # Custom sidebar templates, maps document names to template names. 171 | # html_sidebars = {} 172 | 173 | # Additional templates that should be rendered to pages, maps page names to 174 | # template names. 175 | # html_additional_pages = {} 176 | 177 | # If false, no module index is generated. 178 | # html_domain_indices = True 179 | 180 | # If false, no index is generated. 181 | # html_use_index = True 182 | 183 | # If true, the index is split into individual pages for each letter. 184 | # html_split_index = False 185 | 186 | # If true, links to the reST sources are added to the pages. 187 | # html_show_sourcelink = True 188 | 189 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 190 | # html_show_sphinx = True 191 | 192 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 193 | # html_show_copyright = True 194 | 195 | # If true, an OpenSearch description file will be output, and all pages will 196 | # contain a tag referring to it. The value of this option must be the 197 | # base URL from which the finished HTML is served. 198 | # html_use_opensearch = '' 199 | 200 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 201 | # html_file_suffix = None 202 | 203 | # Language to be used for generating the HTML full-text search index. 
204 | # Sphinx supports the following languages: 205 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' 206 | # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' 207 | # html_search_language = 'en' 208 | 209 | # A dictionary with options for the search language support, empty by default. 210 | # Now only 'ja' uses this config value 211 | # html_search_options = {'type': 'default'} 212 | 213 | # The name of a javascript file (relative to the configuration directory) that 214 | # implements a search results scorer. If empty, the default will be used. 215 | # html_search_scorer = 'scorer.js' 216 | 217 | # Output file base name for HTML help builder. 218 | htmlhelp_basename = "jupyter_coredoc" 219 | 220 | # -- Options for LaTeX output --------------------------------------------- 221 | 222 | # latex_elements: dict = {} 223 | 224 | # Grouping the document tree into LaTeX files. List of tuples 225 | # (source start file, target name, title, 226 | # author, documentclass [howto, manual, or own class]). 227 | latex_documents = [ 228 | ( 229 | master_doc, 230 | "jupyter_core.tex", 231 | "jupyter\\_core Documentation", 232 | "Jupyter Development Team", 233 | "manual", 234 | ), 235 | ] 236 | 237 | # The name of an image file (relative to this directory) to place at the top of 238 | # the title page. 239 | # latex_logo = None 240 | 241 | # For "manual" documents, if this is true, then toplevel headings are parts, 242 | # not chapters. 243 | # latex_use_parts = False 244 | 245 | # If true, show page references after internal links. 246 | # latex_show_pagerefs = False 247 | 248 | # If true, show URL addresses after external links. 249 | # latex_show_urls = False 250 | 251 | # Documents to append as an appendix to all manuals. 252 | # latex_appendices = [] 253 | 254 | # If false, no module index is generated. 255 | # latex_domain_indices = True 256 | 257 | 258 | # -- Options for manual page output --------------------------------------- 259 | 260 | # One entry per manual page. 
List of tuples 261 | # (source start file, name, description, authors, manual section). 262 | man_pages = [(master_doc, "jupyter_core", "jupyter_core Documentation", [author], 1)] 263 | 264 | # If true, show URL addresses after external links. 265 | # man_show_urls = False 266 | 267 | 268 | # -- Options for Texinfo output ------------------------------------------- 269 | 270 | # Grouping the document tree into Texinfo files. List of tuples 271 | # (source start file, target name, title, author, 272 | # dir menu entry, description, category) 273 | texinfo_documents = [ 274 | ( 275 | master_doc, 276 | "jupyter_core", 277 | "jupyter_core Documentation", 278 | author, 279 | "jupyter_core", 280 | "One line description of project.", 281 | "Miscellaneous", 282 | ), 283 | ] 284 | 285 | # Documents to append as an appendix to all manuals. 286 | # texinfo_appendices = [] 287 | 288 | # If false, no module index is generated. 289 | # texinfo_domain_indices = True 290 | 291 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 292 | # texinfo_show_urls = 'footnote' 293 | 294 | # If true, do not generate a @detailmenu in the "Top" node's menu. 295 | # texinfo_no_detailmenu = False 296 | 297 | intersphinx_mapping = get_intersphinx_mapping(packages=["python"]) 298 | 299 | 300 | def setup(_): 301 | here = Path(__file__).parent.resolve() 302 | shutil.copy(Path(here, "..", "CHANGELOG.md"), "changelog.md") 303 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | jupyter_core |version| 2 | ====================== 3 | 4 | This documentation only describes the public API in the ``jupyter_core`` 5 | package. For overview information about using Jupyter, see the `main Jupyter 6 | docs `__. 7 | 8 | Contents: 9 | 10 | .. 
toctree:: 11 | :maxdepth: 2 12 | 13 | changelog 14 | API Docs 15 | 16 | Indices and tables 17 | ================== 18 | 19 | * :ref:`genindex` 20 | * :ref:`modindex` 21 | * :ref:`search` 22 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | echo. 
coverage to run coverage check of the documentation if enabled 41 | goto end 42 | ) 43 | 44 | if "%1" == "clean" ( 45 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 46 | del /q /s %BUILDDIR%\* 47 | goto end 48 | ) 49 | 50 | 51 | REM Check if sphinx-build is available and fallback to Python version if any 52 | %SPHINXBUILD% 2> nul 53 | if errorlevel 9009 goto sphinx_python 54 | goto sphinx_ok 55 | 56 | :sphinx_python 57 | 58 | set SPHINXBUILD=python -m sphinx.__init__ 59 | %SPHINXBUILD% 2> nul 60 | if errorlevel 9009 ( 61 | echo. 62 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 63 | echo.installed, then set the SPHINXBUILD environment variable to point 64 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 65 | echo.may add the Sphinx directory to PATH. 66 | echo. 67 | echo.If you don't have Sphinx installed, grab it from 68 | echo.http://sphinx-doc.org/ 69 | exit /b 1 70 | ) 71 | 72 | :sphinx_ok 73 | 74 | 75 | if "%1" == "html" ( 76 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 77 | if errorlevel 1 exit /b 1 78 | echo. 79 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 80 | goto end 81 | ) 82 | 83 | if "%1" == "dirhtml" ( 84 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 85 | if errorlevel 1 exit /b 1 86 | echo. 87 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 88 | goto end 89 | ) 90 | 91 | if "%1" == "singlehtml" ( 92 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 93 | if errorlevel 1 exit /b 1 94 | echo. 95 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 96 | goto end 97 | ) 98 | 99 | if "%1" == "pickle" ( 100 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 101 | if errorlevel 1 exit /b 1 102 | echo. 103 | echo.Build finished; now you can process the pickle files. 
104 | goto end 105 | ) 106 | 107 | if "%1" == "json" ( 108 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 109 | if errorlevel 1 exit /b 1 110 | echo. 111 | echo.Build finished; now you can process the JSON files. 112 | goto end 113 | ) 114 | 115 | if "%1" == "htmlhelp" ( 116 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 117 | if errorlevel 1 exit /b 1 118 | echo. 119 | echo.Build finished; now you can run HTML Help Workshop with the ^ 120 | .hhp project file in %BUILDDIR%/htmlhelp. 121 | goto end 122 | ) 123 | 124 | if "%1" == "qthelp" ( 125 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 129 | .qhcp project file in %BUILDDIR%/qthelp, like this: 130 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\jupyter_core.qhcp 131 | echo.To view the help file: 132 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\jupyter_core.ghc 133 | goto end 134 | ) 135 | 136 | if "%1" == "devhelp" ( 137 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 138 | if errorlevel 1 exit /b 1 139 | echo. 140 | echo.Build finished. 141 | goto end 142 | ) 143 | 144 | if "%1" == "epub" ( 145 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 146 | if errorlevel 1 exit /b 1 147 | echo. 148 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 149 | goto end 150 | ) 151 | 152 | if "%1" == "latex" ( 153 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 154 | if errorlevel 1 exit /b 1 155 | echo. 156 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 157 | goto end 158 | ) 159 | 160 | if "%1" == "latexpdf" ( 161 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 162 | cd %BUILDDIR%/latex 163 | make all-pdf 164 | cd %~dp0 165 | echo. 166 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 
167 | goto end 168 | ) 169 | 170 | if "%1" == "latexpdfja" ( 171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 172 | cd %BUILDDIR%/latex 173 | make all-pdf-ja 174 | cd %~dp0 175 | echo. 176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 177 | goto end 178 | ) 179 | 180 | if "%1" == "text" ( 181 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 182 | if errorlevel 1 exit /b 1 183 | echo. 184 | echo.Build finished. The text files are in %BUILDDIR%/text. 185 | goto end 186 | ) 187 | 188 | if "%1" == "man" ( 189 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 190 | if errorlevel 1 exit /b 1 191 | echo. 192 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 193 | goto end 194 | ) 195 | 196 | if "%1" == "texinfo" ( 197 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 198 | if errorlevel 1 exit /b 1 199 | echo. 200 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 201 | goto end 202 | ) 203 | 204 | if "%1" == "gettext" ( 205 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 206 | if errorlevel 1 exit /b 1 207 | echo. 208 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 209 | goto end 210 | ) 211 | 212 | if "%1" == "changes" ( 213 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 214 | if errorlevel 1 exit /b 1 215 | echo. 216 | echo.The overview file is in %BUILDDIR%/changes. 217 | goto end 218 | ) 219 | 220 | if "%1" == "linkcheck" ( 221 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 222 | if errorlevel 1 exit /b 1 223 | echo. 224 | echo.Link check complete; look for any errors in the above output ^ 225 | or in %BUILDDIR%/linkcheck/output.txt. 226 | goto end 227 | ) 228 | 229 | if "%1" == "doctest" ( 230 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 231 | if errorlevel 1 exit /b 1 232 | echo. 233 | echo.Testing of doctests in the sources finished, look at the ^ 234 | results in %BUILDDIR%/doctest/output.txt. 
235 | goto end 236 | ) 237 | 238 | if "%1" == "coverage" ( 239 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage 240 | if errorlevel 1 exit /b 1 241 | echo. 242 | echo.Testing of coverage in the sources finished, look at the ^ 243 | results in %BUILDDIR%/coverage/python.txt. 244 | goto end 245 | ) 246 | 247 | if "%1" == "xml" ( 248 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 249 | if errorlevel 1 exit /b 1 250 | echo. 251 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 252 | goto end 253 | ) 254 | 255 | if "%1" == "pseudoxml" ( 256 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 257 | if errorlevel 1 exit /b 1 258 | echo. 259 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 260 | goto end 261 | ) 262 | 263 | :end 264 | -------------------------------------------------------------------------------- /examples/completions-zsh: -------------------------------------------------------------------------------- 1 | #compdef jupyter 2 | # A zsh autocompleter for jupyter. 3 | _jupyter() { 4 | IFS=$'\n' 5 | 6 | local context curcontext="$curcontext" state line 7 | typeset -A opt_args 8 | 9 | local ret=1 10 | 11 | _arguments -C \ 12 | '1: :_jupyter_cmds' \ 13 | '(-h,--help)'{-h,--help}'[Show this help message and exit.]' \ 14 | '--version[Show the jupyter command'\''s version and exit.]' \ 15 | '--config-dir[Show Jupyter config dir.]' \ 16 | '--data-dir[Show Jupyter data dir.]' \ 17 | '--runtime-dir[Show Jupyter runtime dir.]' \ 18 | '--paths[Show all Jupyter paths. 
Add --json for machine-readable format.]' \ 19 | '--json[Output paths as machine-readable json.]' \ 20 | '*::arg:->args' \ 21 | && ret=0 22 | 23 | case $state in 24 | (args) 25 | curcontext="${curcontext%:*:*}:jupyter-cmd-$words[1]:" 26 | local update_policy 27 | zstyle -s ":completion:${curcontext}:" cache-policy update_policy 28 | [[ -z "$update_policy" ]] && \ 29 | zstyle ":completion:${curcontext}:" \ 30 | cache-policy _jupyter_options_caching_policy 31 | local cache_id=jupyter_options 32 | local subcmd=$line[1] 33 | if (_cache_invalid $cache_id || ! _retrieve_cache $cache_id || \ 34 | [[ ${(P)+subcmd} -eq 0 ]] || _cache_invalid $cache_id); then 35 | typeset -agU $subcmd 36 | set -A $subcmd $( (jupyter $subcmd --help-all | \ 37 | grep -o '^--[^-][^= ]\+=\?' | sed 's/\([^=]*\)\(=\?\)/(\1)\1\2:/') 2>/dev/null) 38 | _store_cache $cache_id $subcmd 39 | fi 40 | case $subcmd in 41 | (console) 42 | _arguments \ 43 | '1:Source file:_files -g "*.py"' \ 44 | ${(P)subcmd} && ret=0 45 | ;; 46 | (kernelspec) 47 | sub2cmd=$line[2] 48 | case $sub2cmd in 49 | (install|list) 50 | if ([[ ${(P)+sub2cmd} -eq 0 ]]) && ! _retrieve_cache $cache_id; then 51 | typeset -agU $sub2cmd 52 | set -A $sub2cmd $(_jupyter_get_options $subcmd $sub2cmd) 53 | _store_cache $cache_id $sub2cmd 54 | fi 55 | _arguments "1: :_${subcmd}_cmds" ${(P)sub2cmd} && ret=0 56 | ;; 57 | *) 58 | _arguments "1: :_${subcmd}_cmds" ${(P)subcmd} && ret=0 59 | ;; 60 | esac 61 | ;; 62 | (nbconvert) 63 | _arguments \ 64 | '1:Source file:_files -g "*.ipynb"' \ 65 | ${(P)subcmd} && ret=0 66 | ;; 67 | (nbextension) 68 | sub2cmd=$line[2] 69 | case $sub2cmd in 70 | (disable|enable) 71 | if ([[ ${(P)+sub2cmd} -eq 0 ]]) && ! 
_retrieve_cache $cache_id; then 72 | typeset -agU $sub2cmd 73 | set -A $sub2cmd $(_jupyter_get_options $subcmd $sub2cmd) 74 | _store_cache $cache_id $sub2cmd 75 | fi 76 | _arguments \ 77 | '1: :_nbextension_cmds' \ 78 | '2:Extension path:_files' \ 79 | ${(P)sub2cmd} && ret=0 80 | ;; 81 | (install) 82 | if ([[ ${(P)+sub2cmd} -eq 0 ]]) && ! _retrieve_cache $cache_id; then 83 | typeset -agU $sub2cmd 84 | set -A $sub2cmd $(_jupyter_get_options $subcmd $sub2cmd) 85 | _store_cache $cache_id $sub2cmd 86 | fi 87 | _arguments \ 88 | '1: :_nbextension_cmds' \ 89 | '2:Extension path:_files' \ 90 | ${(P)sub2cmd} && ret=0 91 | ;; 92 | *) 93 | _arguments "1: :_${subcmd}_cmds" ${(P)subcmd} && ret=0 94 | ;; 95 | esac 96 | ;; 97 | (notebook) 98 | sub2cmd=$line[2] 99 | case $sub2cmd in 100 | (list) 101 | if ([[ ${(P)+sub2cmd} -eq 0 ]]) && ! _retrieve_cache $cache_id; then 102 | typeset -agU $sub2cmd 103 | set -A $sub2cmd $(_jupyter_get_options $subcmd $sub2cmd) 104 | _store_cache $cache_id $sub2cmd 105 | fi 106 | _arguments "1: :_${subcmd}_cmds" ${(P)sub2cmd} && ret=0 107 | ;; 108 | *) 109 | _arguments "1: :_${subcmd}_cmds" ${(P)subcmd} && ret=0 110 | ;; 111 | esac 112 | ;; 113 | (trust) 114 | _arguments \ 115 | '*:Source file:_files -g "*.ipynb"' \ 116 | ${(P)subcmd} && ret=0 117 | ;; 118 | *) 119 | _arguments ${(P)subcmd} && ret=0 120 | ;; 121 | esac 122 | ;; 123 | esac 124 | } 125 | 126 | _jupyter_options_caching_policy() { 127 | local -a newer 128 | # rebuild if cache does not exist or is more than a week old 129 | newer=( "$1"(Nmw-1) ) 130 | return $#newer 131 | } 132 | 133 | _jupyter_get_options() { 134 | echo '(--help)--help[Print help about subcommand.]:' 135 | (jupyter "$@" --help-all | \ 136 | grep -o '^--[^-][^= ]\+=\?' 
| sed 's/\([^=]*\)\(=\?\)/(\1)\1\2:/') 2>/dev/null 137 | } 138 | 139 | _jupyter_cmds() { 140 | local -a commands 141 | if whence jupyter-console >/dev/null; then 142 | commands=($commands 'console:Launch a Console application inside a terminal.') 143 | fi 144 | if whence jupyter-kernelspec >/dev/null; then 145 | commands=($commands 'kernelspec:Manage Jupyter kernel specifications.') 146 | fi 147 | if whence jupyter-nbconvert >/dev/null; then 148 | commands=($commands 'nbconvert:Convert notebook files to various other formats.') 149 | fi 150 | if whence jupyter-nbextension >/dev/null; then 151 | commands=($commands 'nbextension:Work with Jupyter notebook extensions.') 152 | fi 153 | if whence jupyter-notebook >/dev/null; then 154 | commands=($commands 'notebook:Launch a Tornado based HTML Notebook Server.') 155 | fi 156 | if whence jupyter-qtconsole >/dev/null; then 157 | commands=($commands 'qtconsole:Launch a Console-style application using Qt.') 158 | fi 159 | if whence jupyter-trust >/dev/null; then 160 | commands=($commands 'trust:Sign Jupyter notebooks with your key, to trust their dynamic output.') 161 | fi 162 | _describe -t commands 'jupyter command' commands "$@" 163 | } 164 | 165 | _kernelspec_cmds() { 166 | local commands; commands=( 167 | 'help:Print help about subcommand.' 168 | 'install:Install a kernel specification directory.' 169 | 'list:List installed kernel specifications.' 170 | ) 171 | _describe -t commands 'kernelspec command' commands "$@" 172 | } 173 | 174 | _nbextension_cmds() { 175 | local commands; commands=( 176 | 'help:Print help about subcommand.' 177 | 'enable:Enable a notebook extension.' 178 | 'install:Install notebook extensions.' 179 | 'disable:Disable a notebook extension.' 180 | ) 181 | _describe -t commands 'nbextension command' commands "$@" 182 | } 183 | 184 | _notebook_cmds() { 185 | local commands; commands=( 186 | 'help:Print help about subcommand.' 187 | 'list:List currently running notebook servers in this profile.' 
188 | ) 189 | _describe -t commands 'notebook command' commands "$@" 190 | } 191 | 192 | _jupyter "$@" 193 | # vim: ft=zsh sw=2 ts=2 et 194 | -------------------------------------------------------------------------------- /examples/jupyter-completion.bash: -------------------------------------------------------------------------------- 1 | # load with: . jupyter-completion.bash 2 | # 3 | # NOTE: with traitlets>=5.8, jupyter and its subcommands now directly support 4 | # shell command-line tab-completion using argcomplete, which has more complete 5 | # support than this script. Simply install argcomplete and activate global 6 | # completion by following the relevant instructions in: 7 | # https://kislyuk.github.io/argcomplete/#activating-global-completion 8 | 9 | if [[ -n ${ZSH_VERSION-} ]]; then 10 | autoload -Uz bashcompinit && bashcompinit 11 | fi 12 | 13 | _jupyter_get_flags() 14 | { 15 | local url=$1 16 | local var=$2 17 | local dash=$3 18 | if [[ "$url $var" == $__jupyter_complete_last ]]; then 19 | opts=$__jupyter_complete_last_res 20 | return 21 | fi 22 | 23 | if [ -z $1 ]; then 24 | opts=$(jupyter --help | sed -n 's/^ -/-/p' |sed -e 's/, /\n/' |sed -e 's/\(-[[:alnum:]_-]*\).*/\1/') 25 | else 26 | # matplotlib and profile don't need the = and the 27 | # version without simplifies the special cased completion 28 | opts=$(jupyter ${url} --help-all | grep -E "^-{1,2}[^-]" | sed -e "s/<.*//" -e "s/[^=]$/& /" -e "$ s/^/\n-h\n--help\n--help-all\n/") 29 | fi 30 | __jupyter_complete_last="$url $var" 31 | __jupyter_complete_last_res="$opts" 32 | } 33 | 34 | _jupyter() 35 | { 36 | local cur=${COMP_WORDS[COMP_CWORD]} 37 | local prev=${COMP_WORDS[COMP_CWORD - 1]} 38 | local subcommands="notebook qtconsole console nbconvert kernelspec trust " 39 | local opts="help" 40 | local mode="" 41 | for i in "${COMP_WORDS[@]}"; do 42 | [ "$cur" = "$i" ] && break 43 | if [[ ${subcommands} == *${i}* ]]; then 44 | mode="$i" 45 | break 46 | elif [[ ${i} == "--"* ]]; then 47 | 
mode="nosubcommand" 48 | break 49 | fi 50 | done 51 | 52 | 53 | if [[ ${cur} == -* ]]; then 54 | case $mode in 55 | "notebook" | "qtconsole" | "console" | "nbconvert") 56 | _jupyter_get_flags $mode 57 | opts=$"${opts}" 58 | ;; 59 | "kernelspec") 60 | if [[ $COMP_CWORD -ge 3 ]]; then 61 | # 'history trim' and 'history clear' covered by next line 62 | _jupyter_get_flags $mode\ "${COMP_WORDS[2]}" 63 | else 64 | _jupyter_get_flags $mode 65 | 66 | fi 67 | opts=$"${opts}" 68 | ;; 69 | *) 70 | _jupyter_get_flags 71 | esac 72 | # don't drop the trailing space 73 | local IFS=$'\t\n' 74 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) 75 | return 0 76 | elif [[ $mode == "kernelspec" ]]; then 77 | if [[ $COMP_CWORD -ge 3 ]]; then 78 | # drop into flags 79 | opts="--" 80 | else 81 | opts="list install " 82 | fi 83 | local IFS=$'\t\n' 84 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) 85 | else 86 | if [ "$COMP_CWORD" == 1 ]; then 87 | local IFS=$'\t\n' 88 | local sub=$(echo $subcommands | sed -e "s/ / \t/g") 89 | COMPREPLY=( $(compgen -W "${sub}" -- ${cur}) ) 90 | else 91 | local IFS=$'\n' 92 | COMPREPLY=( $(compgen -o filenames -f -- ${cur}) ) 93 | fi 94 | fi 95 | 96 | } 97 | complete -o default -o nospace -F _jupyter jupyter 98 | -------------------------------------------------------------------------------- /jupyter.py: -------------------------------------------------------------------------------- 1 | """Launch the root jupyter command""" 2 | 3 | from __future__ import annotations 4 | 5 | if __name__ == "__main__": 6 | from jupyter_core.command import main 7 | 8 | main() 9 | -------------------------------------------------------------------------------- /jupyter_core/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from .version import __version__, version_info # noqa: F401 4 | -------------------------------------------------------------------------------- 
/jupyter_core/__main__.py: -------------------------------------------------------------------------------- 1 | """Launch the root jupyter command""" 2 | 3 | from __future__ import annotations 4 | 5 | from .command import main 6 | 7 | main() 8 | -------------------------------------------------------------------------------- /jupyter_core/application.py: -------------------------------------------------------------------------------- 1 | """ 2 | A base Application class for Jupyter applications. 3 | 4 | All Jupyter applications should inherit from this. 5 | """ 6 | 7 | # Copyright (c) Jupyter Development Team. 8 | # Distributed under the terms of the Modified BSD License. 9 | from __future__ import annotations 10 | 11 | import logging 12 | import os 13 | import sys 14 | import typing as t 15 | from copy import deepcopy 16 | from pathlib import Path 17 | from shutil import which 18 | 19 | from traitlets import Bool, List, Unicode, observe 20 | from traitlets.config.application import Application, catch_config_error 21 | from traitlets.config.loader import ConfigFileNotFound 22 | 23 | from .paths import ( 24 | allow_insecure_writes, 25 | issue_insecure_write_warning, 26 | jupyter_config_dir, 27 | jupyter_config_path, 28 | jupyter_data_dir, 29 | jupyter_path, 30 | jupyter_runtime_dir, 31 | ) 32 | from .utils import ensure_dir_exists, ensure_event_loop 33 | 34 | # mypy: disable-error-code="no-untyped-call" 35 | 36 | # aliases and flags 37 | 38 | base_aliases: dict[str, t.Any] = {} 39 | if isinstance(Application.aliases, dict): 40 | # traitlets 5 41 | base_aliases.update(Application.aliases) 42 | _jupyter_aliases = { 43 | "log-level": "Application.log_level", 44 | "config": "JupyterApp.config_file", 45 | } 46 | base_aliases.update(_jupyter_aliases) 47 | 48 | base_flags: dict[str, t.Any] = {} 49 | if isinstance(Application.flags, dict): 50 | # traitlets 5 51 | base_flags.update(Application.flags) 52 | _jupyter_flags: dict[str, t.Any] = { 53 | "debug": ( 54 | 
{"Application": {"log_level": logging.DEBUG}}, 55 | "set log level to logging.DEBUG (maximize logging output)", 56 | ), 57 | "generate-config": ({"JupyterApp": {"generate_config": True}}, "generate default config file"), 58 | "y": ( 59 | {"JupyterApp": {"answer_yes": True}}, 60 | "Answer yes to any questions instead of prompting.", 61 | ), 62 | } 63 | base_flags.update(_jupyter_flags) 64 | 65 | 66 | class NoStart(Exception): 67 | """Exception to raise when an application shouldn't start""" 68 | 69 | 70 | class JupyterApp(Application): 71 | """Base class for Jupyter applications""" 72 | 73 | name = "jupyter" # override in subclasses 74 | description = "A Jupyter Application" 75 | 76 | aliases = base_aliases 77 | flags = base_flags 78 | 79 | def _log_level_default(self) -> int: 80 | return logging.INFO 81 | 82 | jupyter_path = List(Unicode()) 83 | 84 | def _jupyter_path_default(self) -> list[str]: 85 | return jupyter_path() 86 | 87 | config_dir = Unicode() 88 | 89 | def _config_dir_default(self) -> str: 90 | return jupyter_config_dir() 91 | 92 | @property 93 | def config_file_paths(self) -> list[str]: 94 | path = jupyter_config_path() 95 | if self.config_dir not in path: 96 | # Insert config dir as first item. 
97 | path.insert(0, self.config_dir) 98 | return path 99 | 100 | data_dir = Unicode() 101 | 102 | def _data_dir_default(self) -> str: 103 | d = jupyter_data_dir() 104 | ensure_dir_exists(d, mode=0o700) 105 | return d 106 | 107 | runtime_dir = Unicode() 108 | 109 | def _runtime_dir_default(self) -> str: 110 | rd = jupyter_runtime_dir() 111 | ensure_dir_exists(rd, mode=0o700) 112 | return rd 113 | 114 | @observe("runtime_dir") 115 | def _runtime_dir_changed(self, change: t.Any) -> None: 116 | ensure_dir_exists(change["new"], mode=0o700) 117 | 118 | generate_config = Bool(False, config=True, help="""Generate default config file.""") 119 | 120 | config_file_name = Unicode(config=True, help="Specify a config file to load.") 121 | 122 | def _config_file_name_default(self) -> str: 123 | if not self.name: 124 | return "" 125 | return self.name.replace("-", "_") + "_config" 126 | 127 | config_file = Unicode( 128 | config=True, 129 | help="""Full path of a config file.""", 130 | ) 131 | 132 | answer_yes = Bool(False, config=True, help="""Answer yes to any prompts.""") 133 | 134 | def write_default_config(self) -> None: 135 | """Write our default config to a .py config file""" 136 | config_file: str 137 | if self.config_file: 138 | config_file = self.config_file 139 | else: 140 | config_file = str(Path(self.config_dir, self.config_file_name + ".py")) 141 | 142 | if Path(config_file).exists() and not self.answer_yes: 143 | answer = "" 144 | 145 | def ask() -> str: 146 | prompt = f"Overwrite {config_file!r} with default config? 
[y/N]" 147 | try: 148 | return input(prompt).lower() or "n" 149 | except KeyboardInterrupt: 150 | print("") # empty line 151 | return "n" 152 | 153 | answer = ask() 154 | while not answer.startswith(("y", "n")): 155 | print("Please answer 'yes' or 'no'") 156 | answer = ask() 157 | if answer.startswith("n"): 158 | return 159 | 160 | config_text = self.generate_config_file() 161 | print("Writing default config to: {config_file!r}") 162 | ensure_dir_exists(Path(config_file).parent.resolve(), 0o700) 163 | with Path.open(Path(config_file), mode="w", encoding="utf-8") as f: 164 | f.write(config_text) 165 | 166 | def migrate_config(self) -> None: 167 | """Migrate config/data from IPython 3""" 168 | try: # let's see if we can open the marker file 169 | # for reading and updating (writing) 170 | f_marker = Path.open(Path(self.config_dir, "migrated"), "r+") 171 | except FileNotFoundError: # cannot find the marker file 172 | pass # that means we have not migrated yet, so continue 173 | except OSError: # not readable and/or writable 174 | return # so let's give up migration in such an environment 175 | else: # if we got here without raising anything, 176 | # that means the file exists 177 | f_marker.close() 178 | return # so we must have already migrated -> bail out 179 | 180 | from .migrate import get_ipython_dir, migrate 181 | 182 | # No IPython dir, nothing to migrate 183 | if not Path(get_ipython_dir()).exists(): 184 | return 185 | 186 | migrate() 187 | 188 | def load_config_file(self, suppress_errors: bool = True) -> None: # type:ignore[override] 189 | """Load the config file. 190 | 191 | By default, errors in loading config are handled, and a warning 192 | printed on screen. For testing, the suppress_errors option is set 193 | to False, so errors will make tests fail. 
194 | """ 195 | self.log.debug("Searching %s for config files", self.config_file_paths) 196 | base_config = "jupyter_config" 197 | try: 198 | super().load_config_file( 199 | base_config, 200 | path=self.config_file_paths, 201 | ) 202 | except ConfigFileNotFound: 203 | # ignore errors loading parent 204 | self.log.debug("Config file %s not found", base_config) 205 | 206 | if self.config_file: 207 | path, config_file_name = os.path.split(self.config_file) 208 | else: 209 | path = self.config_file_paths # type:ignore[assignment] 210 | config_file_name = self.config_file_name 211 | 212 | if not config_file_name or (config_file_name == base_config): 213 | return 214 | 215 | try: 216 | super().load_config_file(config_file_name, path=path) 217 | except ConfigFileNotFound: 218 | self.log.debug("Config file not found, skipping: %s", config_file_name) 219 | except Exception: 220 | # Reraise errors for testing purposes, or if set in 221 | # self.raise_config_file_errors 222 | if (not suppress_errors) or self.raise_config_file_errors: 223 | raise 224 | self.log.warning("Error loading config file: %s", config_file_name, exc_info=True) 225 | 226 | # subcommand-related 227 | def _find_subcommand(self, name: str) -> str: 228 | name = f"{self.name}-{name}" 229 | return which(name) or "" 230 | 231 | @property 232 | def _dispatching(self) -> bool: 233 | """Return whether we are dispatching to another command 234 | 235 | or running ourselves. 
236 | """ 237 | return bool(self.generate_config or self.subapp or self.subcommand) 238 | 239 | subcommand = Unicode() 240 | 241 | @catch_config_error 242 | def initialize(self, argv: t.Any = None) -> None: 243 | """Initialize the application.""" 244 | # don't hook up crash handler before parsing command-line 245 | if argv is None: 246 | argv = sys.argv[1:] 247 | if argv: 248 | subc = self._find_subcommand(argv[0]) 249 | if subc: 250 | self.argv = argv 251 | self.subcommand = subc 252 | return 253 | self.parse_command_line(argv) 254 | cl_config = deepcopy(self.config) 255 | if self._dispatching: 256 | return 257 | self.migrate_config() 258 | self.load_config_file() 259 | # enforce cl-opts override configfile opts: 260 | self.update_config(cl_config) 261 | if allow_insecure_writes: 262 | issue_insecure_write_warning() 263 | 264 | def start(self) -> None: 265 | """Start the whole thing""" 266 | if self.subcommand: 267 | os.execv(self.subcommand, [self.subcommand] + self.argv[1:]) # noqa: S606 268 | raise NoStart() 269 | 270 | if self.subapp: 271 | self.subapp.start() 272 | raise NoStart() 273 | 274 | if self.generate_config: 275 | self.write_default_config() 276 | raise NoStart() 277 | 278 | @classmethod 279 | def launch_instance(cls, argv: t.Any = None, **kwargs: t.Any) -> None: 280 | """Launch an instance of a Jupyter Application""" 281 | # Ensure an event loop is set before any other code runs. 282 | loop = ensure_event_loop() 283 | try: 284 | super().launch_instance(argv=argv, **kwargs) 285 | except NoStart: 286 | return 287 | loop.close() 288 | 289 | 290 | class JupyterAsyncApp(JupyterApp): 291 | """A Jupyter application that runs on an asyncio loop.""" 292 | 293 | name = "jupyter_async" # override in subclasses 294 | description = "An Async Jupyter Application" 295 | 296 | # Set to True for tornado-based apps. 
297 | _prefer_selector_loop = False 298 | 299 | async def initialize_async(self, argv: t.Any = None) -> None: 300 | """Initialize the application asynchronoously.""" 301 | 302 | async def start_async(self) -> None: 303 | """Run the application in an event loop.""" 304 | 305 | @classmethod 306 | async def _launch_instance(cls, argv: t.Any = None, **kwargs: t.Any) -> None: 307 | app = cls.instance(**kwargs) 308 | app.initialize(argv) 309 | await app.initialize_async(argv) 310 | await app.start_async() 311 | 312 | @classmethod 313 | def launch_instance(cls, argv: t.Any = None, **kwargs: t.Any) -> None: 314 | """Launch an instance of an async Jupyter Application""" 315 | loop = ensure_event_loop(cls._prefer_selector_loop) 316 | coro = cls._launch_instance(argv, **kwargs) 317 | loop.run_until_complete(coro) 318 | loop.close() 319 | 320 | 321 | if __name__ == "__main__": 322 | JupyterApp.launch_instance() 323 | -------------------------------------------------------------------------------- /jupyter_core/command.py: -------------------------------------------------------------------------------- 1 | # PYTHON_ARGCOMPLETE_OK 2 | """The root `jupyter` command. 3 | 4 | This does nothing other than dispatch to subcommands or output path info. 5 | """ 6 | 7 | # Copyright (c) Jupyter Development Team. 8 | # Distributed under the terms of the Modified BSD License. 9 | from __future__ import annotations 10 | 11 | import argparse 12 | import errno 13 | import json 14 | import os 15 | import site 16 | import sys 17 | import sysconfig 18 | from pathlib import Path 19 | from shutil import which 20 | from subprocess import Popen 21 | from typing import Any 22 | 23 | from . 
import paths 24 | from .version import __version__ 25 | 26 | 27 | class JupyterParser(argparse.ArgumentParser): 28 | """A Jupyter argument parser.""" 29 | 30 | @property 31 | def epilog(self) -> str: 32 | """Add subcommands to epilog on request 33 | 34 | Avoids searching PATH for subcommands unless help output is requested. 35 | """ 36 | subcommands: str = " ".join(list_subcommands()) 37 | return f"Available subcommands: {subcommands}" 38 | 39 | @epilog.setter 40 | def epilog(self, x: Any) -> None: 41 | """Ignore epilog set in Parser.__init__""" 42 | 43 | def argcomplete(self) -> None: 44 | """Trigger auto-completion, if enabled""" 45 | try: 46 | import argcomplete 47 | 48 | argcomplete.autocomplete(self) 49 | except ImportError: 50 | pass 51 | 52 | 53 | def jupyter_parser() -> JupyterParser: 54 | """Create a jupyter parser object.""" 55 | parser = JupyterParser( 56 | description="Jupyter: Interactive Computing", 57 | ) 58 | group = parser.add_mutually_exclusive_group(required=False) 59 | # don't use argparse's version action because it prints to stderr on py2 60 | group.add_argument( 61 | "--version", action="store_true", help="show the versions of core jupyter packages and exit" 62 | ) 63 | subcommand_action = group.add_argument( 64 | "subcommand", type=str, nargs="?", help="the subcommand to launch" 65 | ) 66 | # For argcomplete, supply all known subcommands 67 | subcommand_action.completer = lambda *args, **kwargs: list_subcommands() # type: ignore[attr-defined] # noqa: ARG005 68 | 69 | group.add_argument("--config-dir", action="store_true", help="show Jupyter config dir") 70 | group.add_argument("--data-dir", action="store_true", help="show Jupyter data dir") 71 | group.add_argument("--runtime-dir", action="store_true", help="show Jupyter runtime dir") 72 | group.add_argument( 73 | "--paths", 74 | action="store_true", 75 | help="show all Jupyter paths. 
Add --json for machine-readable format.", 76 | ) 77 | parser.add_argument("--json", action="store_true", help="output paths as machine-readable json") 78 | parser.add_argument("--debug", action="store_true", help="output debug information about paths") 79 | 80 | return parser 81 | 82 | 83 | def list_subcommands() -> list[str]: 84 | """List all jupyter subcommands 85 | 86 | searches PATH for `jupyter-name` 87 | 88 | Returns a list of jupyter's subcommand names, without the `jupyter-` prefix. 89 | Nested children (e.g. jupyter-sub-subsub) are not included. 90 | """ 91 | subcommand_tuples = set() 92 | # construct a set of `('foo', 'bar') from `jupyter-foo-bar` 93 | for d in _path_with_self(): 94 | try: 95 | bin_paths = list(Path(d).iterdir()) 96 | except OSError: 97 | continue 98 | for path in bin_paths: 99 | name = path.name 100 | if name.startswith("jupyter-"): 101 | if sys.platform.startswith("win"): 102 | # remove file-extension on Windows 103 | name = path.stem 104 | subcommand_tuples.add(tuple(name.split("-")[1:])) 105 | # build a set of subcommand strings, excluding subcommands whose parents are defined 106 | subcommands = set() 107 | # Only include `jupyter-foo-bar` if `jupyter-foo` is not already present 108 | for sub_tup in subcommand_tuples: 109 | if not any(sub_tup[:i] in subcommand_tuples for i in range(1, len(sub_tup))): 110 | subcommands.add("-".join(sub_tup)) 111 | return sorted(subcommands) 112 | 113 | 114 | def _execvp(cmd: str, argv: list[str]) -> None: 115 | """execvp, except on Windows where it uses Popen 116 | 117 | Python provides execvp on Windows, but its behavior is problematic (Python bug#9148). 
118 | """ 119 | if sys.platform.startswith("win"): 120 | # PATH is ignored when shell=False, 121 | # so rely on shutil.which 122 | cmd_path = which(cmd) 123 | if cmd_path is None: 124 | msg = f"{cmd!r} not found" 125 | raise OSError(msg, errno.ENOENT) 126 | p = Popen([cmd_path] + argv[1:]) # noqa: S603 127 | # Don't raise KeyboardInterrupt in the parent process. 128 | # Set this after spawning, to avoid subprocess inheriting handler. 129 | import signal 130 | 131 | signal.signal(signal.SIGINT, signal.SIG_IGN) 132 | p.wait() 133 | sys.exit(p.returncode) 134 | else: 135 | os.execvp(cmd, argv) # noqa: S606 136 | 137 | 138 | def _jupyter_abspath(subcommand: str) -> str: 139 | """This method get the abspath of a specified jupyter-subcommand with no 140 | changes on ENV. 141 | """ 142 | # get env PATH with self 143 | search_path = os.pathsep.join(_path_with_self()) 144 | # get the abs path for the jupyter- 145 | jupyter_subcommand = f"jupyter-{subcommand}" 146 | abs_path = which(jupyter_subcommand, path=search_path) 147 | if abs_path is None: 148 | msg = f"\nJupyter command `{jupyter_subcommand}` not found." 149 | raise Exception(msg) 150 | 151 | if not os.access(abs_path, os.X_OK): 152 | msg = f"\nJupyter command `{jupyter_subcommand}` is not executable." 153 | raise Exception(msg) 154 | 155 | return abs_path 156 | 157 | 158 | def _path_with_self() -> list[str]: 159 | """Put `jupyter`'s dir at the front of PATH 160 | 161 | Ensures that /path/to/jupyter subcommand 162 | will do /path/to/jupyter-subcommand 163 | even if /other/jupyter-subcommand is ahead of it on PATH 164 | """ 165 | path_list = (os.environ.get("PATH") or os.defpath).split(os.pathsep) 166 | 167 | # Insert the "scripts" directory for this Python installation 168 | # This allows the "jupyter" command to be relocated, while still 169 | # finding subcommands that have been installed in the default 170 | # location. 
171 | # We put the scripts directory at the *end* of PATH, so that 172 | # if the user explicitly overrides a subcommand, that override 173 | # still takes effect. 174 | try: 175 | bindir = sysconfig.get_path("scripts") 176 | except KeyError: 177 | # The Python environment does not specify a "scripts" location 178 | pass 179 | else: 180 | path_list.append(bindir) 181 | 182 | scripts = [sys.argv[0]] 183 | if Path(scripts[0]).is_symlink(): 184 | # include realpath, if `jupyter` is a symlink 185 | scripts.append(os.path.realpath(scripts[0])) 186 | 187 | for script in scripts: 188 | bindir = str(Path(script).parent) 189 | if Path(bindir).is_dir() and os.access(script, os.X_OK): # only if it's a script 190 | # ensure executable's dir is on PATH 191 | # avoids missing subcommands when jupyter is run via absolute path 192 | path_list.insert(0, bindir) 193 | return path_list 194 | 195 | 196 | def _evaluate_argcomplete(parser: JupyterParser) -> list[str]: 197 | """If argcomplete is enabled, trigger autocomplete or return current words 198 | 199 | If the first word looks like a subcommand, return the current command 200 | that is attempting to be completed so that the subcommand can evaluate it; 201 | otherwise auto-complete using the main parser. 
202 | """ 203 | try: 204 | # traitlets >= 5.8 provides some argcomplete support, 205 | # use helper methods to jump to argcomplete 206 | from traitlets.config.argcomplete_config import ( 207 | get_argcomplete_cwords, 208 | increment_argcomplete_index, 209 | ) 210 | 211 | cwords = get_argcomplete_cwords() 212 | if cwords and len(cwords) > 1 and not cwords[1].startswith("-"): 213 | # If first completion word looks like a subcommand, 214 | # increment word from which to start handling arguments 215 | increment_argcomplete_index() 216 | return cwords 217 | # Otherwise no subcommand, directly autocomplete and exit 218 | parser.argcomplete() 219 | except ImportError: 220 | # traitlets >= 5.8 not available, just try to complete this without 221 | # worrying about subcommands 222 | parser.argcomplete() 223 | msg = "Control flow should not reach end of autocomplete()" 224 | raise AssertionError(msg) 225 | 226 | 227 | def main() -> None: 228 | """The command entry point.""" 229 | parser = jupyter_parser() 230 | argv = sys.argv 231 | subcommand = None 232 | if "_ARGCOMPLETE" in os.environ: 233 | argv = _evaluate_argcomplete(parser) 234 | subcommand = argv[1] 235 | elif len(argv) > 1 and not argv[1].startswith("-"): 236 | # Don't parse if a subcommand is given 237 | # Avoids argparse gobbling up args passed to subcommand, such as `-h`. 
238 | subcommand = argv[1] 239 | else: 240 | args, opts = parser.parse_known_args() 241 | subcommand = args.subcommand 242 | if args.version: 243 | print("Selected Jupyter core packages...") 244 | for package in [ 245 | "IPython", 246 | "ipykernel", 247 | "ipywidgets", 248 | "jupyter_client", 249 | "jupyter_core", 250 | "jupyter_server", 251 | "jupyterlab", 252 | "nbclient", 253 | "nbconvert", 254 | "nbformat", 255 | "notebook", 256 | "qtconsole", 257 | "traitlets", 258 | ]: 259 | try: 260 | if package == "jupyter_core": # We're already here 261 | version = __version__ 262 | else: 263 | mod = __import__(package) 264 | version = mod.__version__ 265 | except ImportError: 266 | version = "not installed" 267 | print(f"{package:<17}:", version) 268 | return 269 | if args.json and not args.paths: 270 | sys.exit("--json is only used with --paths") 271 | if args.debug and not args.paths: 272 | sys.exit("--debug is only used with --paths") 273 | if args.debug and args.json: 274 | sys.exit("--debug cannot be used with --json") 275 | if args.config_dir: 276 | print(paths.jupyter_config_dir()) 277 | return 278 | if args.data_dir: 279 | print(paths.jupyter_data_dir()) 280 | return 281 | if args.runtime_dir: 282 | print(paths.jupyter_runtime_dir()) 283 | return 284 | if args.paths: 285 | data = {} 286 | data["runtime"] = [paths.jupyter_runtime_dir()] 287 | data["config"] = paths.jupyter_config_path() 288 | data["data"] = paths.jupyter_path() 289 | if args.json: 290 | print(json.dumps(data)) 291 | else: 292 | if args.debug: 293 | env = os.environ 294 | 295 | if paths.use_platform_dirs(): 296 | print( 297 | "JUPYTER_PLATFORM_DIRS is set to a true value, so we use platformdirs to find platform-specific directories" 298 | ) 299 | else: 300 | print( 301 | "JUPYTER_PLATFORM_DIRS is set to a false value, or is not set, so we use hardcoded legacy paths for platform-specific directories" 302 | ) 303 | 304 | if paths.prefer_environment_over_user(): 305 | print( 306 | 
"JUPYTER_PREFER_ENV_PATH is set to a true value, or JUPYTER_PREFER_ENV_PATH is not set and we detected a virtual environment, making the environment-level path preferred over the user-level path for data and config" 307 | ) 308 | else: 309 | print( 310 | "JUPYTER_PREFER_ENV_PATH is set to a false value, or JUPYTER_PREFER_ENV_PATH is not set and we did not detect a virtual environment, making the user-level path preferred over the environment-level path for data and config" 311 | ) 312 | 313 | # config path list 314 | if env.get("JUPYTER_NO_CONFIG"): 315 | print( 316 | "JUPYTER_NO_CONFIG is set, making the config path list only a single temporary directory" 317 | ) 318 | else: 319 | print( 320 | "JUPYTER_NO_CONFIG is not set, so we use the full path list for config" 321 | ) 322 | 323 | if env.get("JUPYTER_CONFIG_PATH"): 324 | print( 325 | f"JUPYTER_CONFIG_PATH is set to '{env.get('JUPYTER_CONFIG_PATH')}', which is prepended to the config path list (unless JUPYTER_NO_CONFIG is set)" 326 | ) 327 | else: 328 | print( 329 | "JUPYTER_CONFIG_PATH is not set, so we do not prepend anything to the config paths" 330 | ) 331 | 332 | if env.get("JUPYTER_CONFIG_DIR"): 333 | print( 334 | f"JUPYTER_CONFIG_DIR is set to '{env.get('JUPYTER_CONFIG_DIR')}', overriding the default user-level config directory" 335 | ) 336 | else: 337 | print( 338 | "JUPYTER_CONFIG_DIR is not set, so we use the default user-level config directory" 339 | ) 340 | 341 | if site.ENABLE_USER_SITE: 342 | print( 343 | f"Python's site.ENABLE_USER_SITE is True, so we add the user site directory '{site.getuserbase()}'" 344 | ) 345 | else: 346 | print( 347 | f"Python's site.ENABLE_USER_SITE is not True, so we do not add the Python site user directory '{site.getuserbase()}'" 348 | ) 349 | 350 | # data path list 351 | if env.get("JUPYTER_PATH"): 352 | print( 353 | f"JUPYTER_PATH is set to '{env.get('JUPYTER_PATH')}', which is prepended to the data paths" 354 | ) 355 | else: 356 | print( 357 | "JUPYTER_PATH is not 
set, so we do not prepend anything to the data paths" 358 | ) 359 | 360 | if env.get("JUPYTER_DATA_DIR"): 361 | print( 362 | f"JUPYTER_DATA_DIR is set to '{env.get('JUPYTER_DATA_DIR')}', overriding the default user-level data directory" 363 | ) 364 | else: 365 | print( 366 | "JUPYTER_DATA_DIR is not set, so we use the default user-level data directory" 367 | ) 368 | 369 | # runtime directory 370 | if env.get("JUPYTER_RUNTIME_DIR"): 371 | print( 372 | f"JUPYTER_RUNTIME_DIR is set to '{env.get('JUPYTER_RUNTIME_DIR')}', overriding the default runtime directory" 373 | ) 374 | else: 375 | print( 376 | "JUPYTER_RUNTIME_DIR is not set, so we use the default runtime directory" 377 | ) 378 | 379 | print() 380 | 381 | for name in sorted(data): 382 | path = data[name] 383 | print(f"{name}:") 384 | for p in path: 385 | print(" " + p) 386 | return 387 | 388 | if not subcommand: 389 | parser.print_help(file=sys.stderr) 390 | sys.exit("\nPlease specify a subcommand or one of the optional arguments.") 391 | 392 | try: 393 | command = _jupyter_abspath(subcommand) 394 | except Exception as e: 395 | parser.print_help(file=sys.stderr) 396 | # special-case alias of "jupyter help" to "jupyter --help" 397 | if subcommand == "help": 398 | return 399 | sys.exit(str(e)) 400 | 401 | try: 402 | _execvp(command, [command] + argv[2:]) 403 | except OSError as e: 404 | sys.exit(f"Error executing Jupyter command {subcommand!r}: {e}") 405 | 406 | 407 | if __name__ == "__main__": 408 | main() 409 | -------------------------------------------------------------------------------- /jupyter_core/migrate.py: -------------------------------------------------------------------------------- 1 | # PYTHON_ARGCOMPLETE_OK 2 | """Migrating IPython < 4.0 to Jupyter 3 | 4 | This *copies* configuration and resources to their new locations in Jupyter 5 | 6 | Migrations: 7 | 8 | - .ipython/ 9 | - nbextensions -> JUPYTER_DATA_DIR/nbextensions 10 | - kernels -> JUPYTER_DATA_DIR/kernels 11 | 12 | - 
.ipython/profile_default/ 13 | - static/custom -> .jupyter/custom 14 | - nbconfig -> .jupyter/nbconfig 15 | - security/ 16 | 17 | - notebook_secret, notebook_cookie_secret, nbsignatures.db -> JUPYTER_DATA_DIR 18 | 19 | - ipython_{notebook,nbconvert,qtconsole}_config.py -> .jupyter/jupyter_{name}_config.py 20 | 21 | 22 | """ 23 | 24 | # Copyright (c) Jupyter Development Team. 25 | # Distributed under the terms of the Modified BSD License. 26 | from __future__ import annotations 27 | 28 | import os 29 | import re 30 | import shutil 31 | from datetime import datetime, timezone 32 | from pathlib import Path 33 | from typing import Any 34 | 35 | from traitlets.config.loader import JSONFileConfigLoader, PyFileConfigLoader 36 | from traitlets.log import get_logger 37 | 38 | from .application import JupyterApp 39 | from .paths import jupyter_config_dir, jupyter_data_dir 40 | from .utils import ensure_dir_exists 41 | 42 | # mypy: disable-error-code="no-untyped-call" 43 | 44 | 45 | migrations = { 46 | str(Path("{ipython_dir}", "nbextensions")): str(Path("{jupyter_data}", "nbextensions")), 47 | str(Path("{ipython_dir}", "kernels")): str(Path("{jupyter_data}", "kernels")), 48 | str(Path("{profile}", "nbconfig")): str(Path("{jupyter_config}", "nbconfig")), 49 | } 50 | 51 | custom_src_t = str(Path("{profile}", "static", "custom")) 52 | custom_dst_t = str(Path("{jupyter_config}", "custom")) 53 | 54 | for security_file in ("notebook_secret", "notebook_cookie_secret", "nbsignatures.db"): 55 | src = str(Path("{profile}", "security", security_file)) 56 | dst = str(Path("{jupyter_data}", security_file)) 57 | migrations[src] = dst 58 | 59 | config_migrations = ["notebook", "nbconvert", "qtconsole"] 60 | 61 | regex = re.compile 62 | 63 | config_substitutions = { 64 | regex(r"\bIPythonQtConsoleApp\b"): "JupyterQtConsoleApp", 65 | regex(r"\bIPythonWidget\b"): "JupyterWidget", 66 | regex(r"\bRichIPythonWidget\b"): "RichJupyterWidget", 67 | regex(r"\bIPython\.html\b"): "notebook", 68 | 
regex(r"\bIPython\.nbconvert\b"): "nbconvert", 69 | } 70 | 71 | 72 | def get_ipython_dir() -> str: 73 | """Return the IPython directory location. 74 | 75 | Not imported from IPython because the IPython implementation 76 | ensures that a writable directory exists, 77 | creating a temporary directory if not. 78 | We don't want to trigger that when checking if migration should happen. 79 | 80 | We only need to support the IPython < 4 behavior for migration, 81 | so importing for forward-compatibility and edge cases is not important. 82 | """ 83 | return os.environ.get("IPYTHONDIR", str(Path("~/.ipython").expanduser())) 84 | 85 | 86 | def migrate_dir(src: str, dst: str) -> bool: 87 | """Migrate a directory from src to dst""" 88 | log = get_logger() 89 | src_path = Path(src) 90 | dst_path = Path(dst) 91 | if not any(src_path.iterdir()): 92 | log.debug("No files in %s", src) 93 | return False 94 | if dst_path.exists(): 95 | if any(dst_path.iterdir()): 96 | # already exists, non-empty 97 | log.debug("%s already exists", dst) 98 | return False 99 | dst_path.rmdir() 100 | log.info("Copying %s -> %s", src, dst) 101 | ensure_dir_exists(dst_path.parent) 102 | shutil.copytree(src, dst, symlinks=True) 103 | return True 104 | 105 | 106 | def migrate_file(src: str | Path, dst: str | Path, substitutions: Any = None) -> bool: 107 | """Migrate a single file from src to dst 108 | 109 | substitutions is an optional dict of {regex: replacement} for performing replacements on the file. 
110 | """ 111 | log = get_logger() 112 | dst_path = Path(dst) 113 | if dst_path.exists(): 114 | # already exists 115 | log.debug("%s already exists", dst) 116 | return False 117 | log.info("Copying %s -> %s", src, dst) 118 | ensure_dir_exists(dst_path.parent) 119 | shutil.copy(src, dst) 120 | if substitutions: 121 | with dst_path.open() as f: 122 | text = f.read() 123 | for pat, replacement in substitutions.items(): 124 | text = pat.sub(replacement, text) 125 | with dst_path.open("w") as f: 126 | f.write(text) 127 | return True 128 | 129 | 130 | def migrate_one(src: str, dst: str) -> bool: 131 | """Migrate one item 132 | 133 | dispatches to migrate_dir/_file 134 | """ 135 | log = get_logger() 136 | if Path(src).is_file(): 137 | return migrate_file(src, dst) 138 | if Path(src).is_dir(): 139 | return migrate_dir(src, dst) 140 | log.debug("Nothing to migrate for %s", src) 141 | return False 142 | 143 | 144 | def migrate_static_custom(src: str, dst: str) -> bool: 145 | """Migrate non-empty custom.js,css from src to dst 146 | 147 | src, dst are 'custom' directories containing custom.{js,css} 148 | """ 149 | log = get_logger() 150 | migrated = False 151 | 152 | custom_js = Path(src, "custom.js") 153 | custom_css = Path(src, "custom.css") 154 | # check if custom_js is empty: 155 | custom_js_empty = True 156 | if Path(custom_js).is_file(): 157 | with Path.open(custom_js, encoding="utf-8") as f: 158 | js = f.read().strip() 159 | for line in js.splitlines(): 160 | if not (line.isspace() or line.strip().startswith(("/*", "*", "//"))): 161 | custom_js_empty = False 162 | break 163 | 164 | # check if custom_css is empty: 165 | custom_css_empty = True 166 | if Path(custom_css).is_file(): 167 | with Path.open(custom_css, encoding="utf-8") as f: 168 | css = f.read().strip() 169 | custom_css_empty = css.startswith("/*") and css.endswith("*/") 170 | 171 | if custom_js_empty: 172 | log.debug("Ignoring empty %s", custom_js) 173 | if custom_css_empty: 174 | log.debug("Ignoring empty 
%s", custom_css) 175 | 176 | if custom_js_empty and custom_css_empty: 177 | # nothing to migrate 178 | return False 179 | ensure_dir_exists(dst) 180 | 181 | if not custom_js_empty or not custom_css_empty: 182 | ensure_dir_exists(dst) 183 | 184 | if not custom_js_empty and migrate_file(custom_js, Path(dst, "custom.js")): 185 | migrated = True 186 | if not custom_css_empty and migrate_file(custom_css, Path(dst, "custom.css")): 187 | migrated = True 188 | 189 | return migrated 190 | 191 | 192 | def migrate_config(name: str, env: Any) -> list[Any]: 193 | """Migrate a config file. 194 | 195 | Includes substitutions for updated configurable names. 196 | """ 197 | log = get_logger() 198 | src_base = str(Path(f"{env['profile']}", f"ipython_{name}_config")) 199 | dst_base = str(Path(f"{env['jupyter_config']}", f"jupyter_{name}_config")) 200 | loaders = { 201 | ".py": PyFileConfigLoader, 202 | ".json": JSONFileConfigLoader, 203 | } 204 | migrated = [] 205 | for ext in (".py", ".json"): 206 | src = src_base + ext 207 | dst = dst_base + ext 208 | if Path(src).exists(): 209 | cfg = loaders[ext](src).load_config() 210 | if cfg: 211 | if migrate_file(src, dst, substitutions=config_substitutions): 212 | migrated.append(src) 213 | else: 214 | # don't migrate empty config files 215 | log.debug("Not migrating empty config file: %s", src) 216 | return migrated 217 | 218 | 219 | def migrate() -> bool: 220 | """Migrate IPython configuration to Jupyter""" 221 | env = { 222 | "jupyter_data": jupyter_data_dir(), 223 | "jupyter_config": jupyter_config_dir(), 224 | "ipython_dir": get_ipython_dir(), 225 | "profile": str(Path(get_ipython_dir(), "profile_default")), 226 | } 227 | migrated = False 228 | for src_t, dst_t in migrations.items(): 229 | src = src_t.format(**env) 230 | dst = dst_t.format(**env) 231 | if Path(src).exists() and migrate_one(src, dst): 232 | migrated = True 233 | 234 | for name in config_migrations: 235 | if migrate_config(name, env): 236 | migrated = True 237 | 238 | 
custom_src = custom_src_t.format(**env) 239 | custom_dst = custom_dst_t.format(**env) 240 | 241 | if Path(custom_src).exists() and migrate_static_custom(custom_src, custom_dst): 242 | migrated = True 243 | 244 | # write a marker to avoid re-running migration checks 245 | ensure_dir_exists(env["jupyter_config"]) 246 | with Path.open(Path(env["jupyter_config"], "migrated"), "w", encoding="utf-8") as f: 247 | f.write(datetime.now(tz=timezone.utc).isoformat()) 248 | 249 | return migrated 250 | 251 | 252 | class JupyterMigrate(JupyterApp): 253 | """A Jupyter Migration App.""" 254 | 255 | name = "jupyter-migrate" 256 | description = """ 257 | Migrate configuration and data from .ipython prior to 4.0 to Jupyter locations. 258 | 259 | This migrates: 260 | 261 | - config files in the default profile 262 | - kernels in ~/.ipython/kernels 263 | - notebook javascript extensions in ~/.ipython/extensions 264 | - custom.js/css to .jupyter/custom 265 | 266 | to their new Jupyter locations. 267 | 268 | All files are copied, not moved. 269 | If the destinations already exist, nothing will be done. 
    """

    def start(self) -> None:
        """Start the application."""
        # migrate() returns False when every source was empty or already migrated
        if not migrate():
            self.log.info("Found nothing to migrate.")


main = JupyterMigrate.launch_instance


if __name__ == "__main__":
    main()
--------------------------------------------------------------------------------
/jupyter_core/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jupyter/jupyter_core/ad6b4aea233a9634ffcd6ad553ecd63129ab5f6e/jupyter_core/py.typed
--------------------------------------------------------------------------------
/jupyter_core/troubleshoot.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
"""
display environment information that is frequently
used to troubleshoot installations of Jupyter or IPython
"""

from __future__ import annotations

import os
import platform
import subprocess
import sys
from typing import Any, Optional, Union


def subs(cmd: Union[list[str], str]) -> Optional[str]:
    """
    get data from commands that we need to run outside of python

    Returns the command's stripped stdout decoded as UTF-8,
    or None if the command is missing or exits non-zero.
    """
    try:
        stdout = subprocess.check_output(cmd)  # noqa: S603
        return stdout.decode("utf-8", "replace").strip()
    except (OSError, subprocess.CalledProcessError):
        # command not found / failed: report "no data" rather than crash
        return None


def get_data() -> dict[str, Any]:
    """
    returns a dict of various user environment data

    Keys: path, sys_path, sys_exe, sys_version, platform, which/where,
    pip, conda, conda-env. Command-derived values may be None (see subs).
    """
    env: dict[str, Any] = {}
    # NOTE(review): PATH may be unset -> None here; main() calls .split on it
    env["path"] = os.environ.get("PATH")
    env["sys_path"] = sys.path
    env["sys_exe"] = sys.executable
    env["sys_version"] = sys.version
    env["platform"] = platform.platform()
    # FIXME: which on Windows?
    if sys.platform == "win32":
        env["where"] = subs(["where", "jupyter"])
        env["which"] = None
    else:
        env["which"] = subs(["which", "-a", "jupyter"])
        env["where"] = None
    env["pip"] = subs([sys.executable, "-m", "pip", "list"])
    env["conda"] = subs(["conda", "list"])
    env["conda-env"] = subs(["conda", "env", "export"])
    return env


def main() -> None:
    """
    print out useful info
    """
    # pylint: disable=superfluous-parens
    # args = get_args()
    if "_ARGCOMPLETE" in os.environ:
        # No arguments to complete, the script can be slow to run to completion,
        # so in case someone tries to complete jupyter troubleshoot just exit early
        return

    environment_data = get_data()

    print("$PATH:")
    for directory in environment_data["path"].split(os.pathsep):
        print(f"\t{directory}")

    print("\nsys.path:")
    for directory in environment_data["sys_path"]:
        print(f"\t{directory}")

    print("\nsys.executable:")
    print(f"\t{environment_data['sys_exe']}")

    print("\nsys.version:")
    if "\n" in environment_data["sys_version"]:
        for data in environment_data["sys_version"].split("\n"):
            print(f"\t{data}")
    else:
        print(f"\t{environment_data['sys_version']}")

    print("\nplatform.platform():")
    print(f"\t{environment_data['platform']}")

    # which/where/pip/conda sections are skipped when the probe returned None
    if environment_data["which"]:
        print("\nwhich -a jupyter:")
        for line in environment_data["which"].split("\n"):
            print(f"\t{line}")

    if environment_data["where"]:
        print("\nwhere jupyter:")
        for line in environment_data["where"].split("\n"):
            print(f"\t{line}")

    if environment_data["pip"]:
        print("\npip list:")
        for package in environment_data["pip"].split("\n"):
            print(f"\t{package}")

    if environment_data["conda"]:
        print("\nconda list:")
        for package in environment_data["conda"].split("\n"):
            print(f"\t{package}")

    if environment_data["conda-env"]:
        print("\nconda env:")
        for package in environment_data["conda-env"].split("\n"):
            print(f"\t{package}")


if __name__ == "__main__":
    main()
--------------------------------------------------------------------------------
/jupyter_core/utils/__init__.py:
--------------------------------------------------------------------------------
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import annotations

import asyncio
import atexit
import errno
import inspect
import sys
import threading
import warnings
from contextvars import ContextVar
from pathlib import Path
from types import FrameType
from typing import Any, Awaitable, Callable, TypeVar, cast


def ensure_dir_exists(path: str | Path, mode: int = 0o777) -> None:
    """Ensure that a directory exists

    If it doesn't exist, try to create it, protecting against a race condition
    if another process is doing the same.
    The default permissions are determined by the current umask.

    Raises OSError if path exists but is not a directory.
    """
    try:
        Path(path).mkdir(parents=True, mode=mode)
    except OSError as e:
        # EEXIST from a concurrent creator is fine; re-raise anything else
        if e.errno != errno.EEXIST:
            raise
    if not Path(path).is_dir():
        msg = f"{path!r} exists but is not a directory"
        raise OSError(msg)


def _get_frame(level: int) -> FrameType | None:
    """Get the frame at the given stack level."""
    # sys._getframe is much faster than inspect.stack, but isn't guaranteed to
    # exist in all python implementations, so we fall back to inspect.stack()

    # We need to add one to level to account for this get_frame call.
    if hasattr(sys, "_getframe"):
        frame = sys._getframe(level + 1)
    else:
        frame = inspect.stack(context=0)[level + 1].frame
    return frame


# This function is from https://github.com/python/cpython/issues/67998
# (https://bugs.python.org/file39550/deprecated_module_stacklevel.diff) and
# calculates the appropriate stacklevel for deprecations to target the
# deprecation for the caller, no matter how many internal stack frames we have
# added in the process. For example, with the deprecation warning in the
# __init__ below, the appropriate stacklevel will change depending on how deep
# the inheritance hierarchy is.
def _external_stacklevel(internal: list[str]) -> int:
    """Find the stacklevel of the first frame that doesn't contain any of the given internal strings

    The depth will be 1 at minimum in order to start checking at the caller of
    the function that called this utility method.
    """
    # Get the level of my caller's caller
    level = 2
    frame = _get_frame(level)

    # Normalize the path separators:
    normalized_internal = [str(Path(s)) for s in internal]

    # climb the stack frames while we see internal frames
    while frame and any(s in str(Path(frame.f_code.co_filename)) for s in normalized_internal):
        level += 1
        frame = frame.f_back

    # Return the stack level from the perspective of whoever called us (i.e., one level up)
    return level - 1


def deprecation(message: str, internal: str | list[str] = "jupyter_core/") -> None:
    """Generate a deprecation warning targeting the first frame that is not 'internal'

    internal is a string or list of strings, which if they appear in filenames in the
    frames, the frames will be considered internal. Changing this can be useful if, for example,
    we know that our internal code is calling out to another library.
    """
    _internal: list[str]
    _internal = [internal] if isinstance(internal, str) else internal

    # stack level of the first external frame from here
    stacklevel = _external_stacklevel(_internal)

    # The call to .warn adds one frame, so bump the stacklevel up by one
    warnings.warn(message, DeprecationWarning, stacklevel=stacklevel + 1)


T = TypeVar("T")


class _TaskRunner:
    """A task runner that runs an asyncio event loop on a background thread."""

    def __init__(self) -> None:
        # Loop and thread are created lazily on first run(); the lock
        # guards their one-time creation.
        self.__io_loop: asyncio.AbstractEventLoop | None = None
        self.__runner_thread: threading.Thread | None = None
        self.__lock = threading.Lock()
        atexit.register(self._close)

    def _close(self) -> None:
        # Stop the background loop at interpreter exit (registered above).
        if self.__io_loop:
            self.__io_loop.stop()

    def _runner(self) -> None:
        # Thread target: run the loop until _close() stops it, then close it.
        loop = self.__io_loop
        assert loop is not None
        try:
            loop.run_forever()
        finally:
            loop.close()

    def run(self, coro: Any) -> Any:
        """Synchronously run a coroutine on a background thread."""
        with self.__lock:
            name = f"{threading.current_thread().name} - runner"
            if self.__io_loop is None:
                self.__io_loop = asyncio.new_event_loop()
                self.__runner_thread = threading.Thread(target=self._runner, daemon=True, name=name)
                self.__runner_thread.start()
            # blocks the calling thread until the coroutine finishes
            fut = asyncio.run_coroutine_threadsafe(coro, self.__io_loop)
            return fut.result(None)


# One _TaskRunner per calling thread, keyed by thread name (see run_sync).
_runner_map: dict[str, _TaskRunner] = {}
# Per-context cached event loop used by ensure_event_loop().
_loop: ContextVar[asyncio.AbstractEventLoop | None] = ContextVar("_loop", default=None)


def run_sync(coro: Callable[..., Awaitable[T]]) -> Callable[..., T]:
    """Wraps coroutine in a function that blocks until it has executed.

    Parameters
    ----------
    coro : coroutine-function
        The coroutine-function to be executed.

    Returns
    -------
    result :
        Whatever the coroutine-function returns.
    """

    assert inspect.iscoroutinefunction(coro)

    def wrapped(*args: Any, **kwargs: Any) -> Any:
        name = threading.current_thread().name
        inner = coro(*args, **kwargs)
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            # No loop running, run the loop for this thread.
            loop = ensure_event_loop()
            return loop.run_until_complete(inner)

        # Loop is currently running in this thread,
        # use a task runner.
        # NOTE(review): runners are keyed by thread *name*; two threads
        # sharing a name would share a runner — confirm acceptable.
        if name not in _runner_map:
            _runner_map[name] = _TaskRunner()
        return _runner_map[name].run(inner)

    wrapped.__doc__ = coro.__doc__
    return wrapped


def ensure_event_loop(prefer_selector_loop: bool = False) -> asyncio.AbstractEventLoop:
    """Return this context's event loop, creating and caching one if needed.

    Reuses the ContextVar-cached loop if still open, else the currently
    running loop; otherwise creates a new loop (a Windows selector loop
    when prefer_selector_loop is set on win32) and installs it.
    """
    # Get the loop for this thread, or create a new one.
    loop = _loop.get()
    if loop is not None and not loop.is_closed():
        return loop
    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:
        if sys.platform == "win32" and prefer_selector_loop:
            loop = asyncio.WindowsSelectorEventLoopPolicy().new_event_loop()
        else:
            loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
    _loop.set(loop)
    return loop


async def ensure_async(obj: Awaitable[T] | T) -> T:
    """Convert a non-awaitable object to a coroutine if needed,
    and await it if it was not already awaited.

    This function is meant to be called on the result of calling a function,
    when that function could either be asynchronous or not.
193 | """ 194 | if inspect.isawaitable(obj): 195 | obj = cast(Awaitable[T], obj) 196 | try: 197 | result = await obj 198 | except RuntimeError as e: 199 | if str(e) == "cannot reuse already awaited coroutine": 200 | # obj is already the coroutine's result 201 | return cast(T, obj) 202 | raise 203 | return result 204 | return obj 205 | -------------------------------------------------------------------------------- /jupyter_core/version.py: -------------------------------------------------------------------------------- 1 | """ 2 | store the current version info of the jupyter_core. 3 | """ 4 | 5 | from __future__ import annotations 6 | 7 | import re 8 | 9 | # Version string must appear intact for hatch versioning 10 | __version__ = "5.8.1" 11 | 12 | # Build up version_info tuple for backwards compatibility 13 | pattern = r"(?P\d+).(?P\d+).(?P\d+)(?P.*)" 14 | match = re.match(pattern, __version__) 15 | assert match is not None 16 | parts: list[object] = [int(match[part]) for part in ["major", "minor", "patch"]] 17 | if match["rest"]: 18 | parts.append(match["rest"]) 19 | version_info = tuple(parts) 20 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling>=1.4"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "jupyter_core" 7 | description = "Jupyter core package. A base package on which Jupyter projects rely." 
8 | license = "BSD-3-Clause" 9 | license-files = ["LICENSE"] 10 | classifiers = [ 11 | "Framework :: Jupyter", 12 | "Intended Audience :: Developers", 13 | "Intended Audience :: System Administrators", 14 | "Intended Audience :: Science/Research", 15 | "Programming Language :: Python", 16 | "Programming Language :: Python :: 3" 17 | ] 18 | requires-python = ">=3.8" 19 | dependencies = [ 20 | "platformdirs>=2.5", 21 | "traitlets>=5.3", 22 | "pywin32>=300 ; sys_platform == 'win32' and platform_python_implementation != 'PyPy'" 23 | ] 24 | dynamic = ["version"] 25 | 26 | [[project.authors]] 27 | name = "Jupyter Development Team" 28 | email = "jupyter@googlegroups.org" 29 | 30 | [project.readme] 31 | text = "There is no reason to install this package on its own." 32 | content-type = "text/plain" 33 | 34 | [project.urls] 35 | Homepage = "https://jupyter.org" 36 | Documentation = "https://jupyter-core.readthedocs.io/" 37 | Source = "https://github.com/jupyter/jupyter_core" 38 | Tracker = "https://github.com/jupyter/jupyter_core/issues" 39 | 40 | [project.optional-dependencies] 41 | test = [ 42 | "ipykernel", 43 | "pre-commit", 44 | "pytest<9", 45 | "pytest-cov", 46 | "pytest-timeout" 47 | ] 48 | docs = [ 49 | "intersphinx_registry", 50 | "myst-parser", 51 | "pydata_sphinx_theme", 52 | "sphinx-autodoc-typehints", 53 | "sphinxcontrib_spelling", 54 | "traitlets", 55 | ] 56 | 57 | [project.scripts] 58 | jupyter = "jupyter_core.command:main" 59 | jupyter-migrate = "jupyter_core.migrate:main" 60 | jupyter-troubleshoot = "jupyter_core.troubleshoot:main" 61 | 62 | [tool.hatch.version] 63 | path = "jupyter_core/version.py" 64 | 65 | [tool.hatch.build.force-include] 66 | "./jupyter.py" = "jupyter.py" 67 | 68 | [tool.hatch.envs.docs] 69 | features = ["docs"] 70 | [tool.hatch.envs.docs.env-vars] 71 | SPHINXOPTS = "-W -n" 72 | [tool.hatch.envs.docs.scripts] 73 | build = "make -C docs html" 74 | api = "sphinx-apidoc -o docs/api -f -E jupyter_core jupyter_core/tests/*" 75 | 76 | 
[tool.hatch.envs.test] 77 | features = ["test"] 78 | [tool.hatch.envs.test.scripts] 79 | test = "python -m pytest -vv {args}" 80 | nowarn = "test -W default {args}" 81 | 82 | [tool.hatch.envs.cov] 83 | features = ["test"] 84 | dependencies = ["coverage", "pytest-cov"] 85 | [tool.hatch.envs.cov.scripts] 86 | test = "python -m pytest -vv --cov jupyter_core --cov-branch --cov-report term-missing:skip-covered {args}" 87 | nowarn = "test -W default {args}" 88 | 89 | [tool.hatch.envs.typing] 90 | dependencies = ["pre-commit"] 91 | detached = true 92 | [tool.hatch.envs.typing.scripts] 93 | test = "pre-commit run --all-files --hook-stage manual mypy" 94 | 95 | [tool.hatch.envs.lint] 96 | dependencies = ["pre-commit"] 97 | detached = true 98 | [tool.hatch.envs.lint.scripts] 99 | build = [ 100 | "pre-commit run --all-files ruff", 101 | "pre-commit run --all-files ruff-format" 102 | ] 103 | 104 | [tool.mypy] 105 | files = "jupyter_core" 106 | python_version = "3.8" 107 | strict = true 108 | enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] 109 | warn_unreachable = true 110 | disallow_incomplete_defs = true 111 | disallow_untyped_defs = true 112 | warn_redundant_casts = true 113 | disallow_untyped_calls = true 114 | 115 | [tool.pytest.ini_options] 116 | minversion = "7.0" 117 | xfail_strict = true 118 | log_cli_level = "info" 119 | addopts = [ 120 | "-raXs", "--durations=10", "--color=yes", "--doctest-modules", 121 | "--showlocals", "--strict-markers", "--strict-config", 122 | "--ignore-glob=tests/dotipython*" 123 | ] 124 | testpaths = [ 125 | "tests/" 126 | ] 127 | filterwarnings= [ 128 | # Fail on warnings 129 | "error", 130 | # Expected internal warnings 131 | "module:Jupyter is migrating its paths to use standard platformdirs:DeprecationWarning", 132 | ] 133 | 134 | [tool.coverage.run] 135 | relative_files = true 136 | source = ["jupyter_core"] 137 | 138 | [tool.coverage.report] 139 | exclude_lines = [ 140 | "pragma: no cover", 141 | "def 
__repr__", 142 | "if self.debug:", 143 | "if settings.DEBUG", 144 | "raise AssertionError", 145 | "raise NotImplementedError", 146 | "if 0:", 147 | "if __name__ == .__main__.:", 148 | "class .*\bProtocol\\):", 149 | "@(abc\\.)?abstractmethod", 150 | ] 151 | 152 | [tool.ruff] 153 | line-length = 100 154 | 155 | [tool.ruff.lint] 156 | extend-select = [ 157 | "B", # flake8-bugbear 158 | "I", # isort 159 | "ARG", # flake8-unused-arguments 160 | "C4", # flake8-comprehensions 161 | "EM", # flake8-errmsg 162 | "ICN", # flake8-import-conventions 163 | "G", # flake8-logging-format 164 | "PGH", # pygrep-hooks 165 | "PIE", # flake8-pie 166 | "PL", # pylint 167 | "PT", # flake8-pytest-style 168 | "PTH", # flake8-use-pathlib 169 | "RET", # flake8-return 170 | "RUF", # Ruff-specific 171 | "SIM", # flake8-simplify 172 | "T20", # flake8-print 173 | "UP", # pyupgrade 174 | "YTT", # flake8-2020 175 | "EXE", # flake8-executable 176 | "NPY", # NumPy specific rules 177 | "PD", # pandas-vet 178 | "PYI", # flake8-pyi 179 | "S", # flake8-bandit 180 | ] 181 | ignore = [ 182 | "E501", # Line too long (158 > 100 characters) 183 | "PLR0912",# Too many branches 184 | "PLR0915",# Too many statements 185 | "PLR2004",# Magic value used in comparison, consider replacing `...` with a constant variable 186 | "RUF012", # Mutable class attributes should be annotated 187 | "S101", # Use of assert 188 | "SIM103", # Return the condition directly 189 | "SIM105", # Use `contextlib.suppress(...)` 190 | "SIM108", # Use ternary operator 191 | "UP007", # Use `X | Y` for type annotations" 192 | ] 193 | isort.required-imports = ["from __future__ import annotations"] 194 | 195 | [tool.ruff.lint.per-file-ignores] 196 | # B011 Do not call assert False since python -O removes these calls 197 | # F841 local variable 'foo' is assigned to but never used 198 | # C408 Unnecessary `dict` call 199 | # E402 Module level import not at top of file 200 | # T201 `print` found 201 | # B007 Loop control variable `i` not used 
within the loop body. 202 | # N802 Function name `assertIn` should be lowercase 203 | # PLR2004 Magic value used in comparison, consider replacing b'WITNESS A' with a constant variable 204 | # S603 `subprocess` call: check for execution of untrusted input 205 | "tests/*" = ["B011", "F841", "C408", "E402", "T201", "B007", "N802", "S", "PTH", "ARG0"] 206 | # F821 Undefined name `get_config` 207 | "tests/**/profile_default/*_config.py" = ["F821"] 208 | # T201 `print` found 209 | "jupyter_core/application.py" = ["T201"] 210 | "jupyter_core/command.py" = ["T201"] 211 | "jupyter_core/troubleshoot.py" = ["T201"] 212 | # N802 Function name `SetFileSecurity` should be lowercase 213 | "jupyter_core/paths.py" = ["N802", "N803", "N806"] 214 | # C901 Function is too complex 215 | "jupyter_core/migrate.py" = ["C901"] # `migrate_static_custom` is too complex (11 > 10) 216 | 217 | [tool.interrogate] 218 | ignore-init-module=true 219 | ignore-private=true 220 | ignore-semiprivate=true 221 | ignore-property-decorators=true 222 | ignore-nested-functions=true 223 | ignore-nested-classes=true 224 | ignore-overloaded-functions=true 225 | fail-under=100 226 | exclude = ["docs", "tests"] 227 | 228 | [tool.check-wheel-contents] 229 | toplevel = ["jupyter_core/", "jupyter.py"] 230 | ignore = ["W002"] 231 | -------------------------------------------------------------------------------- /scripts/jupyter: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Launch the root jupyter command""" 3 | 4 | from __future__ import annotations 5 | 6 | from jupyter_core.command import main 7 | 8 | if __name__ == "__main__": 9 | main() 10 | -------------------------------------------------------------------------------- /scripts/jupyter-migrate: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # PYTHON_ARGCOMPLETE_OK 3 | """Migrate Jupyter config from IPython < 4.0""" 4 | 5 | from 
__future__ import annotations 6 | 7 | from jupyter_core.migrate import main 8 | 9 | if __name__ == "__main__": 10 | main() 11 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyter/jupyter_core/ad6b4aea233a9634ffcd6ad553ecd63129ab5f6e/tests/__init__.py -------------------------------------------------------------------------------- /tests/dotipython/nbextensions/myext.js: -------------------------------------------------------------------------------- 1 | var hello; 2 | -------------------------------------------------------------------------------- /tests/dotipython/profile_default/ipython_kernel_config.py: -------------------------------------------------------------------------------- 1 | # Configuration file for ipython-kernel. 2 | from __future__ import annotations 3 | 4 | c = get_config() 5 | 6 | # ------------------------------------------------------------------------------ 7 | # IPKernelApp configuration 8 | # ------------------------------------------------------------------------------ 9 | 10 | # IPython: an enhanced interactive Python shell. 11 | 12 | # IPKernelApp will inherit config from: BaseIPythonApplication, Application, 13 | # InteractiveShellApp, ConnectionFileMixin 14 | 15 | # Should variables loaded at startup (by startup files, exec_lines, etc.) be 16 | # hidden from tools like %who? 17 | # c.IPKernelApp.hide_initial_ns = True 18 | 19 | # The importstring for the DisplayHook factory 20 | # c.IPKernelApp.displayhook_class = 'IPython.kernel.zmq.displayhook.ZMQDisplayHook' 21 | 22 | # A list of dotted module names of IPython extensions to load. 23 | # c.IPKernelApp.extensions = [] 24 | 25 | # Execute the given command string. 
26 | # c.IPKernelApp.code_to_run = '' 27 | 28 | # redirect stderr to the null device 29 | # c.IPKernelApp.no_stderr = False 30 | 31 | # The date format used by logging formatters for %(asctime)s 32 | # c.IPKernelApp.log_datefmt = '%Y-%m-%d %H:%M:%S' 33 | 34 | # Whether to create profile dir if it doesn't exist 35 | # c.IPKernelApp.auto_create = False 36 | 37 | # Reraise exceptions encountered loading IPython extensions? 38 | # c.IPKernelApp.reraise_ipython_extension_failures = False 39 | 40 | # Set the log level by value or name. 41 | # c.IPKernelApp.log_level = 30 42 | 43 | # Run the file referenced by the PYTHONSTARTUP environment variable at IPython 44 | # startup. 45 | # c.IPKernelApp.exec_PYTHONSTARTUP = True 46 | 47 | # Pre-load matplotlib and numpy for interactive use, selecting a particular 48 | # matplotlib backend and loop integration. 49 | # c.IPKernelApp.pylab = None 50 | 51 | # Run the module as a script. 52 | # c.IPKernelApp.module_to_run = '' 53 | 54 | # The importstring for the OutStream factory 55 | # c.IPKernelApp.outstream_class = 'IPython.kernel.zmq.iostream.OutStream' 56 | 57 | # dotted module name of an IPython extension to load. 58 | # c.IPKernelApp.extra_extension = '' 59 | 60 | # Create a massive crash report when IPython encounters what may be an internal 61 | # error. The default is to append a short message to the usual traceback 62 | # c.IPKernelApp.verbose_crash = False 63 | 64 | # Whether to overwrite existing config files when copying 65 | # c.IPKernelApp.overwrite = False 66 | 67 | # The IPython profile to use. 68 | # c.IPKernelApp.profile = 'default' 69 | 70 | # List of files to run at IPython startup. 71 | # c.IPKernelApp.exec_files = [] 72 | 73 | # The Logging format template 74 | # c.IPKernelApp.log_format = '[%(name)s]%(highlevel)s %(message)s' 75 | 76 | # Whether to install the default config files into the profile dir. 
If a new 77 | # profile is being created, and IPython contains config files for that profile, 78 | # then they will be staged into the new directory. Otherwise, default config 79 | # files will be automatically generated. 80 | # c.IPKernelApp.copy_config_files = False 81 | 82 | # set the stdin (ROUTER) port [default: random] 83 | # c.IPKernelApp.stdin_port = 0 84 | 85 | # Path to an extra config file to load. 86 | # 87 | # If specified, load this config file in addition to any other IPython config. 88 | # c.IPKernelApp.extra_config_file = '' 89 | 90 | # lines of code to run at IPython startup. 91 | # c.IPKernelApp.exec_lines = [] 92 | 93 | # set the control (ROUTER) port [default: random] 94 | # c.IPKernelApp.control_port = 0 95 | 96 | # set the heartbeat port [default: random] 97 | # c.IPKernelApp.hb_port = 0 98 | 99 | # Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx', 100 | # 'pyglet', 'qt', 'qt5', 'tk', 'wx'). 101 | # c.IPKernelApp.gui = None 102 | 103 | # A file to be run 104 | # c.IPKernelApp.file_to_run = '' 105 | 106 | # The name of the IPython directory. This directory is used for logging 107 | # configuration (through profiles), history storage, etc. The default is usually 108 | # $HOME/.ipython. This option can also be specified through the environment 109 | # variable IPYTHONDIR. 110 | # c.IPKernelApp.ipython_dir = '' 111 | 112 | # kill this process if its parent dies. On Windows, the argument specifies the 113 | # HANDLE of the parent process, otherwise it is simply boolean. 114 | # c.IPKernelApp.parent_handle = 0 115 | 116 | # Configure matplotlib for interactive use with the default matplotlib backend. 
117 | # c.IPKernelApp.matplotlib = None 118 | 119 | # set the iopub (PUB) port [default: random] 120 | # c.IPKernelApp.iopub_port = 0 121 | 122 | # redirect stdout to the null device 123 | # c.IPKernelApp.no_stdout = False 124 | 125 | # 126 | # c.IPKernelApp.transport = 'tcp' 127 | 128 | # JSON file in which to store connection info [default: kernel-.json] 129 | # 130 | # This file will contain the IP, ports, and authentication key needed to connect 131 | # clients to this kernel. By default, this file will be created in the security 132 | # dir of the current profile, but can be specified by absolute path. 133 | # c.IPKernelApp.connection_file = '' 134 | 135 | # The Kernel subclass to be used. 136 | # 137 | # This should allow easy re-use of the IPKernelApp entry point to configure and 138 | # launch kernels other than IPython's own. 139 | # c.IPKernelApp.kernel_class = 140 | 141 | # ONLY USED ON WINDOWS Interrupt this process when the parent is signaled. 142 | # c.IPKernelApp.interrupt = 0 143 | 144 | # set the shell (ROUTER) port [default: random] 145 | # c.IPKernelApp.shell_port = 0 146 | 147 | # If true, IPython will populate the user namespace with numpy, pylab, etc. and 148 | # an ``import *`` is done from numpy and pylab, when using pylab mode. 149 | # 150 | # When False, pylab mode should not import any names into the user namespace. 151 | # c.IPKernelApp.pylab_import_all = True 152 | 153 | # Set the kernel's IP address [default localhost]. If the IP address is 154 | # something other than localhost, then Consoles on other machines will be able 155 | # to connect to the Kernel, so be careful! 
156 | # c.IPKernelApp.ip = '' 157 | 158 | # ------------------------------------------------------------------------------ 159 | # IPythonKernel configuration 160 | # ------------------------------------------------------------------------------ 161 | 162 | # IPythonKernel will inherit config from: Kernel 163 | 164 | # 165 | # c.IPythonKernel._execute_sleep = 0.0005 166 | 167 | # Whether to use appnope for compatibility with OS X App Nap. 168 | # 169 | # Only affects OS X >= 10.9. 170 | # c.IPythonKernel._darwin_app_nap = True 171 | 172 | # 173 | # c.IPythonKernel._poll_interval = 0.05 174 | 175 | # ------------------------------------------------------------------------------ 176 | # ZMQInteractiveShell configuration 177 | # ------------------------------------------------------------------------------ 178 | 179 | # A subclass of InteractiveShell for ZMQ. 180 | 181 | # ZMQInteractiveShell will inherit config from: InteractiveShell 182 | 183 | # 184 | # c.ZMQInteractiveShell.object_info_string_level = 0 185 | 186 | # 187 | # c.ZMQInteractiveShell.separate_out = '' 188 | 189 | # Automatically call the pdb debugger after every exception. 190 | # c.ZMQInteractiveShell.pdb = False 191 | 192 | # 193 | # c.ZMQInteractiveShell.ipython_dir = '' 194 | 195 | # 196 | # c.ZMQInteractiveShell.history_length = 10000 197 | 198 | # 199 | # c.ZMQInteractiveShell.readline_remove_delims = '-/~' 200 | 201 | # If True, anything that would be passed to the pager will be displayed as 202 | # regular output instead. 203 | # c.ZMQInteractiveShell.display_page = False 204 | 205 | # Deprecated, use PromptManager.in2_template 206 | # c.ZMQInteractiveShell.prompt_in2 = ' .\\D.: ' 207 | 208 | # 209 | # c.ZMQInteractiveShell.separate_in = '\n' 210 | 211 | # Start logging to the default log file in overwrite mode. Use `logappend` to 212 | # specify a log file to **append** logs to. 213 | # c.ZMQInteractiveShell.logstart = False 214 | 215 | # Set the size of the output cache. 
The default is 1000, you can change it 216 | # permanently in your config file. Setting it to 0 completely disables the 217 | # caching system, and the minimum value accepted is 20 (if you provide a value 218 | # less than 20, it is reset to 0 and a warning is issued). This limit is 219 | # defined because otherwise you'll spend more time re-flushing a too small cache 220 | # than working 221 | # c.ZMQInteractiveShell.cache_size = 1000 222 | 223 | # 224 | # c.ZMQInteractiveShell.wildcards_case_sensitive = True 225 | 226 | # The name of the logfile to use. 227 | # c.ZMQInteractiveShell.logfile = '' 228 | 229 | # 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run 230 | # interactively (displaying output from expressions). 231 | # c.ZMQInteractiveShell.ast_node_interactivity = 'last_expr' 232 | 233 | # 234 | # c.ZMQInteractiveShell.debug = False 235 | 236 | # 237 | # c.ZMQInteractiveShell.quiet = False 238 | 239 | # Save multi-line entries as one entry in readline history 240 | # c.ZMQInteractiveShell.multiline_history = True 241 | 242 | # Deprecated, use PromptManager.in_template 243 | # c.ZMQInteractiveShell.prompt_in1 = 'In [\\#]: ' 244 | 245 | # Enable magic commands to be called without the leading %. 246 | # c.ZMQInteractiveShell.automagic = True 247 | 248 | # The part of the banner to be printed before the profile 249 | # c.ZMQInteractiveShell.banner1 = 'Python 3.4.3 |Continuum Analytics, Inc.| (default, Mar 6 2015, 12:07:41) \nType "copyright", "credits" or "license" for more information.\n\nIPython 3.1.0 -- An enhanced Interactive Python.\nAnaconda is brought to you by Continuum Analytics.\nPlease check out: http://continuum.io/thanks and https://binstar.org\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? 
-> Details about \'object\', use \'object??\' for extra details.\n' 250 | 251 | # Make IPython automatically call any callable object even if you didn't type 252 | # explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically. 253 | # The value can be '0' to disable the feature, '1' for 'smart' autocall, where 254 | # it is not applied if there are no more arguments on the line, and '2' for 255 | # 'full' autocall, where all callable objects are automatically called (even if 256 | # no arguments are present). 257 | # c.ZMQInteractiveShell.autocall = 0 258 | 259 | # 260 | # c.ZMQInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard'] 261 | 262 | # Set the color scheme (NoColor, Linux, or LightBG). 263 | # c.ZMQInteractiveShell.colors = 'LightBG' 264 | 265 | # Use colors for displaying information about objects. Because this information 266 | # is passed through a pager (like 'less'), and some pagers get confused with 267 | # color codes, this capability can be turned off. 268 | # c.ZMQInteractiveShell.color_info = True 269 | 270 | # Show rewritten input, e.g. for autocall. 271 | # c.ZMQInteractiveShell.show_rewritten_input = True 272 | 273 | # 274 | # c.ZMQInteractiveShell.xmode = 'Context' 275 | 276 | # 277 | # c.ZMQInteractiveShell.separate_out2 = '' 278 | 279 | # The part of the banner to be printed after the profile 280 | # c.ZMQInteractiveShell.banner2 = '' 281 | 282 | # Start logging to the given file in append mode. Use `logfile` to specify a log 283 | # file to **overwrite** logs to. 
284 | # c.ZMQInteractiveShell.logappend = '' 285 | 286 | # Don't call post-execute functions that have failed in the past. 287 | # c.ZMQInteractiveShell.disable_failing_post_execute = False 288 | 289 | # Deprecated, use PromptManager.out_template 290 | # c.ZMQInteractiveShell.prompt_out = 'Out[\\#]: ' 291 | 292 | # Enable deep (recursive) reloading by default. IPython can use the deep_reload 293 | # module which reloads changes in modules recursively (it replaces the reload() 294 | # function, so you don't need to change anything to use it). deep_reload() 295 | # forces a full reload of modules whose code may have changed, which the default 296 | # reload() function does not. When deep_reload is off, IPython will use the 297 | # normal reload(), but deep_reload will still be available as dreload(). 298 | # c.ZMQInteractiveShell.deep_reload = False 299 | 300 | # Deprecated, use PromptManager.justify 301 | # c.ZMQInteractiveShell.prompts_pad_left = True 302 | 303 | # A list of ast.NodeTransformer subclass instances, which will be applied to 304 | # user input before code is run. 305 | # c.ZMQInteractiveShell.ast_transformers = [] 306 | 307 | # ------------------------------------------------------------------------------ 308 | # ProfileDir configuration 309 | # ------------------------------------------------------------------------------ 310 | 311 | # An object to manage the profile directory and its resources. 312 | # 313 | # The profile directory is used by all IPython applications, to manage 314 | # configuration, logging and security. 315 | # 316 | # This object knows how to find, create and manage these directories. This 317 | # should be used by any code that wants to handle profiles. 318 | 319 | # Set the profile location directly. This overrides the logic used by the 320 | # `profile` option. 
321 | # c.ProfileDir.location = '' 322 | 323 | # ------------------------------------------------------------------------------ 324 | # Session configuration 325 | # ------------------------------------------------------------------------------ 326 | 327 | # Object for handling serialization and sending of messages. 328 | # 329 | # The Session object handles building messages and sending them with ZMQ sockets 330 | # or ZMQStream objects. Objects can communicate with each other over the 331 | # network via Session objects, and only need to work with the dict-based IPython 332 | # message spec. The Session will handle serialization/deserialization, security, 333 | # and metadata. 334 | # 335 | # Sessions support configurable serialization via packer/unpacker traits, and 336 | # signing with HMAC digests via the key/keyfile traits. 337 | # 338 | # Parameters ---------- 339 | # 340 | # debug : bool 341 | # whether to trigger extra debugging statements 342 | # packer/unpacker : str : 'json', 'pickle' or import_string 343 | # importstrings for methods to serialize message parts. If just 344 | # 'json' or 'pickle', predefined JSON and pickle packers will be used. 345 | # Otherwise, the entire importstring must be used. 346 | # 347 | # The functions must accept at least valid JSON input, and output *bytes*. 348 | # 349 | # For example, to use msgpack: 350 | # packer = 'msgpack.packb', unpacker='msgpack.unpackb' 351 | # pack/unpack : callables 352 | # You can also set the pack/unpack callables for serialization directly. 353 | # session : bytes 354 | # the ID of this Session object. The default is to generate a new UUID. 355 | # username : unicode 356 | # username added to message headers. The default is to ask the OS. 357 | # key : bytes 358 | # The key used to initialize an HMAC signature. If unset, messages 359 | # will not be signed or checked. 360 | # keyfile : filepath 361 | # The file containing a key. 
If this is set, `key` will be initialized 362 | # to the contents of the file. 363 | 364 | # The digest scheme used to construct the message signatures. Must have the form 365 | # 'hmac-HASH'. 366 | # c.Session.signature_scheme = 'hmac-sha256' 367 | 368 | # The maximum number of digests to remember. 369 | # 370 | # The digest history will be culled when it exceeds this value. 371 | # c.Session.digest_history_size = 65536 372 | 373 | # The name of the unpacker for unserializing messages. Only used with custom 374 | # functions for `packer`. 375 | # c.Session.unpacker = 'json' 376 | 377 | # The name of the packer for serializing messages. Should be one of 'json', 378 | # 'pickle', or an import name for a custom callable serializer. 379 | # c.Session.packer = 'json' 380 | 381 | # Username for the Session. Default is your system username. 382 | # c.Session.username = 'minrk' 383 | 384 | # Debug output in the Session 385 | # c.Session.debug = False 386 | 387 | # path to file containing execution key. 388 | # c.Session.keyfile = '' 389 | 390 | # The maximum number of items for a container to be introspected for custom 391 | # serialization. Containers larger than this are pickled outright. 392 | # c.Session.item_threshold = 64 393 | 394 | # Threshold (in bytes) beyond which an object's buffer should be extracted to 395 | # avoid pickling. 396 | # c.Session.buffer_threshold = 1024 397 | 398 | # The UUID identifying this session. 399 | # c.Session.session = '' 400 | 401 | # Threshold (in bytes) beyond which a buffer should be sent without copying. 402 | # c.Session.copy_threshold = 65536 403 | 404 | # execution key, for signing messages. 405 | # c.Session.key = b'' 406 | 407 | # Metadata dictionary, which serves as the default top-level metadata dict for 408 | # each message. 
409 | # c.Session.metadata = {} 410 | -------------------------------------------------------------------------------- /tests/dotipython/profile_default/ipython_nbconvert_config.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | c.NbConvertApp.post_processors = [] 4 | -------------------------------------------------------------------------------- /tests/dotipython/profile_default/ipython_notebook_config.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | c.NotebookApp.open_browser = False 4 | -------------------------------------------------------------------------------- /tests/dotipython/profile_default/static/custom/custom.css: -------------------------------------------------------------------------------- 1 | /* 2 | Placeholder for custom user CSS 3 | 4 | mainly to be overridden in profile/static/custom/custom.css 5 | 6 | This will always be an empty file in IPython 7 | */ 8 | -------------------------------------------------------------------------------- /tests/dotipython/profile_default/static/custom/custom.js: -------------------------------------------------------------------------------- 1 | // leave at least 2 line with only a star on it below, or doc generation fails 2 | /** 3 | * 4 | * 5 | * Placeholder for custom user javascript 6 | * mainly to be overridden in profile/static/custom/custom.js 7 | * This will always be an empty file in IPython 8 | * 9 | * User could add any javascript in the `profile/static/custom/custom.js` file. 10 | * It will be executed by the ipython notebook at load time. 11 | * 12 | * Same thing with `profile/static/custom/custom.css` to inject custom css into the notebook. 13 | * 14 | * 15 | * The object available at load time depend on the version of IPython in use. 16 | * there is no guaranties of API stability. 
17 | * 18 | * The example below explain the principle, and might not be valid. 19 | * 20 | * Instances are created after the loading of this file and might need to be accessed using events: 21 | * define([ 22 | * 'base/js/namespace', 23 | * 'base/js/events' 24 | * ], function(IPython, events) { 25 | * events.on("app_initialized.NotebookApp", function () { 26 | * IPython.keyboard_manager.... 27 | * }); 28 | * }); 29 | * 30 | * __Example 1:__ 31 | * 32 | * Create a custom button in toolbar that execute `%qtconsole` in kernel 33 | * and hence open a qtconsole attached to the same kernel as the current notebook 34 | * 35 | * define([ 36 | * 'base/js/namespace', 37 | * 'base/js/events' 38 | * ], function(IPython, events) { 39 | * events.on('app_initialized.NotebookApp', function(){ 40 | * IPython.toolbar.add_buttons_group([ 41 | * { 42 | * 'label' : 'run qtconsole', 43 | * 'icon' : 'icon-terminal', // select your icon from http://fortawesome.github.io/Font-Awesome/icons 44 | * 'callback': function () { 45 | * IPython.notebook.kernel.execute('%qtconsole') 46 | * } 47 | * } 48 | * // add more button here if needed. 49 | * ]); 50 | * }); 51 | * }); 52 | * 53 | * __Example 2:__ 54 | * 55 | * At the completion of the dashboard loading, load an unofficial javascript extension 56 | * that is installed in profile/static/custom/ 57 | * 58 | * define([ 59 | * 'base/js/events' 60 | * ], function(events) { 61 | * events.on('app_initialized.DashboardApp', function(){ 62 | * require(['custom/unofficial_extension.js']) 63 | * }); 64 | * }); 65 | * 66 | * __Example 3:__ 67 | * 68 | * Use `jQuery.getScript(url [, success(script, textStatus, jqXHR)] );` 69 | * to load custom script into the notebook. 70 | * 71 | * // to load the metadata ui extension example. 
72 | * $.getScript('/static/notebook/js/celltoolbarpresets/example.js'); 73 | * // or 74 | * // to load the metadata ui extension to control slideshow mode / reveal js for nbconvert 75 | * $.getScript('/static/notebook/js/celltoolbarpresets/slideshow.js'); 76 | * 77 | * 78 | * @module IPython 79 | * @namespace IPython 80 | * @class customjs 81 | * @static 82 | */ 83 | -------------------------------------------------------------------------------- /tests/dotipython_empty/profile_default/ipython_kernel_config.py: -------------------------------------------------------------------------------- 1 | # Configuration file for ipython-kernel. 2 | from __future__ import annotations 3 | 4 | c = get_config() 5 | 6 | # ------------------------------------------------------------------------------ 7 | # IPKernelApp configuration 8 | # ------------------------------------------------------------------------------ 9 | 10 | # IPython: an enhanced interactive Python shell. 11 | 12 | # IPKernelApp will inherit config from: BaseIPythonApplication, Application, 13 | # InteractiveShellApp, ConnectionFileMixin 14 | 15 | # Should variables loaded at startup (by startup files, exec_lines, etc.) be 16 | # hidden from tools like %who? 17 | # c.IPKernelApp.hide_initial_ns = True 18 | 19 | # The importstring for the DisplayHook factory 20 | # c.IPKernelApp.displayhook_class = 'IPython.kernel.zmq.displayhook.ZMQDisplayHook' 21 | 22 | # A list of dotted module names of IPython extensions to load. 23 | # c.IPKernelApp.extensions = [] 24 | 25 | # Execute the given command string. 26 | # c.IPKernelApp.code_to_run = '' 27 | 28 | # redirect stderr to the null device 29 | # c.IPKernelApp.no_stderr = False 30 | 31 | # The date format used by logging formatters for %(asctime)s 32 | # c.IPKernelApp.log_datefmt = '%Y-%m-%d %H:%M:%S' 33 | 34 | # Whether to create profile dir if it doesn't exist 35 | # c.IPKernelApp.auto_create = False 36 | 37 | # Reraise exceptions encountered loading IPython extensions? 
38 | # c.IPKernelApp.reraise_ipython_extension_failures = False 39 | 40 | # Set the log level by value or name. 41 | # c.IPKernelApp.log_level = 30 42 | 43 | # Run the file referenced by the PYTHONSTARTUP environment variable at IPython 44 | # startup. 45 | # c.IPKernelApp.exec_PYTHONSTARTUP = True 46 | 47 | # Pre-load matplotlib and numpy for interactive use, selecting a particular 48 | # matplotlib backend and loop integration. 49 | # c.IPKernelApp.pylab = None 50 | 51 | # Run the module as a script. 52 | # c.IPKernelApp.module_to_run = '' 53 | 54 | # The importstring for the OutStream factory 55 | # c.IPKernelApp.outstream_class = 'IPython.kernel.zmq.iostream.OutStream' 56 | 57 | # dotted module name of an IPython extension to load. 58 | # c.IPKernelApp.extra_extension = '' 59 | 60 | # Create a massive crash report when IPython encounters what may be an internal 61 | # error. The default is to append a short message to the usual traceback 62 | # c.IPKernelApp.verbose_crash = False 63 | 64 | # Whether to overwrite existing config files when copying 65 | # c.IPKernelApp.overwrite = False 66 | 67 | # The IPython profile to use. 68 | # c.IPKernelApp.profile = 'default' 69 | 70 | # List of files to run at IPython startup. 71 | # c.IPKernelApp.exec_files = [] 72 | 73 | # The Logging format template 74 | # c.IPKernelApp.log_format = '[%(name)s]%(highlevel)s %(message)s' 75 | 76 | # Whether to install the default config files into the profile dir. If a new 77 | # profile is being created, and IPython contains config files for that profile, 78 | # then they will be staged into the new directory. Otherwise, default config 79 | # files will be automatically generated. 80 | # c.IPKernelApp.copy_config_files = False 81 | 82 | # set the stdin (ROUTER) port [default: random] 83 | # c.IPKernelApp.stdin_port = 0 84 | 85 | # Path to an extra config file to load. 86 | # 87 | # If specified, load this config file in addition to any other IPython config. 
88 | # c.IPKernelApp.extra_config_file = '' 89 | 90 | # lines of code to run at IPython startup. 91 | # c.IPKernelApp.exec_lines = [] 92 | 93 | # set the control (ROUTER) port [default: random] 94 | # c.IPKernelApp.control_port = 0 95 | 96 | # set the heartbeat port [default: random] 97 | # c.IPKernelApp.hb_port = 0 98 | 99 | # Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx', 100 | # 'pyglet', 'qt', 'qt5', 'tk', 'wx'). 101 | # c.IPKernelApp.gui = None 102 | 103 | # A file to be run 104 | # c.IPKernelApp.file_to_run = '' 105 | 106 | # The name of the IPython directory. This directory is used for logging 107 | # configuration (through profiles), history storage, etc. The default is usually 108 | # $HOME/.ipython. This option can also be specified through the environment 109 | # variable IPYTHONDIR. 110 | # c.IPKernelApp.ipython_dir = '' 111 | 112 | # kill this process if its parent dies. On Windows, the argument specifies the 113 | # HANDLE of the parent process, otherwise it is simply boolean. 114 | # c.IPKernelApp.parent_handle = 0 115 | 116 | # Configure matplotlib for interactive use with the default matplotlib backend. 117 | # c.IPKernelApp.matplotlib = None 118 | 119 | # set the iopub (PUB) port [default: random] 120 | # c.IPKernelApp.iopub_port = 0 121 | 122 | # redirect stdout to the null device 123 | # c.IPKernelApp.no_stdout = False 124 | 125 | # 126 | # c.IPKernelApp.transport = 'tcp' 127 | 128 | # JSON file in which to store connection info [default: kernel-.json] 129 | # 130 | # This file will contain the IP, ports, and authentication key needed to connect 131 | # clients to this kernel. By default, this file will be created in the security 132 | # dir of the current profile, but can be specified by absolute path. 133 | # c.IPKernelApp.connection_file = '' 134 | 135 | # The Kernel subclass to be used. 
136 | # 137 | # This should allow easy re-use of the IPKernelApp entry point to configure and 138 | # launch kernels other than IPython's own. 139 | # c.IPKernelApp.kernel_class = 140 | 141 | # ONLY USED ON WINDOWS Interrupt this process when the parent is signaled. 142 | # c.IPKernelApp.interrupt = 0 143 | 144 | # set the shell (ROUTER) port [default: random] 145 | # c.IPKernelApp.shell_port = 0 146 | 147 | # If true, IPython will populate the user namespace with numpy, pylab, etc. and 148 | # an ``import *`` is done from numpy and pylab, when using pylab mode. 149 | # 150 | # When False, pylab mode should not import any names into the user namespace. 151 | # c.IPKernelApp.pylab_import_all = True 152 | 153 | # Set the kernel's IP address [default localhost]. If the IP address is 154 | # something other than localhost, then Consoles on other machines will be able 155 | # to connect to the Kernel, so be careful! 156 | # c.IPKernelApp.ip = '' 157 | 158 | # ------------------------------------------------------------------------------ 159 | # IPythonKernel configuration 160 | # ------------------------------------------------------------------------------ 161 | 162 | # IPythonKernel will inherit config from: Kernel 163 | 164 | # 165 | # c.IPythonKernel._execute_sleep = 0.0005 166 | 167 | # Whether to use appnope for compatibility with OS X App Nap. 168 | # 169 | # Only affects OS X >= 10.9. 170 | # c.IPythonKernel._darwin_app_nap = True 171 | 172 | # 173 | # c.IPythonKernel._poll_interval = 0.05 174 | 175 | # ------------------------------------------------------------------------------ 176 | # ZMQInteractiveShell configuration 177 | # ------------------------------------------------------------------------------ 178 | 179 | # A subclass of InteractiveShell for ZMQ. 
180 | 181 | # ZMQInteractiveShell will inherit config from: InteractiveShell 182 | 183 | # 184 | # c.ZMQInteractiveShell.object_info_string_level = 0 185 | 186 | # 187 | # c.ZMQInteractiveShell.separate_out = '' 188 | 189 | # Automatically call the pdb debugger after every exception. 190 | # c.ZMQInteractiveShell.pdb = False 191 | 192 | # 193 | # c.ZMQInteractiveShell.ipython_dir = '' 194 | 195 | # 196 | # c.ZMQInteractiveShell.history_length = 10000 197 | 198 | # 199 | # c.ZMQInteractiveShell.readline_remove_delims = '-/~' 200 | 201 | # If True, anything that would be passed to the pager will be displayed as 202 | # regular output instead. 203 | # c.ZMQInteractiveShell.display_page = False 204 | 205 | # Deprecated, use PromptManager.in2_template 206 | # c.ZMQInteractiveShell.prompt_in2 = ' .\\D.: ' 207 | 208 | # 209 | # c.ZMQInteractiveShell.separate_in = '\n' 210 | 211 | # Start logging to the default log file in overwrite mode. Use `logappend` to 212 | # specify a log file to **append** logs to. 213 | # c.ZMQInteractiveShell.logstart = False 214 | 215 | # Set the size of the output cache. The default is 1000, you can change it 216 | # permanently in your config file. Setting it to 0 completely disables the 217 | # caching system, and the minimum value accepted is 20 (if you provide a value 218 | # less than 20, it is reset to 0 and a warning is issued). This limit is 219 | # defined because otherwise you'll spend more time re-flushing a too small cache 220 | # than working 221 | # c.ZMQInteractiveShell.cache_size = 1000 222 | 223 | # 224 | # c.ZMQInteractiveShell.wildcards_case_sensitive = True 225 | 226 | # The name of the logfile to use. 227 | # c.ZMQInteractiveShell.logfile = '' 228 | 229 | # 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run 230 | # interactively (displaying output from expressions). 
231 | # c.ZMQInteractiveShell.ast_node_interactivity = 'last_expr' 232 | 233 | # 234 | # c.ZMQInteractiveShell.debug = False 235 | 236 | # 237 | # c.ZMQInteractiveShell.quiet = False 238 | 239 | # Save multi-line entries as one entry in readline history 240 | # c.ZMQInteractiveShell.multiline_history = True 241 | 242 | # Deprecated, use PromptManager.in_template 243 | # c.ZMQInteractiveShell.prompt_in1 = 'In [\\#]: ' 244 | 245 | # Enable magic commands to be called without the leading %. 246 | # c.ZMQInteractiveShell.automagic = True 247 | 248 | # The part of the banner to be printed before the profile 249 | # c.ZMQInteractiveShell.banner1 = 'Python 3.4.3 |Continuum Analytics, Inc.| (default, Mar 6 2015, 12:07:41) \nType "copyright", "credits" or "license" for more information.\n\nIPython 3.1.0 -- An enhanced Interactive Python.\nAnaconda is brought to you by Continuum Analytics.\nPlease check out: http://continuum.io/thanks and https://binstar.org\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n' 250 | 251 | # Make IPython automatically call any callable object even if you didn't type 252 | # explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically. 253 | # The value can be '0' to disable the feature, '1' for 'smart' autocall, where 254 | # it is not applied if there are no more arguments on the line, and '2' for 255 | # 'full' autocall, where all callable objects are automatically called (even if 256 | # no arguments are present). 
257 | # c.ZMQInteractiveShell.autocall = 0 258 | 259 | # 260 | # c.ZMQInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard'] 261 | 262 | # Set the color scheme (NoColor, Linux, or LightBG). 263 | # c.ZMQInteractiveShell.colors = 'LightBG' 264 | 265 | # Use colors for displaying information about objects. Because this information 266 | # is passed through a pager (like 'less'), and some pagers get confused with 267 | # color codes, this capability can be turned off. 268 | # c.ZMQInteractiveShell.color_info = True 269 | 270 | # Show rewritten input, e.g. for autocall. 271 | # c.ZMQInteractiveShell.show_rewritten_input = True 272 | 273 | # 274 | # c.ZMQInteractiveShell.xmode = 'Context' 275 | 276 | # 277 | # c.ZMQInteractiveShell.separate_out2 = '' 278 | 279 | # The part of the banner to be printed after the profile 280 | # c.ZMQInteractiveShell.banner2 = '' 281 | 282 | # Start logging to the given file in append mode. Use `logfile` to specify a log 283 | # file to **overwrite** logs to. 284 | # c.ZMQInteractiveShell.logappend = '' 285 | 286 | # Don't call post-execute functions that have failed in the past. 287 | # c.ZMQInteractiveShell.disable_failing_post_execute = False 288 | 289 | # Deprecated, use PromptManager.out_template 290 | # c.ZMQInteractiveShell.prompt_out = 'Out[\\#]: ' 291 | 292 | # Enable deep (recursive) reloading by default. IPython can use the deep_reload 293 | # module which reloads changes in modules recursively (it replaces the reload() 294 | # function, so you don't need to change anything to use it). 
deep_reload() 295 | # forces a full reload of modules whose code may have changed, which the default 296 | # reload() function does not. When deep_reload is off, IPython will use the 297 | # normal reload(), but deep_reload will still be available as dreload(). 298 | # c.ZMQInteractiveShell.deep_reload = False 299 | 300 | # Deprecated, use PromptManager.justify 301 | # c.ZMQInteractiveShell.prompts_pad_left = True 302 | 303 | # A list of ast.NodeTransformer subclass instances, which will be applied to 304 | # user input before code is run. 305 | # c.ZMQInteractiveShell.ast_transformers = [] 306 | 307 | # ------------------------------------------------------------------------------ 308 | # ProfileDir configuration 309 | # ------------------------------------------------------------------------------ 310 | 311 | # An object to manage the profile directory and its resources. 312 | # 313 | # The profile directory is used by all IPython applications, to manage 314 | # configuration, logging and security. 315 | # 316 | # This object knows how to find, create and manage these directories. This 317 | # should be used by any code that wants to handle profiles. 318 | 319 | # Set the profile location directly. This overrides the logic used by the 320 | # `profile` option. 321 | # c.ProfileDir.location = '' 322 | 323 | # ------------------------------------------------------------------------------ 324 | # Session configuration 325 | # ------------------------------------------------------------------------------ 326 | 327 | # Object for handling serialization and sending of messages. 328 | # 329 | # The Session object handles building messages and sending them with ZMQ sockets 330 | # or ZMQStream objects. Objects can communicate with each other over the 331 | # network via Session objects, and only need to work with the dict-based IPython 332 | # message spec. The Session will handle serialization/deserialization, security, 333 | # and metadata. 
334 | # 335 | # Sessions support configurable serialization via packer/unpacker traits, and 336 | # signing with HMAC digests via the key/keyfile traits. 337 | # 338 | # Parameters ---------- 339 | # 340 | # debug : bool 341 | # whether to trigger extra debugging statements 342 | # packer/unpacker : str : 'json', 'pickle' or import_string 343 | # importstrings for methods to serialize message parts. If just 344 | # 'json' or 'pickle', predefined JSON and pickle packers will be used. 345 | # Otherwise, the entire importstring must be used. 346 | # 347 | # The functions must accept at least valid JSON input, and output *bytes*. 348 | # 349 | # For example, to use msgpack: 350 | # packer = 'msgpack.packb', unpacker='msgpack.unpackb' 351 | # pack/unpack : callables 352 | # You can also set the pack/unpack callables for serialization directly. 353 | # session : bytes 354 | # the ID of this Session object. The default is to generate a new UUID. 355 | # username : unicode 356 | # username added to message headers. The default is to ask the OS. 357 | # key : bytes 358 | # The key used to initialize an HMAC signature. If unset, messages 359 | # will not be signed or checked. 360 | # keyfile : filepath 361 | # The file containing a key. If this is set, `key` will be initialized 362 | # to the contents of the file. 363 | 364 | # The digest scheme used to construct the message signatures. Must have the form 365 | # 'hmac-HASH'. 366 | # c.Session.signature_scheme = 'hmac-sha256' 367 | 368 | # The maximum number of digests to remember. 369 | # 370 | # The digest history will be culled when it exceeds this value. 371 | # c.Session.digest_history_size = 65536 372 | 373 | # The name of the unpacker for unserializing messages. Only used with custom 374 | # functions for `packer`. 375 | # c.Session.unpacker = 'json' 376 | 377 | # The name of the packer for serializing messages. Should be one of 'json', 378 | # 'pickle', or an import name for a custom callable serializer. 
379 | # c.Session.packer = 'json' 380 | 381 | # Username for the Session. Default is your system username. 382 | # c.Session.username = 'minrk' 383 | 384 | # Debug output in the Session 385 | # c.Session.debug = False 386 | 387 | # path to file containing execution key. 388 | # c.Session.keyfile = '' 389 | 390 | # The maximum number of items for a container to be introspected for custom 391 | # serialization. Containers larger than this are pickled outright. 392 | # c.Session.item_threshold = 64 393 | 394 | # Threshold (in bytes) beyond which an object's buffer should be extracted to 395 | # avoid pickling. 396 | # c.Session.buffer_threshold = 1024 397 | 398 | # The UUID identifying this session. 399 | # c.Session.session = '' 400 | 401 | # Threshold (in bytes) beyond which a buffer should be sent without copying. 402 | # c.Session.copy_threshold = 65536 403 | 404 | # execution key, for signing messages. 405 | # c.Session.key = b'' 406 | 407 | # Metadata dictionary, which serves as the default top-level metadata dict for 408 | # each message. 409 | # c.Session.metadata = {} 410 | -------------------------------------------------------------------------------- /tests/dotipython_empty/profile_default/ipython_notebook_config.py: -------------------------------------------------------------------------------- 1 | # Configuration file for ipython-notebook. 2 | from __future__ import annotations 3 | 4 | c = get_config() 5 | 6 | # ------------------------------------------------------------------------------ 7 | # NotebookApp configuration 8 | # ------------------------------------------------------------------------------ 9 | 10 | # NotebookApp will inherit config from: BaseIPythonApplication, Application 11 | 12 | # Supply SSL options for the tornado HTTPServer. See the tornado docs for 13 | # details. 
14 | # c.NotebookApp.ssl_options = {} 15 | 16 | # The config manager class to use 17 | # c.NotebookApp.config_manager_class = 18 | 19 | # Hashed password to use for web authentication. 20 | # 21 | # To generate, type in a python/IPython shell: 22 | # 23 | # from IPython.lib import passwd; passwd() 24 | # 25 | # The string should be of the form type:salt:hashed-password. 26 | # c.NotebookApp.password = '' 27 | 28 | # The number of additional ports to try if the specified port is not available. 29 | # c.NotebookApp.port_retries = 50 30 | 31 | # The kernel manager class to use. 32 | # c.NotebookApp.kernel_manager_class = 33 | 34 | # The port the notebook server will listen on. 35 | # c.NotebookApp.port = 8888 36 | 37 | # Set the log level by value or name. 38 | # c.NotebookApp.log_level = 30 39 | 40 | # Path to an extra config file to load. 41 | # 42 | # If specified, load this config file in addition to any other IPython config. 43 | # c.NotebookApp.extra_config_file = '' 44 | 45 | # The cluster manager class to use. 46 | # c.NotebookApp.cluster_manager_class = 47 | 48 | # The base URL for the notebook server. 49 | # 50 | # Leading and trailing slashes can be omitted, and will automatically be added. 51 | # c.NotebookApp.base_url = '/' 52 | 53 | # Python modules to load as notebook server extensions. This is an experimental 54 | # API, and may change in future releases. 55 | # c.NotebookApp.server_extensions = [] 56 | 57 | # The login handler class to use. 58 | # c.NotebookApp.login_handler_class = 59 | 60 | # The session manager class to use. 61 | # c.NotebookApp.session_manager_class = 62 | 63 | # Set the Access-Control-Allow-Origin header 64 | # 65 | # Use '*' to allow any origin to access your server. 66 | # 67 | # Takes precedence over allow_origin_pat. 68 | # c.NotebookApp.allow_origin = '' 69 | 70 | # Whether to enable MathJax for typesetting math/TeX 71 | # 72 | # MathJax is the javascript library IPython uses to render math/LaTeX. 
It is 73 | # very large, so you may want to disable it if you have a slow internet 74 | # connection, or for offline use of the notebook. 75 | # 76 | # When disabled, equations etc. will appear as their untransformed TeX source. 77 | # c.NotebookApp.enable_mathjax = True 78 | 79 | # The notebook manager class to use. 80 | # c.NotebookApp.contents_manager_class = 81 | 82 | # The full path to an SSL/TLS certificate file. 83 | # c.NotebookApp.certfile = '' 84 | 85 | # Set the Access-Control-Allow-Credentials: true header 86 | # c.NotebookApp.allow_credentials = False 87 | 88 | # The Logging format template 89 | # c.NotebookApp.log_format = '[%(name)s]%(highlevel)s %(message)s' 90 | 91 | # The base URL for websockets, if it differs from the HTTP server (hint: it 92 | # almost certainly doesn't). 93 | # 94 | # Should be in the form of an HTTP origin: ws[s]://hostname[:port] 95 | # c.NotebookApp.websocket_url = '' 96 | 97 | # Use a regular expression for the Access-Control-Allow-Origin header 98 | # 99 | # Requests from an origin matching the expression will get replies with: 100 | # 101 | # Access-Control-Allow-Origin: origin 102 | # 103 | # where `origin` is the origin of the request. 104 | # 105 | # Ignored if allow_origin is set. 106 | # c.NotebookApp.allow_origin_pat = '' 107 | 108 | # The date format used by logging formatters for %(asctime)s 109 | # c.NotebookApp.log_datefmt = '%Y-%m-%d %H:%M:%S' 110 | 111 | # The logout handler class to use. 112 | # c.NotebookApp.logout_handler_class = 113 | 114 | # The default URL to redirect to from `/` 115 | # c.NotebookApp.default_url = '/tree' 116 | 117 | # The IPython profile to use. 118 | # c.NotebookApp.profile = 'default' 119 | 120 | # extra paths to look for Javascript notebook extensions 121 | # c.NotebookApp.extra_nbextensions_path = [] 122 | 123 | # Specify what command to use to invoke a web browser when opening the notebook. 
124 | # If not specified, the default browser will be determined by the `webbrowser` 125 | # standard library module, which allows setting of the BROWSER environment 126 | # variable to override it. 127 | # c.NotebookApp.browser = '' 128 | 129 | # The url for MathJax.js. 130 | # c.NotebookApp.mathjax_url = '' 131 | 132 | # Supply overrides for the tornado.web.Application that the IPython notebook 133 | # uses. 134 | # c.NotebookApp.tornado_settings = {} 135 | 136 | # The file where the cookie secret is stored. 137 | # c.NotebookApp.cookie_secret_file = '' 138 | 139 | # Create a massive crash report when IPython encounters what may be an internal 140 | # error. The default is to append a short message to the usual traceback 141 | # c.NotebookApp.verbose_crash = False 142 | 143 | # Whether to overwrite existing config files when copying 144 | # c.NotebookApp.overwrite = False 145 | 146 | # Whether to open in a browser after starting. The specific browser used is 147 | # platform dependent and determined by the python standard library `webbrowser` 148 | # module, unless it is overridden using the --browser (NotebookApp.browser) 149 | # configuration option. 150 | # c.NotebookApp.open_browser = True 151 | 152 | # DEPRECATED, use tornado_settings 153 | # c.NotebookApp.webapp_settings = {} 154 | 155 | # Reraise exceptions encountered loading server extensions? 156 | # c.NotebookApp.reraise_server_extension_failures = False 157 | 158 | # Whether to install the default config files into the profile dir. If a new 159 | # profile is being created, and IPython contains config files for that profile, 160 | # then they will be staged into the new directory. Otherwise, default config 161 | # files will be automatically generated. 162 | # c.NotebookApp.copy_config_files = False 163 | 164 | # DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. 165 | # c.NotebookApp.pylab = 'disabled' 166 | 167 | # The directory to use for notebooks and kernels. 
168 | # c.NotebookApp.notebook_dir = '' 169 | 170 | # The kernel spec manager class to use. Should be a subclass of 171 | # `IPython.kernel.kernelspec.KernelSpecManager`. 172 | # 173 | # The Api of KernelSpecManager is provisional and might change without warning 174 | # between this version of IPython and the next stable one. 175 | # c.NotebookApp.kernel_spec_manager_class = 176 | 177 | # 178 | # c.NotebookApp.file_to_run = '' 179 | 180 | # DEPRECATED use base_url 181 | # c.NotebookApp.base_project_url = '/' 182 | 183 | # The random bytes used to secure cookies. By default this is a new random 184 | # number every time you start the Notebook. Set it to a value in a config file 185 | # to enable logins to persist across server sessions. 186 | # 187 | # Note: Cookie secrets should be kept private, do not share config files with 188 | # cookie_secret stored in plaintext (you can read the value from a file). 189 | # c.NotebookApp.cookie_secret = b'' 190 | 191 | # The full path to a private key file for usage with SSL/TLS. 192 | # c.NotebookApp.keyfile = '' 193 | 194 | # Extra paths to search for serving static files. 195 | # 196 | # This allows adding javascript/css to be available from the notebook server 197 | # machine, or overriding individual files in the IPython 198 | # c.NotebookApp.extra_static_paths = [] 199 | 200 | # The name of the IPython directory. This directory is used for logging 201 | # configuration (through profiles), history storage, etc. The default is usually 202 | # $HOME/.ipython. This option can also be specified through the environment 203 | # variable IPYTHONDIR. 204 | # c.NotebookApp.ipython_dir = '' 205 | 206 | # Extra paths to search for serving jinja templates. 207 | # 208 | # Can be used to override templates from IPython.html.templates. 209 | # c.NotebookApp.extra_template_paths = [] 210 | 211 | # Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- 212 | # For headerssent by the upstream reverse proxy. 
Necessary if the proxy handles 213 | # SSL 214 | # c.NotebookApp.trust_xheaders = False 215 | 216 | # Supply extra arguments that will be passed to Jinja environment. 217 | # c.NotebookApp.jinja_environment_options = {} 218 | 219 | # The IP address the notebook server will listen on. 220 | # c.NotebookApp.ip = 'localhost' 221 | 222 | # ------------------------------------------------------------------------------ 223 | # KernelManager configuration 224 | # ------------------------------------------------------------------------------ 225 | 226 | # Manages a single kernel in a subprocess on this host. 227 | # 228 | # This version starts kernels with Popen. 229 | 230 | # KernelManager will inherit config from: ConnectionFileMixin 231 | 232 | # set the heartbeat port [default: random] 233 | # c.KernelManager.hb_port = 0 234 | 235 | # set the stdin (ROUTER) port [default: random] 236 | # c.KernelManager.stdin_port = 0 237 | 238 | # 239 | # c.KernelManager.transport = 'tcp' 240 | 241 | # JSON file in which to store connection info [default: kernel-.json] 242 | # 243 | # This file will contain the IP, ports, and authentication key needed to connect 244 | # clients to this kernel. By default, this file will be created in the security 245 | # dir of the current profile, but can be specified by absolute path. 246 | # c.KernelManager.connection_file = '' 247 | 248 | # set the control (ROUTER) port [default: random] 249 | # c.KernelManager.control_port = 0 250 | 251 | # set the shell (ROUTER) port [default: random] 252 | # c.KernelManager.shell_port = 0 253 | 254 | # Should we autorestart the kernel if it dies. 255 | # c.KernelManager.autorestart = False 256 | 257 | # DEPRECATED: Use kernel_name instead. 258 | # 259 | # The Popen Command to launch the kernel. Override this if you have a custom 260 | # kernel. 
If kernel_cmd is specified in a configuration file, IPython does not 261 | # pass any arguments to the kernel, because it cannot make any assumptions about 262 | # the arguments that the kernel understands. In particular, this means that the 263 | # kernel does not receive the option --debug if it given on the IPython command 264 | # line. 265 | # c.KernelManager.kernel_cmd = [] 266 | 267 | # Set the kernel's IP address [default localhost]. If the IP address is 268 | # something other than localhost, then Consoles on other machines will be able 269 | # to connect to the Kernel, so be careful! 270 | # c.KernelManager.ip = '' 271 | 272 | # set the iopub (PUB) port [default: random] 273 | # c.KernelManager.iopub_port = 0 274 | 275 | # ------------------------------------------------------------------------------ 276 | # ProfileDir configuration 277 | # ------------------------------------------------------------------------------ 278 | 279 | # An object to manage the profile directory and its resources. 280 | # 281 | # The profile directory is used by all IPython applications, to manage 282 | # configuration, logging and security. 283 | # 284 | # This object knows how to find, create and manage these directories. This 285 | # should be used by any code that wants to handle profiles. 286 | 287 | # Set the profile location directly. This overrides the logic used by the 288 | # `profile` option. 289 | # c.ProfileDir.location = '' 290 | 291 | # ------------------------------------------------------------------------------ 292 | # Session configuration 293 | # ------------------------------------------------------------------------------ 294 | 295 | # Object for handling serialization and sending of messages. 296 | # 297 | # The Session object handles building messages and sending them with ZMQ sockets 298 | # or ZMQStream objects. 
Objects can communicate with each other over the 299 | # network via Session objects, and only need to work with the dict-based IPython 300 | # message spec. The Session will handle serialization/deserialization, security, 301 | # and metadata. 302 | # 303 | # Sessions support configurable serialization via packer/unpacker traits, and 304 | # signing with HMAC digests via the key/keyfile traits. 305 | # 306 | # Parameters ---------- 307 | # 308 | # debug : bool 309 | # whether to trigger extra debugging statements 310 | # packer/unpacker : str : 'json', 'pickle' or import_string 311 | # importstrings for methods to serialize message parts. If just 312 | # 'json' or 'pickle', predefined JSON and pickle packers will be used. 313 | # Otherwise, the entire importstring must be used. 314 | # 315 | # The functions must accept at least valid JSON input, and output *bytes*. 316 | # 317 | # For example, to use msgpack: 318 | # packer = 'msgpack.packb', unpacker='msgpack.unpackb' 319 | # pack/unpack : callables 320 | # You can also set the pack/unpack callables for serialization directly. 321 | # session : bytes 322 | # the ID of this Session object. The default is to generate a new UUID. 323 | # username : unicode 324 | # username added to message headers. The default is to ask the OS. 325 | # key : bytes 326 | # The key used to initialize an HMAC signature. If unset, messages 327 | # will not be signed or checked. 328 | # keyfile : filepath 329 | # The file containing a key. If this is set, `key` will be initialized 330 | # to the contents of the file. 331 | 332 | # The digest scheme used to construct the message signatures. Must have the form 333 | # 'hmac-HASH'. 334 | # c.Session.signature_scheme = 'hmac-sha256' 335 | 336 | # The maximum number of digests to remember. 337 | # 338 | # The digest history will be culled when it exceeds this value. 339 | # c.Session.digest_history_size = 65536 340 | 341 | # The name of the unpacker for unserializing messages. 
Only used with custom 342 | # functions for `packer`. 343 | # c.Session.unpacker = 'json' 344 | 345 | # The name of the packer for serializing messages. Should be one of 'json', 346 | # 'pickle', or an import name for a custom callable serializer. 347 | # c.Session.packer = 'json' 348 | 349 | # Username for the Session. Default is your system username. 350 | # c.Session.username = 'minrk' 351 | 352 | # Debug output in the Session 353 | # c.Session.debug = False 354 | 355 | # path to file containing execution key. 356 | # c.Session.keyfile = '' 357 | 358 | # The maximum number of items for a container to be introspected for custom 359 | # serialization. Containers larger than this are pickled outright. 360 | # c.Session.item_threshold = 64 361 | 362 | # Threshold (in bytes) beyond which an object's buffer should be extracted to 363 | # avoid pickling. 364 | # c.Session.buffer_threshold = 1024 365 | 366 | # The UUID identifying this session. 367 | # c.Session.session = '' 368 | 369 | # Threshold (in bytes) beyond which a buffer should be sent without copying. 370 | # c.Session.copy_threshold = 65536 371 | 372 | # execution key, for signing messages. 373 | # c.Session.key = b'' 374 | 375 | # Metadata dictionary, which serves as the default top-level metadata dict for 376 | # each message. 377 | # c.Session.metadata = {} 378 | 379 | # ------------------------------------------------------------------------------ 380 | # MappingKernelManager configuration 381 | # ------------------------------------------------------------------------------ 382 | 383 | # A KernelManager that handles notebook mapping and HTTP error handling 384 | 385 | # MappingKernelManager will inherit config from: MultiKernelManager 386 | 387 | # The kernel manager class. This is configurable to allow subclassing of the 388 | # KernelManager for customized behavior. 
389 | # c.MappingKernelManager.kernel_manager_class = 'IPython.kernel.ioloop.IOLoopKernelManager' 390 | 391 | # 392 | # c.MappingKernelManager.root_dir = '' 393 | 394 | # The name of the default kernel to start 395 | # c.MappingKernelManager.default_kernel_name = 'python3' 396 | 397 | # ------------------------------------------------------------------------------ 398 | # ContentsManager configuration 399 | # ------------------------------------------------------------------------------ 400 | 401 | # Base class for serving files and directories. 402 | # 403 | # This serves any text or binary file, as well as directories, with special 404 | # handling for JSON notebook documents. 405 | # 406 | # Most APIs take a path argument, which is always an API-style unicode path, and 407 | # always refers to a directory. 408 | # 409 | # - unicode, not url-escaped 410 | # - '/'-separated 411 | # - leading and trailing '/' will be stripped 412 | # - if unspecified, path defaults to '', 413 | # indicating the root path. 414 | 415 | # The base name used when creating untitled directories. 416 | # c.ContentsManager.untitled_directory = 'Untitled Folder' 417 | 418 | # Python callable or importstring thereof 419 | # 420 | # To be called on a contents model prior to save. 421 | # 422 | # This can be used to process the structure, such as removing notebook outputs 423 | # or other side effects that should not be saved. 424 | # 425 | # It will be called as (all arguments passed by keyword):: 426 | # 427 | # hook(path=path, model=model, contents_manager=self) 428 | # 429 | # - model: the model to be saved. Includes file contents. 430 | # Modifying this dict will affect the file that is stored. 431 | # - path: the API path of the save destination 432 | # - contents_manager: this ContentsManager instance 433 | # c.ContentsManager.pre_save_hook = None 434 | 435 | # Glob patterns to hide in file and directory listings. 
436 | # c.ContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~'] 437 | 438 | # The base name used when creating untitled files. 439 | # c.ContentsManager.untitled_file = 'untitled' 440 | 441 | # The base name used when creating untitled notebooks. 442 | # c.ContentsManager.untitled_notebook = 'Untitled' 443 | 444 | # 445 | # c.ContentsManager.checkpoints = None 446 | 447 | # 448 | # c.ContentsManager.checkpoints_class = 449 | 450 | # 451 | # c.ContentsManager.checkpoints_kwargs = {} 452 | 453 | # ------------------------------------------------------------------------------ 454 | # FileContentsManager configuration 455 | # ------------------------------------------------------------------------------ 456 | 457 | # FileContentsManager will inherit config from: ContentsManager 458 | 459 | # The base name used when creating untitled directories. 460 | # c.FileContentsManager.untitled_directory = 'Untitled Folder' 461 | 462 | # Python callable or importstring thereof 463 | # 464 | # To be called on a contents model prior to save. 465 | # 466 | # This can be used to process the structure, such as removing notebook outputs 467 | # or other side effects that should not be saved. 468 | # 469 | # It will be called as (all arguments passed by keyword):: 470 | # 471 | # hook(path=path, model=model, contents_manager=self) 472 | # 473 | # - model: the model to be saved. Includes file contents. 474 | # Modifying this dict will affect the file that is stored. 475 | # - path: the API path of the save destination 476 | # - contents_manager: this ContentsManager instance 477 | # c.FileContentsManager.pre_save_hook = None 478 | 479 | # Glob patterns to hide in file and directory listings. 480 | # c.FileContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~'] 481 | 482 | # The base name used when creating untitled files. 
483 | # c.FileContentsManager.untitled_file = 'untitled' 484 | 485 | # The base name used when creating untitled notebooks. 486 | # c.FileContentsManager.untitled_notebook = 'Untitled' 487 | 488 | # Python callable or importstring thereof 489 | # 490 | # to be called on the path of a file just saved. 491 | # 492 | # This can be used to process the file on disk, such as converting the notebook 493 | # to a script or HTML via nbconvert. 494 | # 495 | # It will be called as (all arguments passed by keyword):: 496 | # 497 | # hook(os_path=os_path, model=model, contents_manager=instance) 498 | # 499 | # - path: the filesystem path to the file just written - model: the model 500 | # representing the file - contents_manager: this ContentsManager instance 501 | # c.FileContentsManager.post_save_hook = None 502 | 503 | # DEPRECATED, use post_save_hook 504 | # c.FileContentsManager.save_script = False 505 | 506 | # 507 | # c.FileContentsManager.root_dir = '' 508 | 509 | # 510 | # c.FileContentsManager.checkpoints_class = 511 | 512 | # 513 | # c.FileContentsManager.checkpoints = None 514 | 515 | # 516 | # c.FileContentsManager.checkpoints_kwargs = {} 517 | 518 | # ------------------------------------------------------------------------------ 519 | # NotebookNotary configuration 520 | # ------------------------------------------------------------------------------ 521 | 522 | # A class for computing and verifying notebook signatures. 523 | 524 | # The number of notebook signatures to cache. When the number of signatures 525 | # exceeds this value, the oldest 25% of signatures will be culled. 526 | # c.NotebookNotary.cache_size = 65535 527 | 528 | # The sqlite file in which to store notebook signatures. By default, this will 529 | # be in your IPython profile. You can set it to ':memory:' to disable sqlite 530 | # writing to the filesystem. 531 | # c.NotebookNotary.db_file = '' 532 | 533 | # The secret key with which notebooks are signed. 
534 | # c.NotebookNotary.secret = b'' 535 | 536 | # The file where the secret key is stored. 537 | # c.NotebookNotary.secret_file = '' 538 | 539 | # The hashing algorithm used to sign notebooks. 540 | # c.NotebookNotary.algorithm = 'sha256' 541 | 542 | # ------------------------------------------------------------------------------ 543 | # KernelSpecManager configuration 544 | # ------------------------------------------------------------------------------ 545 | 546 | # Whitelist of allowed kernel names. 547 | # 548 | # By default, all installed kernels are allowed. 549 | # c.KernelSpecManager.whitelist = set() 550 | -------------------------------------------------------------------------------- /tests/dotipython_empty/profile_default/static/custom/custom.css: -------------------------------------------------------------------------------- 1 | /* 2 | Placeholder for custom user CSS 3 | 4 | mainly to be overridden in profile/static/custom/custom.css 5 | 6 | This will always be an empty file in IPython 7 | */ 8 | -------------------------------------------------------------------------------- /tests/dotipython_empty/profile_default/static/custom/custom.js: -------------------------------------------------------------------------------- 1 | // leave at least 2 line with only a star on it below, or doc generation fails 2 | /** 3 | * 4 | * 5 | * Placeholder for custom user javascript 6 | * mainly to be overridden in profile/static/custom/custom.js 7 | * This will always be an empty file in IPython 8 | * 9 | * User could add any javascript in the `profile/static/custom/custom.js` file. 10 | * It will be executed by the ipython notebook at load time. 11 | * 12 | * Same thing with `profile/static/custom/custom.css` to inject custom css into the notebook. 13 | * 14 | * 15 | * The object available at load time depend on the version of IPython in use. 16 | * there is no guaranties of API stability. 
17 | * 18 | * The example below explain the principle, and might not be valid. 19 | * 20 | * Instances are created after the loading of this file and might need to be accessed using events: 21 | * define([ 22 | * 'base/js/namespace', 23 | * 'base/js/events' 24 | * ], function(IPython, events) { 25 | * events.on("app_initialized.NotebookApp", function () { 26 | * IPython.keyboard_manager.... 27 | * }); 28 | * }); 29 | * 30 | * __Example 1:__ 31 | * 32 | * Create a custom button in toolbar that execute `%qtconsole` in kernel 33 | * and hence open a qtconsole attached to the same kernel as the current notebook 34 | * 35 | * define([ 36 | * 'base/js/namespace', 37 | * 'base/js/events' 38 | * ], function(IPython, events) { 39 | * events.on('app_initialized.NotebookApp', function(){ 40 | * IPython.toolbar.add_buttons_group([ 41 | * { 42 | * 'label' : 'run qtconsole', 43 | * 'icon' : 'icon-terminal', // select your icon from http://fortawesome.github.io/Font-Awesome/icons 44 | * 'callback': function () { 45 | * IPython.notebook.kernel.execute('%qtconsole') 46 | * } 47 | * } 48 | * // add more button here if needed. 49 | * ]); 50 | * }); 51 | * }); 52 | * 53 | * __Example 2:__ 54 | * 55 | * At the completion of the dashboard loading, load an unofficial javascript extension 56 | * that is installed in profile/static/custom/ 57 | * 58 | * define([ 59 | * 'base/js/events' 60 | * ], function(events) { 61 | * events.on('app_initialized.DashboardApp', function(){ 62 | * require(['custom/unofficial_extension.js']) 63 | * }); 64 | * }); 65 | * 66 | * __Example 3:__ 67 | * 68 | * Use `jQuery.getScript(url [, success(script, textStatus, jqXHR)] );` 69 | * to load custom script into the notebook. 70 | * 71 | * // to load the metadata ui extension example. 
class MultiPatch:
    """Start/stop a group of ``unittest.mock`` patchers as a single context manager.

    Parameters
    ----------
    *patchers :
        Patcher objects (as returned by ``unittest.mock.patch`` /
        ``patch.object``) to be activated together.
    """

    def __init__(self, *patchers):
        self.patchers = patchers

    def __enter__(self):
        started = []
        try:
            for patcher in self.patchers:
                patcher.start()
                started.append(patcher)
        except Exception:
            # Unwind anything already started so a failure partway through
            # does not leak active patches into unrelated tests.
            for patcher in reversed(started):
                patcher.stop()
            raise
        return self

    def __exit__(self, *args):
        # Stop in reverse order of starting (innermost first), mirroring
        # how nested ``with patch(...)`` blocks would unwind.
        for patcher in reversed(self.patchers):
            patcher.stop()
def test_custom_config():
    """``--config <file>`` should load trait values from the given config file."""
    app = DummyApp()
    td = mkdtemp()
    try:
        fname = pjoin(td, "config.py")
        with open(fname, "w", encoding="utf-8") as f:
            f.write(_dummy_config)
        app.initialize(["--config", fname])
    finally:
        # Remove the temp dir even if initialize() raises, so a failing
        # test does not leave directories behind.
        shutil.rmtree(td)
    assert app.config_file == fname
    assert app.n == 10
1") 97 | with patch.object(os, "getcwd", lambda: wd): 98 | app = DummyApp(config_dir=config_dir) 99 | app.initialize([]) 100 | 101 | assert app.n == 0 102 | assert app.m == 0 103 | 104 | shutil.rmtree(config_dir) 105 | shutil.rmtree(wd) 106 | 107 | 108 | def test_load_bad_config(): 109 | config_dir = mkdtemp() 110 | os.environ["JUPYTER_CONFIG_PATH"] = str(config_dir) 111 | with open(pjoin(config_dir, "dummy_app_config.py"), "w", encoding="utf-8") as f: 112 | f.write('c.DummyApp.m = "a\n') # Syntax error 113 | 114 | with pytest.raises(SyntaxError): # noqa: PT012 115 | app = DummyApp(config_dir=config_dir) 116 | app.raise_config_file_errors = True 117 | app.initialize([]) 118 | 119 | shutil.rmtree(config_dir) 120 | del os.environ["JUPYTER_CONFIG_PATH"] 121 | 122 | 123 | def test_runtime_dir_changed(): 124 | app = DummyApp() 125 | td = mkdtemp() 126 | shutil.rmtree(td) 127 | app.runtime_dir = td 128 | assert os.path.isdir(td) 129 | shutil.rmtree(td) 130 | 131 | 132 | class AsyncioRunApp(JupyterApp): 133 | async def _inner(self): 134 | pass 135 | 136 | def start(self): 137 | asyncio.run(self._inner()) 138 | 139 | 140 | def test_asyncio_run(): 141 | AsyncioRunApp.launch_instance([]) 142 | AsyncioRunApp.clear_instance() 143 | 144 | 145 | class SyncTornadoApp(JupyterApp): 146 | async def _inner(self): 147 | self.running_loop = asyncio.get_running_loop() 148 | 149 | def start(self): 150 | self.starting_loop = ensure_event_loop() 151 | loop = asyncio.get_event_loop() 152 | loop.run_until_complete(self._inner()) 153 | loop.close() 154 | 155 | 156 | def test_sync_tornado_run(): 157 | SyncTornadoApp.launch_instance([]) 158 | app = SyncTornadoApp.instance() 159 | assert app.running_loop == app.starting_loop 160 | SyncTornadoApp.clear_instance() 161 | 162 | 163 | class AsyncApp(JupyterAsyncApp): 164 | async def initialize_async(self, argv): 165 | self.value = 10 166 | 167 | async def start_async(self): 168 | assert self.value == 10 169 | 170 | 171 | def test_async_app(): 172 | 
AsyncApp.launch_instance([]) 173 | app = AsyncApp.instance() 174 | assert app.value == 10 175 | AsyncApp.clear_instance() 176 | 177 | 178 | class AsyncTornadoApp(AsyncApp): 179 | _prefer_selector_loop = True 180 | 181 | 182 | def test_async_tornado_app(): 183 | AsyncTornadoApp.launch_instance([]) 184 | app = AsyncApp.instance() 185 | assert app._prefer_selector_loop is True 186 | AsyncTornadoApp.clear_instance() 187 | -------------------------------------------------------------------------------- /tests/test_command.py: -------------------------------------------------------------------------------- 1 | """Test the Jupyter command-line""" 2 | 3 | from __future__ import annotations 4 | 5 | import json 6 | import os 7 | import sys 8 | import sysconfig 9 | from subprocess import PIPE, CalledProcessError, check_output 10 | from unittest.mock import patch 11 | 12 | import pytest 13 | 14 | from jupyter_core import __version__ 15 | from jupyter_core.command import list_subcommands 16 | from jupyter_core.paths import ( 17 | jupyter_config_dir, 18 | jupyter_config_path, 19 | jupyter_data_dir, 20 | jupyter_path, 21 | jupyter_runtime_dir, 22 | ) 23 | 24 | resetenv = patch.dict(os.environ) 25 | 26 | 27 | def setup_function(): 28 | resetenv.start() 29 | for var in [ 30 | "JUPYTER_CONFIG_DIR", 31 | "JUPYTER_CONFIG_PATH", 32 | "JUPYTER_DATA_DIR", 33 | "JUPYTER_NO_CONFIG", 34 | "JUPYTER_PATH", 35 | "JUPYTER_PLATFORM_DIRS", 36 | "JUPYTER_RUNTIME_DIR", 37 | ]: 38 | os.environ.pop(var, None) 39 | 40 | 41 | def teardown_function(): 42 | resetenv.stop() 43 | 44 | 45 | def get_jupyter_output(cmd): 46 | """Get output of a jupyter command""" 47 | if not isinstance(cmd, list): 48 | cmd = [cmd] 49 | return ( 50 | check_output([sys.executable, "-m", "jupyter_core", *cmd], stderr=PIPE) 51 | .decode("utf8") 52 | .strip() 53 | ) 54 | 55 | 56 | def write_executable(path, source): 57 | if sys.platform == "win32": 58 | script = path.dirpath() / path.purebasename + "-script.py" 59 | exe = 
path.dirpath() / path.purebasename + ".exe" 60 | else: 61 | script = path 62 | 63 | script.write(source) 64 | script.chmod(0o700) 65 | 66 | if sys.platform == "win32": 67 | try: 68 | import importlib.resources 69 | 70 | if not hasattr(importlib.resources, "files"): 71 | raise ImportError 72 | wp = importlib.resources.files("setuptools").joinpath("cli-32.exe") 73 | w = wp.read_bytes() 74 | except (ImportError, FileNotFoundError, SystemError): 75 | pytest.skip( 76 | "Need importlib.resources and setuptools to make scripts executable on Windows" 77 | ) 78 | exe.write(w, "wb") 79 | exe.chmod(0o700) 80 | 81 | 82 | def assert_output(cmd, expected): 83 | assert get_jupyter_output(cmd) == expected 84 | 85 | 86 | def test_config_dir(): 87 | assert_output("--config-dir", jupyter_config_dir()) 88 | 89 | 90 | def test_data_dir(): 91 | assert_output("--data-dir", jupyter_data_dir()) 92 | 93 | 94 | def test_runtime_dir(): 95 | assert_output("--runtime-dir", jupyter_runtime_dir()) 96 | 97 | 98 | def test_paths(): 99 | output = get_jupyter_output("--paths") 100 | for d in (jupyter_config_dir(), jupyter_data_dir(), jupyter_runtime_dir()): 101 | assert d in output 102 | for key in ("config", "data", "runtime"): 103 | assert (f"{key}:") in output 104 | 105 | for path in (jupyter_config_path(), jupyter_path()): 106 | for d in path: 107 | assert d in output 108 | 109 | 110 | def test_paths_json(): 111 | output = get_jupyter_output(["--paths", "--json"]) 112 | data = json.loads(output) 113 | assert sorted(data) == ["config", "data", "runtime"] 114 | for _, path in data.items(): 115 | assert isinstance(path, list) 116 | 117 | 118 | def test_paths_debug(): 119 | names = [ 120 | "JUPYTER_PREFER_ENV_PATH", 121 | "JUPYTER_NO_CONFIG", 122 | "JUPYTER_CONFIG_PATH", 123 | "JUPYTER_CONFIG_DIR", 124 | "JUPYTER_PATH", 125 | "JUPYTER_DATA_DIR", 126 | "JUPYTER_RUNTIME_DIR", 127 | ] 128 | output = get_jupyter_output(["--paths", "--debug"]) 129 | for v in names: 130 | assert f"{v} is not set" in output 
131 | 132 | with patch.dict("os.environ", [(v, "y") for v in names]): 133 | output = get_jupyter_output(["--paths", "--debug"]) 134 | for v in names: 135 | assert f"{v} is set" in output 136 | 137 | 138 | def test_subcommand_not_given(): 139 | with pytest.raises(CalledProcessError): 140 | get_jupyter_output([]) 141 | 142 | 143 | def test_help(): 144 | output = get_jupyter_output("-h") 145 | assert "--help" in output 146 | 147 | 148 | def test_subcommand_not_found(): 149 | with pytest.raises(CalledProcessError) as excinfo: 150 | get_jupyter_output("nonexistant-subcommand") 151 | stderr = excinfo.value.stderr.decode("utf8") 152 | assert "Jupyter command `jupyter-nonexistant-subcommand` not found." in stderr 153 | 154 | 155 | @patch.object(sys, "argv", [__file__] + sys.argv[1:]) 156 | def test_subcommand_list(tmpdir): 157 | a = tmpdir.mkdir("a") 158 | for cmd in ("jupyter-foo-bar", "jupyter-xyz", "jupyter-babel-fish"): 159 | a.join(cmd).write("") 160 | b = tmpdir.mkdir("b") 161 | for cmd in ("jupyter-foo", "jupyterstuff", "jupyter-yo-eyropa-ganymyde-callysto"): 162 | b.join(cmd).write("") 163 | c = tmpdir.mkdir("c") 164 | for cmd in ("jupyter-baz", "jupyter-bop"): 165 | c.join(cmd).write("") 166 | 167 | path = os.pathsep.join(map(str, [a, b])) 168 | 169 | def get_path(dummy): 170 | return str(c) 171 | 172 | with patch.object(sysconfig, "get_path", get_path), patch.dict("os.environ", {"PATH": path}): 173 | subcommands = list_subcommands() 174 | assert subcommands == [ 175 | "babel-fish", 176 | "baz", 177 | "bop", 178 | "foo", 179 | "xyz", 180 | "yo-eyropa-ganymyde-callysto", 181 | ] 182 | 183 | 184 | skip_darwin = pytest.mark.skipif(sys.platform == "darwin", reason="Fails on macos") 185 | 186 | 187 | @skip_darwin 188 | def test_not_on_path(tmpdir): 189 | a = tmpdir.mkdir("a") 190 | jupyter = a.join("jupyter") 191 | jupyter.write("from jupyter_core import command; command.main()") 192 | jupyter.chmod(0o700) 193 | witness = a.join("jupyter-witness") 194 | witness_src = 
"#!{}\n{}\n".format(sys.executable, 'print("WITNESS ME")') 195 | write_executable(witness, witness_src) 196 | 197 | env = {"PATH": ""} 198 | if "SYSTEMROOT" in os.environ: # Windows http://bugs.python.org/issue20614 199 | env["SYSTEMROOT"] = os.environ["SYSTEMROOT"] 200 | if sys.platform == "win32": 201 | env["PATHEXT"] = ".EXE" 202 | # This won't work on windows unless 203 | out = check_output([sys.executable, str(jupyter), "witness"], env=env) 204 | assert b"WITNESS" in out 205 | 206 | 207 | @skip_darwin 208 | def test_path_priority(tmpdir): 209 | a = tmpdir.mkdir("a") 210 | jupyter = a.join("jupyter") 211 | jupyter.write("from jupyter_core import command; command.main()") 212 | jupyter.chmod(0o700) 213 | witness_a = a.join("jupyter-witness") 214 | witness_a_src = "#!{}\n{}\n".format(sys.executable, 'print("WITNESS A")') 215 | write_executable(witness_a, witness_a_src) 216 | 217 | b = tmpdir.mkdir("b") 218 | witness_b = b.join("jupyter-witness") 219 | witness_b_src = "#!{}\n{}\n".format(sys.executable, 'print("WITNESS B")') 220 | write_executable(witness_b, witness_b_src) 221 | 222 | env = {"PATH": str(b)} 223 | if "SYSTEMROOT" in os.environ: # Windows http://bugs.python.org/issue20614 224 | env["SYSTEMROOT"] = os.environ["SYSTEMROOT"] 225 | if sys.platform == "win32": 226 | env["PATHEXT"] = ".EXE" 227 | out = check_output([sys.executable, str(jupyter), "witness"], env=env) 228 | assert b"WITNESS A" in out 229 | 230 | 231 | @skip_darwin 232 | def test_argv0(tmpdir): 233 | a = tmpdir.mkdir("a") 234 | jupyter = a.join("jupyter") 235 | jupyter.write("from jupyter_core import command; command.main()") 236 | jupyter.chmod(0o700) 237 | witness_a = a.join("jupyter-witness") 238 | witness_a_src = f"""#!{sys.executable} 239 | import sys 240 | print(sys.argv[0]) 241 | """ 242 | write_executable(witness_a, witness_a_src) 243 | 244 | env = {} 245 | if "SYSTEMROOT" in os.environ: # Windows http://bugs.python.org/issue20614 246 | env["SYSTEMROOT"] = os.environ["SYSTEMROOT"] 
247 | if sys.platform == "win32": 248 | env["PATHEXT"] = ".EXE" 249 | out = check_output([sys.executable, str(jupyter), "witness"], env=env) 250 | 251 | # Make sure the first argv is the full path to the executing script 252 | assert f"{jupyter}-witness".encode() in out 253 | 254 | 255 | def test_version(): 256 | assert isinstance(__version__, str) 257 | -------------------------------------------------------------------------------- /tests/test_migrate.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Jupyter Development Team. 2 | # Distributed under the terms of the Modified BSD License. 3 | """Test config file migration""" 4 | 5 | from __future__ import annotations 6 | 7 | import os 8 | import re 9 | import shutil 10 | from tempfile import mkdtemp 11 | from unittest.mock import patch 12 | 13 | import pytest 14 | 15 | from jupyter_core import migrate as migrate_mod 16 | from jupyter_core.application import JupyterApp 17 | from jupyter_core.migrate import ( 18 | migrate, 19 | migrate_config, 20 | migrate_dir, 21 | migrate_file, 22 | migrate_one, 23 | migrate_static_custom, 24 | ) 25 | from jupyter_core.utils import ensure_dir_exists 26 | 27 | pjoin = os.path.join 28 | here = os.path.dirname(__file__) 29 | dotipython = pjoin(here, "dotipython") 30 | dotipython_empty = pjoin(here, "dotipython_empty") 31 | 32 | 33 | @pytest.fixture 34 | def td(request): 35 | """Fixture for a temporary directory""" 36 | td = mkdtemp("μnïcø∂e") 37 | yield td 38 | shutil.rmtree(td) 39 | 40 | 41 | @pytest.fixture 42 | def env(request): 43 | """Fixture for a full testing environment""" 44 | td = mkdtemp() 45 | env = { 46 | "TESTDIR": td, 47 | "IPYTHONDIR": pjoin(td, "ipython"), 48 | "JUPYTER_CONFIG_DIR": pjoin(td, "jupyter"), 49 | "JUPYTER_DATA_DIR": pjoin(td, "jupyter_data"), 50 | "JUPYTER_RUNTIME_DIR": pjoin(td, "jupyter_runtime"), 51 | "JUPYTER_PATH": "", 52 | } 53 | env_patch = patch.dict(os.environ, env) 54 | env_patch.start() 55 
| 56 | yield env 57 | 58 | env_patch.stop() 59 | shutil.rmtree(td, ignore_errors=os.name == "nt") 60 | 61 | 62 | def touch(path, content=""): 63 | ensure_dir_exists(os.path.dirname(path)) 64 | with open(path, "w", encoding="utf-8") as f: 65 | f.write(content) 66 | 67 | 68 | def assert_files_equal(a, b): 69 | """Verify that two files match""" 70 | 71 | assert os.path.exists(b) 72 | with open(a, encoding="utf-8") as f: 73 | a_txt = f.read() 74 | 75 | with open(b, encoding="utf-8") as f: 76 | b_txt = f.read() 77 | 78 | assert a_txt == b_txt 79 | 80 | 81 | def test_migrate_file(td): 82 | src = pjoin(td, "src") 83 | dst = pjoin(td, "dst") 84 | touch(src, "test file") 85 | assert migrate_file(src, dst) 86 | assert_files_equal(src, dst) 87 | 88 | src2 = pjoin(td, "src2") 89 | touch(src2, "different src") 90 | assert not migrate_file(src2, dst) 91 | assert_files_equal(src, dst) 92 | 93 | 94 | def test_migrate_dir(td): 95 | src = pjoin(td, "src") 96 | dst = pjoin(td, "dst") 97 | os.mkdir(src) 98 | assert not migrate_dir(src, dst) 99 | assert not os.path.exists(dst) 100 | 101 | touch(pjoin(src, "f"), "test file") 102 | assert migrate_dir(src, dst) 103 | assert_files_equal(pjoin(src, "f"), pjoin(dst, "f")) 104 | 105 | touch(pjoin(src, "g"), "other test file") 106 | assert not migrate_dir(src, dst) 107 | assert not os.path.exists(pjoin(dst, "g")) 108 | 109 | shutil.rmtree(dst) 110 | os.mkdir(dst) 111 | assert migrate_dir(src, dst) 112 | assert_files_equal(pjoin(src, "f"), pjoin(dst, "f")) 113 | assert_files_equal(pjoin(src, "g"), pjoin(dst, "g")) 114 | 115 | 116 | def test_migrate_one(td): 117 | src = pjoin(td, "src") 118 | srcdir = pjoin(td, "srcdir") 119 | dst = pjoin(td, "dst") 120 | dstdir = pjoin(td, "dstdir") 121 | 122 | touch(src, "test file") 123 | touch(pjoin(srcdir, "f"), "test dir file") 124 | 125 | called = {} 126 | 127 | def notice_m_file(src, dst): 128 | called["migrate_file"] = True 129 | return migrate_file(src, dst) 130 | 131 | def notice_m_dir(src, dst): 132 
| called["migrate_dir"] = True 133 | return migrate_dir(src, dst) 134 | 135 | with patch.object(migrate_mod, "migrate_file", notice_m_file), patch.object( 136 | migrate_mod, "migrate_dir", notice_m_dir 137 | ): 138 | assert migrate_one(src, dst) 139 | assert called == {"migrate_file": True} 140 | called.clear() 141 | assert migrate_one(srcdir, dstdir) 142 | assert called == {"migrate_dir": True} 143 | called.clear() 144 | assert not migrate_one(pjoin(td, "does_not_exist"), dst) 145 | assert called == {} 146 | 147 | 148 | def test_migrate_config(td): 149 | profile = pjoin(td, "profile") 150 | jpy = pjoin(td, "jupyter_config") 151 | ensure_dir_exists(profile) 152 | 153 | env = { 154 | "profile": profile, 155 | "jupyter_config": jpy, 156 | } 157 | cfg_py = pjoin(profile, "ipython_test_config.py") 158 | touch(cfg_py, "c.Klass.trait = 5\n") 159 | empty_cfg_py = pjoin(profile, "ipython_empty_config.py") 160 | touch(empty_cfg_py, "# c.Klass.trait = 5\n") 161 | 162 | assert not migrate_config("empty", env) 163 | assert not os.path.exists(jpy) 164 | 165 | with patch.dict( 166 | migrate_mod.config_substitutions, 167 | { 168 | re.compile(r"\bKlass\b"): "Replaced", 169 | }, 170 | ): 171 | assert migrate_config("test", env) 172 | 173 | assert os.path.isdir(jpy) 174 | assert sorted(os.listdir(jpy)) == [ 175 | "jupyter_test_config.py", 176 | ] 177 | 178 | with open(pjoin(jpy, "jupyter_test_config.py"), encoding="utf-8") as f: 179 | text = f.read() 180 | assert text == "c.Replaced.trait = 5\n" 181 | 182 | 183 | def test_migrate_custom_default(td): 184 | profile = pjoin(dotipython, "profile_default") 185 | src = pjoin(profile, "static", "custom") 186 | assert os.path.exists(src) 187 | assert not migrate_static_custom(src, td) 188 | 189 | src = pjoin(td, "src") 190 | dst = pjoin(td, "dst") 191 | os.mkdir(src) 192 | src_custom_js = pjoin(src, "custom.js") 193 | src_custom_css = pjoin(src, "custom.css") 194 | touch(src_custom_js, "var a=5;") 195 | touch(src_custom_css, "div { height: 
5px; }") 196 | 197 | assert migrate_static_custom(src, dst) 198 | 199 | 200 | def test_migrate_nothing(env): 201 | migrate() 202 | assert os.listdir(env["JUPYTER_CONFIG_DIR"]) == ["migrated"] 203 | assert not os.path.exists(env["JUPYTER_DATA_DIR"]) 204 | 205 | 206 | def test_migrate_default(env): 207 | shutil.copytree(dotipython_empty, env["IPYTHONDIR"]) 208 | migrate() 209 | assert os.listdir(env["JUPYTER_CONFIG_DIR"]) == ["migrated"] 210 | assert not os.path.exists(env["JUPYTER_DATA_DIR"]) 211 | 212 | 213 | def test_migrate(env): 214 | shutil.copytree(dotipython, env["IPYTHONDIR"]) 215 | migrate() 216 | assert os.path.exists(env["JUPYTER_CONFIG_DIR"]) 217 | assert os.path.exists(env["JUPYTER_DATA_DIR"]) 218 | 219 | 220 | def test_app_migrate(env): 221 | shutil.copytree(dotipython, env["IPYTHONDIR"]) 222 | app = JupyterApp() 223 | app.initialize([]) 224 | assert os.path.exists(env["JUPYTER_CONFIG_DIR"]) 225 | assert os.path.exists(env["JUPYTER_DATA_DIR"]) 226 | 227 | 228 | def test_app_migrate_skip_if_marker(env): 229 | shutil.copytree(dotipython, env["IPYTHONDIR"]) 230 | touch(pjoin(env["JUPYTER_CONFIG_DIR"], "migrated"), "done") 231 | app = JupyterApp() 232 | app.initialize([]) 233 | assert os.listdir(env["JUPYTER_CONFIG_DIR"]) == ["migrated"] 234 | assert not os.path.exists(env["JUPYTER_DATA_DIR"]) 235 | 236 | 237 | def test_app_migrate_skip_unwritable_marker(env): 238 | shutil.copytree(dotipython, env["IPYTHONDIR"]) 239 | migrated_marker = pjoin(env["JUPYTER_CONFIG_DIR"], "migrated") 240 | touch(migrated_marker, "done") 241 | os.chmod(migrated_marker, 0) # make it unworkable 242 | app = JupyterApp() 243 | app.initialize([]) 244 | assert os.listdir(env["JUPYTER_CONFIG_DIR"]) == ["migrated"] 245 | assert not os.path.exists(env["JUPYTER_DATA_DIR"]) 246 | -------------------------------------------------------------------------------- /tests/test_troubleshoot.py: -------------------------------------------------------------------------------- 1 | from __future__ 
import annotations 2 | 3 | from jupyter_core.troubleshoot import main 4 | 5 | 6 | def test_troubleshoot(capsys): 7 | """Smoke test the troubleshoot function""" 8 | main() 9 | out = capsys.readouterr().out 10 | assert "pip list" in out 11 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | """Tests for utils""" 2 | 3 | # Copyright (c) Jupyter Development Team. 4 | # Distributed under the terms of the Modified BSD License. 5 | from __future__ import annotations 6 | 7 | import asyncio 8 | import os 9 | import tempfile 10 | 11 | import pytest 12 | 13 | from jupyter_core.utils import ( 14 | deprecation, 15 | ensure_async, 16 | ensure_dir_exists, 17 | ensure_event_loop, 18 | run_sync, 19 | ) 20 | 21 | 22 | def test_ensure_dir_exists(): 23 | with tempfile.TemporaryDirectory() as td: 24 | ensure_dir_exists(td) 25 | ensure_dir_exists(os.path.join(str(td), "foo"), 0o777) 26 | 27 | 28 | def test_deprecation(): 29 | with pytest.deprecated_call(): 30 | deprecation("foo") 31 | 32 | 33 | async def afunc(): 34 | return "afunc" 35 | 36 | 37 | def func(): 38 | return "func" 39 | 40 | 41 | sync_afunc = run_sync(afunc) 42 | 43 | 44 | def test_run_sync(): 45 | async def foo(): 46 | return 1 47 | 48 | foo_sync = run_sync(foo) 49 | assert foo_sync() == 1 50 | assert foo_sync() == 1 51 | ensure_event_loop().close() 52 | 53 | asyncio.set_event_loop(None) 54 | assert foo_sync() == 1 55 | ensure_event_loop().close() 56 | 57 | asyncio.run(foo()) 58 | 59 | error_msg = "__foo__" 60 | 61 | async def error(): 62 | raise RuntimeError(error_msg) 63 | 64 | error_sync = run_sync(error) 65 | 66 | def test_error_sync(): 67 | with pytest.raises(RuntimeError, match=error_msg): 68 | error_sync() 69 | 70 | test_error_sync() 71 | 72 | async def with_running_loop(): 73 | test_error_sync() 74 | 75 | asyncio.run(with_running_loop()) 76 | 77 | 78 | def test_ensure_async(): 79 
| async def main(): 80 | assert await ensure_async(afunc()) == "afunc" 81 | assert await ensure_async(func()) == "func" 82 | 83 | asyncio.run(main()) 84 | 85 | 86 | def test_ensure_event_loop(): 87 | loop = ensure_event_loop() 88 | 89 | async def inner(): 90 | return asyncio.get_running_loop() 91 | 92 | inner_sync = run_sync(inner) 93 | assert inner_sync() == loop 94 | --------------------------------------------------------------------------------