├── docs ├── README.md ├── ascript.ipy ├── classes_importnb.png ├── packages_importnb.png ├── test_in_notebook.ipynb ├── async-cells.ipynb ├── test_cli.py ├── test_importnb.py └── Untitled42.ipynb ├── src └── importnb │ ├── utils │ ├── __init__.py │ ├── pytest_importnb.py │ └── ipython.py │ ├── __main__.py │ ├── json.g │ ├── __init__.py │ ├── loaders.py │ ├── entry_points.py │ ├── decoder.py │ ├── docstrings.py │ ├── finder.py │ └── loader.py ├── .gitignore ├── postBuild ├── .readthedocs.yml ├── mkdocs.yml ├── .github └── workflows │ ├── release.yml │ └── test.yml ├── LICENSE ├── hatch_build.py ├── pyproject.toml ├── README.md └── LICENSE-MPL-2.0 /docs/README.md: -------------------------------------------------------------------------------- 1 | --8<-- "README.md" -------------------------------------------------------------------------------- /src/importnb/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/ascript.ipy: -------------------------------------------------------------------------------- 1 | msg = !echo "hello word" 2 | -------------------------------------------------------------------------------- /docs/classes_importnb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deathbeds/importnb/HEAD/docs/classes_importnb.png -------------------------------------------------------------------------------- /docs/packages_importnb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deathbeds/importnb/HEAD/docs/packages_importnb.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | .ipynb_checkpoints/ 3 | build/ 4 | dist/ 5 | src/importnb/_json_parser.py 6 | src/importnb/_version.py 7 | -------------------------------------------------------------------------------- /postBuild: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | python -m pip install -e . --no-deps --ignore-installed 3 | jupyter labextension install @jupyterlab/toc 4 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | python: 4 | install: 5 | - method: pip 6 | path: . 7 | extra_requirements: 8 | - docs 9 | 10 | mkdocs: 11 | configuration: mkdocs.yml 12 | fail_on_warning: false -------------------------------------------------------------------------------- /src/importnb/__main__.py: -------------------------------------------------------------------------------- 1 | from . import Notebook 2 | 3 | 4 | def main(argv=None): 5 | """A convenience function for running importnb as an application""" 6 | Notebook.load_argv(argv) 7 | 8 | 9 | if __name__ == "__main__": 10 | main() 11 | -------------------------------------------------------------------------------- /docs/test_in_notebook.ipynb: -------------------------------------------------------------------------------- 1 | {"cells":[{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":["def test_in_a_notebook():\n"," assert True, \"i am found as a test.\""]}],"metadata":{"kernelspec":{"display_name":"Python 3.9.10 ('base')","language":"python","name":"python3"},"language_info":{"name":"python","version":"3.9.10"},"orig_nbformat":4,"vscode":{"interpreter":{"hash":"fe3da9b82b3647283ed4a64afb514c08b1aa39774194ba3fa7469a9333d7260c"}}},"nbformat":4,"nbformat_minor":2} 2 | -------------------------------------------------------------------------------- /src/importnb/json.g: -------------------------------------------------------------------------------- 1 | // a lark grammar for parsing notebooks into source 2 | // this grammar extracts a subset of nbformat (cells, cell_type, source) 3 | // to generate a line for line reconstruction of the source. 4 | 5 | ?start: value 6 | ?value: object 7 | | array 8 | | string 9 | | SIGNED_NUMBER 10 | | "true" 11 | | "false" 12 | | "null" 13 | 14 | 15 | COLON: ":" 16 | array : "[" [value ("," value)*] "]" 17 | object : "{" [_items] "}" 18 | 19 | item: string COLON value 20 | 21 | _items: item ("," item)* 22 | 23 | string : ESCAPED_STRING 24 | 25 | %import common.ESCAPED_STRING 26 | %import common.SIGNED_NUMBER 27 | %import common.WS 28 | 29 | %ignore WS -------------------------------------------------------------------------------- /src/importnb/__init__.py: -------------------------------------------------------------------------------- 1 | """Import jupyter notebooks as python modules and scripts.""" 2 | __all__ = "Notebook", "reload", "imports", "__version__" 3 | 4 | 5 | def is_ipython(): 6 | from sys import modules 7 | 8 | return "IPython" in modules 9 | 10 | 11 | def get_ipython(force=True): 12 | if force or is_ipython(): 13 | try: 14 | from IPython import get_ipython 15 | except ModuleNotFoundError: 16 | return None 17 | shell = get_ipython() 18 | if shell is None: 19 | from IPython import InteractiveShell 20 | 21 | shell = InteractiveShell.instance() 22 | return shell 23 | return None 24 | 25 | 26 | import builtins 27 | 28 | from ._version import __version__ 29 | from .entry_points import imports 30 | from .loader import Notebook, reload 31 | 32 | builtins.true, builtins.false, builtins.null = True, False, None 33 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: importnb 2 | site_description: from notebooks to source code 3 | repo_name: deathbeds/importnb 4 | site_dir: site 5 | 6 | theme: 7 | name: material 8 | features: 9 | # - navigation.tabs 10 | - navigation.tracking 11 | - navigation.expand 12 | - toc.follow 13 | plugins: 14 | - search 15 | - mkdocs-jupyter 16 | 17 | nav: 18 | - importing notebooks: README.md 19 | - tests: 20 | - importnb test notebook: Untitled42.ipynb 21 | - unit tests: test_importnb.py 22 | - command line tests: test_cli.py 23 | 24 | markdown_extensions: 25 | - admonition 26 | - pymdownx.emoji 27 | - pymdownx.magiclink 28 | - footnotes 29 | - pymdownx.snippets: 30 | check_paths: true 31 | - pymdownx.superfences 32 | - toc: 33 | permalink: "¤" 34 | - attr_list 35 | - pymdownx.emoji: 36 | emoji_index: !!python/name:materialx.emoji.twemoji 37 | emoji_generator: !!python/name:materialx.emoji.to_svg 38 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | 2 | name: Python package 3 | on: 4 | push: 5 | tags: 6 | - '*' 7 | release: 8 | types: [published] 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v3 14 | - name: Set up Python 3.9 15 | uses: actions/setup-python@v4 16 | with: 17 | python-version: 3.9 18 | - name: Install dependencies 19 | run: | 20 | python -m pip install --upgrade pip 21 | pip install hatch 22 | - name: Test the project 23 | run: hatch run test:cov 24 | - name: Build 25 | run: hatch build 26 | - name: Publish package 27 | run: hatch publish -r test --user ${{secrets.HATCH_INDEX_USER}} --auth ${{secrets.HATCH_TEST_INDEX_AUTH}} 28 | if: github.event_name != 'release' 29 | - name: Test the released packaged 30 | run: hatch run released:test 31 | if: github.event_name != 'release' 32 | - name: Publish to mainline 33 | run: hatch publish --user ${{secrets.HATCH_INDEX_USER}} --auth ${{secrets.HATCH_INDEX_AUTH}} 34 | if: github.event_name != 'release' 35 | -------------------------------------------------------------------------------- /src/importnb/utils/pytest_importnb.py: -------------------------------------------------------------------------------- 1 | """A `pytest` plugin for importing notebooks as modules and using standard test discovered. 2 | 3 | The `AlternativeModule` is reusable. See `pidgin` for an example. 4 | """ 5 | 6 | from pathlib import Path 7 | 8 | import pytest 9 | 10 | from importnb import Notebook 11 | 12 | 13 | def get_file_patterns(cls, parent): 14 | for pat in parent.config.getini("python_files"): 15 | for e in cls.loader().extensions: 16 | yield "*" + pat.rstrip(".py") + e 17 | 18 | 19 | class AlternativeModule(pytest.Module): 20 | def _getobj(self): 21 | return self.loader.load_file(str(self.path), False) 22 | 23 | @classmethod 24 | def pytest_collect_file(cls, parent, path): 25 | if not parent.session.isinitpath(path): 26 | for pat in get_file_patterns(cls, parent): 27 | if path.fnmatch(pat): 28 | break 29 | else: 30 | return None 31 | 32 | if hasattr(cls, "from_parent"): 33 | return cls.from_parent(parent, path=Path(path)) 34 | return cls(path, parent) 35 | 36 | 37 | class NotebookModule(AlternativeModule): 38 | loader = Notebook 39 | 40 | 41 | pytest_collect_file = NotebookModule.pytest_collect_file 42 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2023, importnb contributors 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /hatch_build.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from io import StringIO 3 | from pathlib import Path 4 | 5 | from hatchling.builders.hooks.plugin.interface import BuildHookInterface 6 | 7 | 8 | class LarkStandAloneBuildHook(BuildHookInterface): 9 | PLUGIN_NAME = "lark_standalone" 10 | 11 | def initialize(self, version, build_data): 12 | L = get_logger() 13 | L.info("converting json grammar to python") 14 | python_parser = Path(self.root, "src/importnb/_json_parser.py") 15 | if not python_parser.exists(): 16 | py = get_standalone() 17 | python_parser.write_text(py) 18 | # its really important to remember the preceeding / 19 | build_data["artifacts"].extend( 20 | [ 21 | "/src/importnb/_json_parser.py", 22 | "/src/importnb/json.g", 23 | ] 24 | ) 25 | 26 | 27 | def get_logger(): 28 | import logging 29 | 30 | logger = logging.getLogger(__name__) 31 | logger.setLevel(logging.INFO) 32 | logging.basicConfig(level=logging.INFO) 33 | return logger 34 | 35 | 36 | def get_lark(): 37 | from lark.tools.standalone import build_lalr, lalr_argparser 38 | 39 | return build_lalr(lalr_argparser.parse_args(["--propagate_positions", "src/importnb/json.g"]))[ 40 | 0 41 | ] 42 | 43 | 44 | def write(buffer, *lines): 45 | buffer.writelines(map(str, lines or ["\n"])) 46 | 47 | 48 | def get_standalone(): 49 | from lark.tools.standalone import gen_standalone 50 | 51 | lark = get_lark() 52 | python = StringIO() 53 | gen_standalone(lark, partial(print, file=python)) 54 | return python.getvalue() 55 | -------------------------------------------------------------------------------- /src/importnb/loaders.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | from types import ModuleType 3 | 4 | from .loader import Loader, SourceModule 5 | 6 | 7 | class DataModule(SourceModule): 8 | def _repr_json_(self): 9 | return self.data, dict(root=repr(self), expanded=False) 10 | 11 | 12 | @dataclass 13 | class DataStreamLoader(Loader): 14 | """an import loader for data streams""" 15 | 16 | module_type: ModuleType = field(default=DataModule) 17 | 18 | def exec_module(self, module): 19 | with open(module.__file__, "rb") as file: 20 | module.data = self.get_data_loader()(file) 21 | return module 22 | 23 | def get_data_loader(self): 24 | raise NotImplementedError("load_data not implemented.") 25 | 26 | 27 | @dataclass 28 | class Json(DataStreamLoader): 29 | """an import loader for json files""" 30 | 31 | extensions: tuple = field(default_factory=[".json"].copy) 32 | 33 | def get_data_loader(self): 34 | from json import load 35 | 36 | return load 37 | 38 | 39 | @dataclass 40 | class Yaml(DataStreamLoader): 41 | """an import loader for yml and yaml""" 42 | 43 | extensions: tuple = field(default_factory=[".yml", ".yaml"].copy) 44 | 45 | def get_data_loader(self): 46 | try: 47 | from ruamel.yaml import YAML 48 | 49 | yaml = YAML(typ="safe", pure=True) 50 | safe_load = yaml.load 51 | except ModuleNotFoundError: 52 | from yaml import safe_load 53 | # probably want an error message about how to fix this if we cant find yamls 54 | return safe_load 55 | 56 | 57 | @dataclass 58 | class Toml(DataStreamLoader): 59 | """an import loader for toml""" 60 | 61 | extensions: tuple = field(default_factory=[".toml"].copy) 62 | 63 | def get_data_loader(self): 64 | try: 65 | from tomllib import load 66 | except ModuleNotFoundError: 67 | from tomli import load 68 | return load 69 | -------------------------------------------------------------------------------- /src/importnb/entry_points.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from contextlib import ExitStack, contextmanager 3 | 4 | # See compatibility note on `group` 5 | # https://docs.python.org/3/library/importlib.metadata.html#entry-points 6 | if sys.version_info < (3, 10): 7 | from importlib_metadata import entry_points 8 | else: 9 | from importlib.metadata import entry_points 10 | 11 | 12 | __all__ = ("imports",) 13 | ENTRY_POINTS = dict() 14 | 15 | 16 | def get_importnb_entry_points(): 17 | """Discover the known importnb entry points""" 18 | global ENTRY_POINTS 19 | for ep in entry_points(group="importnb"): 20 | ENTRY_POINTS[ep.name] = ep.value 21 | return ENTRY_POINTS 22 | 23 | 24 | def loader_from_alias(alias): 25 | """Load an attribute from a module using the entry points value specificaiton""" 26 | from importlib import import_module 27 | from operator import attrgetter 28 | 29 | module, _, member = alias.rpartition(":") 30 | module = import_module(module) 31 | return attrgetter(member)(module) 32 | 33 | 34 | def loader_from_ep(alias): 35 | """Discover a loader for an importnb alias or vaue""" 36 | if ":" in alias: 37 | return loader_from_alias(alias) 38 | 39 | if not ENTRY_POINTS: 40 | get_importnb_entry_points() 41 | 42 | if alias in ENTRY_POINTS: 43 | return loader_from_alias(ENTRY_POINTS[alias]) 44 | 45 | raise ValueError(f"{alias} is not a valid loader alias.") 46 | 47 | 48 | @contextmanager 49 | def imports(*names): 50 | """A shortcut to importnb loaders through entrypoints""" 51 | types = set() 52 | with ExitStack() as stack: 53 | for name in names: 54 | t = loader_from_ep(name) 55 | if t not in types: 56 | stack.enter_context(t()) 57 | types.add(t) 58 | yield stack 59 | 60 | 61 | def list_aliases(): 62 | """List the entry points associated with importnb""" 63 | if not ENTRY_POINTS: 64 | get_importnb_entry_points() 65 | return list(ENTRY_POINTS) 66 | -------------------------------------------------------------------------------- /docs/async-cells.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# top-level await\n", 8 | "\n", 9 | "this feature is tested separately because it requires a non-standard invocationin `exec_module`.\n", 10 | "\n", 11 | "## why\n", 12 | "\n", 13 | "[top-level auto-await](https://ipython.readthedocs.io/en/stable/interactive/autoawait.html) behavior is a feature of `IPython`, and [nodejs](https://maximorlov.com/tips/top-level-await-in-nodejs/). `importnb` supports top-level awaits by implicitly running coroutines in the current event loop.s" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": 2, 19 | "metadata": {}, 20 | "outputs": [], 21 | "source": [ 22 | "from contextlib import asynccontextmanager\n", 23 | "\n", 24 | "\n", 25 | "async def async_function():\n", 26 | " return 10\n", 27 | "\n", 28 | "\n", 29 | "@asynccontextmanager\n", 30 | "async def async_context_context_manager():\n", 31 | " yield async_function\n", 32 | "\n", 33 | "\n", 34 | "awaited_data = await async_function()" 35 | ] 36 | }, 37 | { 38 | "cell_type": "markdown", 39 | "metadata": {}, 40 | "source": [ 41 | "[⚠](https://gist.github.com/Rich-Harris/0b6f317657f5167663b493c722647221 \"top level async is a foot gun\")" 42 | ] 43 | } 44 | ], 45 | "metadata": { 46 | "kernelspec": { 47 | "display_name": "Python [conda env:root] *", 48 | "language": "python", 49 | "name": "conda-root-py" 50 | }, 51 | "language_info": { 52 | "codemirror_mode": { 53 | "name": "ipython", 54 | "version": 3 55 | }, 56 | "file_extension": ".py", 57 | "mimetype": "text/x-python", 58 | "name": "python", 59 | "nbconvert_exporter": "python", 60 | "pygments_lexer": "ipython3", 61 | "version": "3.9.13" 62 | }, 63 | "vscode": { 64 | "interpreter": { 65 | "hash": "fe3da9b82b3647283ed4a64afb514c08b1aa39774194ba3fa7469a9333d7260c" 66 | } 67 | } 68 | }, 69 | "nbformat": 4, 70 | "nbformat_minor": 4 71 | } 72 | -------------------------------------------------------------------------------- /src/importnb/utils/ipython.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | from pathlib import Path 4 | 5 | from IPython import paths 6 | from IPython.core import profiledir 7 | 8 | 9 | def get_config(profile="default"): 10 | profile_dir = profiledir.ProfileDir() 11 | try: 12 | profile = profile_dir.find_profile_dir_by_name(paths.get_ipython_dir(), profile) 13 | except profiledir.ProfileDirError: 14 | os.makedirs(paths.get_ipython_dir(), exist_ok=True) 15 | profile = profile_dir.create_profile_dir_by_name(paths.get_ipython_dir(), profile) 16 | return Path(profile.location, "ipython_config.json") 17 | 18 | 19 | def load_config(): 20 | location = get_config() 21 | try: 22 | with location.open() as file: 23 | config = json.load(file) 24 | except (FileNotFoundError, getattr(json, "JSONDecodeError", ValueError)): 25 | config = {} 26 | 27 | if "InteractiveShellApp" not in config: 28 | config["InteractiveShellApp"] = {} 29 | 30 | if "extensions" not in config["InteractiveShellApp"]: 31 | config["InteractiveShellApp"]["extensions"] = [] 32 | 33 | return config, location 34 | 35 | 36 | def install(project="importnb"): 37 | """Install the importnb extension""" 38 | config, location = load_config() 39 | projects = [project] 40 | if not installed(project): 41 | config["InteractiveShellApp"]["extensions"].extend(projects) 42 | 43 | with location.open("w") as file: 44 | json.dump(config, file) 45 | 46 | print(f"""✅ {projects}""") 47 | 48 | 49 | def installed(project): 50 | config, location = load_config() 51 | return project in config.get("InteractiveShellApp", {}).get("extensions", []) 52 | 53 | 54 | def uninstall(project="importnb"): 55 | """Uninstall the importnb extension""" 56 | config, location = load_config() 57 | projects = [project] 58 | config["InteractiveShellApp"]["extensions"] = [ 59 | ext for ext in config["InteractiveShellApp"]["extensions"] if ext not in projects 60 | ] 61 | 62 | with location.open("w") as file: 63 | json.dump(config, file) 64 | print(f"""❌ {projects}.""") 65 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: test 2 | 3 | on: 4 | pull_request: 5 | branches: ["*"] 6 | push: 7 | branches: [main] 8 | workflow_dispatch: 9 | 10 | concurrency: 11 | group: ${{ github.workflow }}-${{ github.ref }} 12 | cancel-in-progress: true 13 | 14 | env: 15 | CACHE_EPOCH: 0 16 | 17 | jobs: 18 | format: 19 | name: format 20 | runs-on: ubuntu-latest 21 | steps: 22 | - uses: actions/checkout@v3 23 | with: 24 | fetch-depth: 0 25 | - uses: actions/setup-python@v4 26 | with: 27 | python-version: "3.12" 28 | cache: pip 29 | cache-dependency-path: pyproject.toml 30 | - name: install dev dependencies 31 | run: python -m pip install --upgrade pip hatch 32 | - name: run formatters 33 | run: | 34 | echo "~~~bash" > "${GITHUB_STEP_SUMMARY}" 35 | hatch run format:code 2>&1 | tee --append "${GITHUB_STEP_SUMMARY}" 36 | echo "~~~" >> "${GITHUB_STEP_SUMMARY}" 37 | - name: print diff 38 | run: | 39 | echo "~~~diff" >> "${GITHUB_STEP_SUMMARY}" 40 | git diff | tee --append "${GITHUB_STEP_SUMMARY}" 41 | echo "~~~" >> "${GITHUB_STEP_SUMMARY}" 42 | 43 | test: 44 | name: ${{ matrix.os }} ${{ matrix.python-version }} 45 | runs-on: ${{ matrix.os }} 46 | strategy: 47 | fail-fast: false 48 | matrix: 49 | os: [ubuntu-latest, macos-latest, windows-latest] 50 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "pypy3.9"] 51 | include: 52 | - os: ubuntu-latest 53 | hatch-envs: /home/runner/.local/share/hatch/env 54 | - os: macos-latest 55 | hatch-envs: /Users/runner/Library/Application Support/hatch/env 56 | - os: windows-latest 57 | hatch-envs: C:\Users\runneradmin\AppData\Local\hatch\env 58 | 59 | steps: 60 | - uses: actions/checkout@v4 61 | with: 62 | fetch-depth: 0 63 | - uses: actions/setup-python@v4 64 | with: 65 | python-version: ${{ matrix.python-version }} 66 | cache: pip 67 | cache-dependency-path: pyproject.toml 68 | - name: cache hatch envs 69 | uses: actions/cache@v3 70 | with: 71 | path: | 72 | ${{ matrix.hatch-envs }} 73 | key: | 74 | ${{ env.CACHE_EPOCH }}-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('pyproject.toml') }} 75 | - name: install dev dependencies 76 | run: python -m pip install --upgrade pip hatch 77 | - name: run tests (coverage) 78 | if: ${{ !contains(matrix.python-version, 'pypy') }} 79 | run: hatch run test:cov 80 | - name: run tests (no coverage) 81 | if: ${{ contains(matrix.python-version, 'pypy') }} 82 | run: hatch run test:run 83 | -------------------------------------------------------------------------------- /docs/test_cli.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from subprocess import check_call 3 | from sys import executable, path, version_info 4 | 5 | from pytest import importorskip 6 | 7 | from importnb import Notebook 8 | from importnb import __version__ as importnb_version 9 | 10 | GTE10 = version_info.major == 3 and version_info.minor >= 10 11 | 12 | HERE = Path(__file__).parent 13 | 14 | path.insert(0, str(HERE)) 15 | 16 | UNTITLED = HERE / "Untitled42.ipynb" 17 | 18 | ref = Notebook.load_file(UNTITLED) 19 | REF = Path(ref.__file__) 20 | 21 | 22 | def get_prepared_string(x): 23 | if GTE10: 24 | x = x.replace("optional arguments:", "options:") 25 | return x.replace("\r", "") 26 | 27 | 28 | def cli_test(command): 29 | def delay(f): 30 | def wrapper(tmp_path: Path): 31 | from shlex import split 32 | 33 | path = tmp_path / "tmp" 34 | with path.open("w") as file: 35 | check_call( 36 | [executable] + split(command), stderr=file, stdout=file, cwd=str(tmp_path) 37 | ) 38 | out = path.read_text() 39 | match = get_prepared_string( 40 | f.__doc__.format( 41 | UNTITLED=UNTITLED.as_posix(), SLUG=ref.magic_slug, VERSION=importnb_version 42 | ) 43 | ) 44 | 45 | if "UserWarning: Attempting to work in a virtualenv." in out: 46 | out = "".join(out.splitlines(True)[2:]) 47 | assert out == match 48 | 49 | return wrapper 50 | 51 | return delay 52 | 53 | 54 | @cli_test("-m importnb") 55 | def test_usage(): 56 | """\ 57 | usage: importnb [-h] [-m MODULE] [-c CODE] [-d DIR] [-t] [--version] 58 | [file] ... 59 | 60 | run notebooks as python code 61 | 62 | positional arguments: 63 | file run a file 64 | args arguments to pass to script 65 | 66 | optional arguments: 67 | -h, --help show this help message and exit 68 | -m MODULE, --module MODULE 69 | run a module 70 | -c CODE, --code CODE run raw code 71 | -d DIR, --dir DIR path to run script in 72 | -t, --tasks run doit tasks 73 | --version display the importnb version 74 | """ 75 | 76 | 77 | @cli_test(rf"-m importnb -d {UNTITLED.parent.as_posix()} {UNTITLED.as_posix()}") 78 | def test_file(): 79 | """\ 80 | i was printed from {UNTITLED} and my name is __main__ 81 | {SLUG} 82 | the parser namespace is Namespace(args=None) 83 | """ 84 | 85 | 86 | @cli_test(rf"-m importnb -d {UNTITLED.parent.as_posix()} -m {UNTITLED.stem}") 87 | def test_module(): 88 | """\ 89 | i was printed from {UNTITLED} and my name is __main__ 90 | {SLUG} 91 | the parser namespace is Namespace(args=None) 92 | """ 93 | 94 | 95 | @cli_test("-m importnb -c '{}'") 96 | def test_empty_code(): 97 | """""" 98 | 99 | 100 | @cli_test("-m importnb --version") 101 | def test_version(): 102 | """\ 103 | {VERSION} 104 | """ 105 | 106 | 107 | @cli_test(rf"-m importnb -d {UNTITLED.parent.as_posix()} -t {UNTITLED.as_posix()} list") 108 | def test_doit(): 109 | """\ 110 | i was printed from {UNTITLED} and my name is __main__ 111 | {SLUG} 112 | echo this the docstring for the `echo` task that echos hello. 113 | """ 114 | importorskip("doit") 115 | -------------------------------------------------------------------------------- /src/importnb/decoder.py: -------------------------------------------------------------------------------- 1 | import json 2 | import linecache 3 | import textwrap 4 | from functools import partial 5 | 6 | 7 | def quote(object, *, quotes="'''"): 8 | if quotes in object: 9 | quotes = '"""' 10 | return quotes + object + "\n" + quotes 11 | 12 | 13 | from ._json_parser import Lark_StandAlone, Transformer, Tree 14 | 15 | 16 | class Transformer(Transformer): 17 | def __init__( 18 | self, 19 | markdown=quote, 20 | code=textwrap.dedent, 21 | raw=partial(textwrap.indent, prefix="# "), 22 | **kwargs, 23 | ): 24 | super().__init__(**kwargs) 25 | 26 | for key in ("markdown", "code", "raw"): 27 | setattr(self, "transform_" + key, locals().get(key)) 28 | 29 | def string(self, s): 30 | return s[0].line, json.loads(s[0]) 31 | 32 | def item(self, s): 33 | key = s[0][-1] 34 | if key == "cells": 35 | if not isinstance(s[-1], Tree): 36 | return self.render(list(map(dict, s[-1]))) 37 | elif key in {"source", "text"}: 38 | return key, s[-1] 39 | elif key == "cell_type": 40 | if isinstance(s[-1], tuple): 41 | return key, s[-1][-1] 42 | 43 | def array(self, s): 44 | if s: 45 | return s 46 | return [] 47 | 48 | def object(self, s): 49 | return [x for x in s if x is not None] 50 | 51 | def render_one(self, kind, lines): 52 | s = "".join(lines) 53 | if not s.endswith(("\n",)): 54 | s += "\n" 55 | return getattr(self, f"transform_{kind}")(s) 56 | 57 | def render(self, x): 58 | body = [] 59 | for token in x: 60 | t = token.get("cell_type") 61 | try: 62 | s = token["source"] 63 | except KeyError: 64 | s = token.get("text") 65 | if s: 66 | if not isinstance(s, list): 67 | s = [s] 68 | l, lines = s[0][0], [x[1] for x in s] 69 | body.extend([""] * (l - len(body))) 70 | lines = self.render_one(t, lines) 71 | body.extend(lines.splitlines()) 72 | return "\n".join(body + [""]) 73 | 74 | 75 | class LineCacheNotebookDecoder(Transformer): 76 | def __init__( 77 | self, 78 | markdown=quote, 79 | code=textwrap.dedent, 80 | raw=partial(textwrap.indent, prefix="# "), 81 | **kwargs, 82 | ): 83 | super().__init__(**kwargs) 84 | 85 | for key in ("markdown", "code", "raw"): 86 | setattr(self, "transform_" + key, locals().get(key)) 87 | 88 | def source_from_json_grammar(self, object): 89 | return Lark_StandAlone(transformer=self).parse(object) 90 | 91 | def decode(self, object, filename): 92 | s = self.source_from_json_grammar(object) 93 | if s: 94 | source = s[0] 95 | linecache.updatecache(filename) 96 | if filename in linecache.cache: 97 | linecache.cache[filename] = ( 98 | linecache.cache[filename][0], 99 | linecache.cache[filename][1], 100 | source.splitlines(True), 101 | filename, 102 | ) 103 | return source 104 | return "" 105 | -------------------------------------------------------------------------------- /src/importnb/docstrings.py: -------------------------------------------------------------------------------- 1 | """# Special handling of markdown cells as docstrings. 2 | 3 | Modify the Python `ast` to assign docstrings to functions when they are preceded by a Markdown cell. 4 | """ 5 | 6 | import ast 7 | 8 | """# Modifying the `ast` 9 | 10 | >>> assert isinstance(create_test, ast.Assign) 11 | >>> assert isinstance(test_update, ast.Attribute) 12 | """ 13 | 14 | create_test = ast.parse("""__test__ = globals().get('__test__', {})""", mode="single").body[0] 15 | test_update = ast.parse("""__test__.update""", mode="single").body[0].value 16 | str_nodes = (ast.Constant,) 17 | 18 | """`TestStrings` is an `ast.NodeTransformer` that captures `str_nodes` in the `TestStrings.strings` object. 19 | 20 | ```ipython 21 | >>> assert isinstance(ast.parse(TestStrings().visit(ast.parse('"Test me"'))), ast.Module) 22 | 23 | ``` 24 | """ 25 | 26 | 27 | class TestStrings(ast.NodeTransformer): 28 | strings = None 29 | 30 | def visit_Module(self, module): 31 | """`TestStrings.visit_Module` initializes the capture. After all the nodes are visit we append `create_test and test_update` 32 | to populate the `"__test__"` attribute. 33 | """ 34 | self.strings = [] 35 | module = self.visit_body(module) 36 | module.body += ( 37 | [create_test] 38 | + [ 39 | ast.copy_location( 40 | ast.Expr( 41 | ast.Call( 42 | func=test_update, 43 | args=[ 44 | ast.Dict( 45 | keys=[ast.Constant(f"string-{node.lineno}")], 46 | values=[node], 47 | ), 48 | ], 49 | keywords=[], 50 | ), 51 | ), 52 | node, 53 | ) 54 | for node in self.strings 55 | ] 56 | if self.strings 57 | else [] 58 | ) 59 | return module 60 | 61 | def visit_body(self, node): 62 | """`TestStrings.visit_body` visits nodes with a `"body"` attibute and extracts potential string tests.""" 63 | body = [] 64 | if ( 65 | node.body 66 | and isinstance(node.body[0], ast.Expr) 67 | and isinstance(node.body[0].value, str_nodes) 68 | ): 69 | body.append(node.body.pop(0)) 70 | node.body = body + [ 71 | (self.visit_body if hasattr(object, "body") else self.visit)(object) 72 | for object in node.body 73 | ] 74 | return node 75 | 76 | def visit_Expr(self, node): 77 | """`TestStrings.visit_Expr` append the `str_nodes` to `TestStrings.strings` to append to the `ast.Module`.""" 78 | if isinstance(node.value, str_nodes): 79 | self.strings.append( 80 | ast.copy_location(ast.Constant(node.value.value.replace("\n```", "\n")), node), 81 | ) 82 | return node 83 | 84 | 85 | def update_docstring(module): 86 | from functools import reduce 87 | 88 | module.body = reduce(markdown_docstring, module.body, []) 89 | return TestStrings().visit(module) 90 | 91 | 92 | docstring_ast_types = ast.ClassDef, ast.FunctionDef 93 | try: 94 | docstring_ast_types += (ast.AsyncFunctionDef,) 95 | except: 96 | ... 97 | 98 | 99 | def markdown_docstring(nodes, node): 100 | if ( 101 | len(nodes) > 1 102 | and str_expr(nodes[-1]) 103 | and isinstance(node, docstring_ast_types) 104 | and not str_expr(node.body[0]) 105 | ): 106 | node.body.insert(0, nodes.pop()) 107 | return nodes.append(node) or nodes 108 | 109 | 110 | def str_expr(node): 111 | return isinstance(node, ast.Expr) and isinstance(node.value, ast.Constant) 112 | -------------------------------------------------------------------------------- /src/importnb/finder.py: -------------------------------------------------------------------------------- 1 | """# `sys.path_hook` modifiers 2 | 3 | Many suggestions for importing notebooks use `sys.meta_paths`, but `importnb` relies on the `sys.path_hooks` to load any notebook in the path. `PathHooksContext` is a base class for the `importnb.Notebook` `SourceFileLoader`. 4 | """ 5 | 6 | import inspect 7 | import sys 8 | from importlib._bootstrap_external import FileFinder 9 | from importlib.machinery import ModuleSpec 10 | from pathlib import Path 11 | 12 | 13 | class FileModuleSpec(ModuleSpec): 14 | def __init__(self, *args, **kwargs): 15 | super().__init__(*args, **kwargs) 16 | self._set_fileattr = True 17 | 18 | 19 | class FuzzySpec(FileModuleSpec): 20 | def __init__( 21 | self, 22 | name, 23 | loader, 24 | *, 25 | alias=None, 26 | origin=None, 27 | loader_state=None, 28 | is_package=None, 29 | ): 30 | super().__init__( 31 | name, 32 | loader, 33 | origin=origin, 34 | loader_state=loader_state, 35 | is_package=is_package, 36 | ) 37 | self.alias = alias 38 | 39 | 40 | def fuzzy_query(str): 41 | new = "" 42 | for chr in str: 43 | new += (not new.endswith("__") or chr != "_") and chr or "" 44 | return new.replace("__", "*").replace("_", "?") 45 | 46 | 47 | def fuzzy_file_search(path, fullname): 48 | results = [] 49 | id, details = get_loader_details() 50 | for ext in sum((list(object[1]) for object in details), []): 51 | results.extend(Path(path).glob(fullname + ext)) 52 | "_" in fullname and results.extend(Path(path).glob(fuzzy_query(fullname) + ext)) 53 | return results 54 | 55 | 56 | class FuzzyFinder(FileFinder): 57 | """Adds the ability to open file names with special characters using underscores.""" 58 | 59 | def find_spec(self, fullname, target=None): 60 | """Try to finder the spec and if it cannot be found, use the underscore starring syntax 61 | to identify potential matches. 62 | """ 63 | spec = super().find_spec(fullname, target=target) 64 | raw = fullname 65 | if spec is None: 66 | original = fullname 67 | 68 | if "." in fullname: 69 | original, fullname = fullname.rsplit(".", 1) 70 | else: 71 | original, fullname = "", original 72 | 73 | if "_" in fullname: 74 | # find any files using the fuzzy convention 75 | files = fuzzy_file_search(self.path, fullname) 76 | if files: 77 | # sort and create of a path of the chosen file 78 | file = sorted(files, key=lambda x: x.stat().st_mtime, reverse=True)[0] 79 | name = file.stem 80 | if original: 81 | name = ".".join((original, name)) 82 | name = (original + "." + file.stem).lstrip(".") 83 | spec = super().find_spec(name, target=target) 84 | spec = spec and FuzzySpec( 85 | spec.name, 86 | spec.loader, 87 | origin=spec.origin, 88 | loader_state=spec.loader_state, 89 | alias=raw, 90 | is_package=bool(spec.submodule_search_locations), 91 | ) 92 | return spec 93 | 94 | 95 | def get_loader_details(): 96 | for id, path_hook in enumerate(sys.path_hooks): 97 | try: 98 | return ( 99 | id, 100 | list(inspect.getclosurevars(path_hook).nonlocals["loader_details"]), 101 | ) 102 | except: 103 | continue 104 | 105 | 106 | def get_loader_index(ext): 107 | path_id, details = get_loader_details() 108 | for i, (loader, exts) in enumerate(details): 109 | if ext in exts: 110 | return path_id, i, details 111 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | build-backend = "hatchling.build" 3 | requires = [ 4 | "hatch-vcs", 5 | "hatchling", 6 | ] 7 | 8 | [project] 9 | name = "importnb" 10 | description = "import jupyter notebooks as python modules and scripts." 11 | readme = "README.md" 12 | keywords = [ 13 | ] 14 | license = "BSD-3-Clause AND MPL-2.0" 15 | license-files = { paths = ["LICENSE", "LICENSE-MPL-2.0"] } 16 | authors = [{ name = "tonyfast", email = "tony.fast@gmail.com" }] 17 | requires-python = ">=3.8" 18 | classifiers = [ 19 | "Development Status :: 4 - Beta", 20 | "Framework :: IPython", 21 | "Framework :: Jupyter", 22 | "License :: OSI Approved :: BSD License", 23 | "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)", 24 | "Natural Language :: English", 25 | "Programming Language :: Python", 26 | "Programming Language :: Python :: 3 :: Only", 27 | "Programming Language :: Python :: 3.8", 28 | "Programming Language :: Python :: 3.9", 29 | "Programming Language :: Python :: 3.10", 30 | "Programming Language :: Python :: 3.11", 31 | "Programming Language :: Python :: 3.12", 32 | "Programming Language :: Python :: Implementation :: CPython", 33 | "Programming Language :: Python :: Implementation :: PyPy", 34 | ] 35 | dynamic = [ 36 | "version", 37 | ] # uses hatch-vcs 38 | dependencies = [ 39 | 'importlib-metadata>=4.8.3; python_version < "3.10"', 40 | ] 41 | [project.optional-dependencies] 42 | docs = [ 43 | "mkdocs-jupyter", 44 | "mkdocs-material", 45 | "ruamel.yaml", 46 | ] 47 | interactive = [ 48 | "ipython", 49 | ] 50 | tasks = [ 51 | "doit", 52 | "tomli", 53 | ] 54 | [project.urls] 55 | Documentation = "https://github.com/deathbeds/importnb#readme" 56 | Issues = "https://github.com/deathbeds/importnb/issues" 57 | Source = "https://github.com/deathbeds/importnb" 58 | [project.scripts] 59 | importnb = "importnb.__main__:main" 60 | [project.entry-points.importnb] 61 | ipy = "importnb.loader:Notebook" 62 | ipynb = "importnb.loader:Notebook" 63 | json = "importnb.loaders:Json" 64 | py = "importnb.loader:Loader" 65 | toml = "importnb.loaders:Toml" 66 | yaml = "importnb.loaders:Yaml" 67 | yml = "importnb.loaders:Yaml" 68 | [project.entry-points.pytest11] 69 | importnb = "importnb.utils.pytest_importnb" 70 | 71 | [tool.hatch.build.hooks.custom] 72 | # we build a json grammar with each release because we fiend for line numbers 73 | dependencies = ["lark"] 74 | 75 | [tool.hatch.version] 76 | # versioning 77 | source = "vcs" 78 | 79 | [tool.hatch.build.hooks.vcs] 80 | version-file = "src/importnb/_version.py" 81 | 82 | [tool.hatch.build.targets.sdist] 83 | exclude = ["docs/coverage", "docs/*.png"] 84 | 85 | [tool.hatch.envs.test] 86 | # test matrix 87 | dependencies = [ 88 | "pytest", 89 | "pytest-cov", 90 | "doit", 91 | "tomli", 92 | "ruamel.yaml", 93 | "tomli_w", 94 | ] 95 | 96 | [[tool.hatch.envs.test.matrix]] 97 | version = ["stdlib", "interactive"] 98 | 99 | [tool.hatch.envs.test.overrides] 100 | matrix.version.features = [{ value = "interactive", if = ["interactive"] }] 101 | matrix.version.dev-mode = [{ value = false, env = ["CI=true"] }] 102 | 103 | [tool.hatch.envs.test.scripts] 104 | cov = """ 105 | pytest --cov=importnb --cov-branch --cov-context=test --cov-report=html --cov-report=term-missing:skip-covered --no-cov-on-fail 106 | """ 107 | run = "pytest" 108 | 109 | [tool.hatch.envs.released] 110 | # test a release on test-pypi 111 | dependencies = [ 112 | "importnb", 113 | "IPython", 114 | "pytest", 115 | "pytest-cov", 116 | "doit", 117 | "tomli_w", 118 | "ruamel.yaml", 119 | ] 120 | 121 | [tool.hatch.envs.released.scripts] 122 | test = "pytest" 123 | 124 | [tool.hatch.envs.released.env-vars] 125 | PIP_INDEX_URL = "https://test.pypi.org/simple/" 126 | PIP_EXTRA_INDEX_URL = "https://pypi.org/simple/" 127 | PIP_PRE = "1" 128 | 129 | [[tool.hatch.envs.released.matrix]] 130 | version = ["interactive", "stdlib"] 131 | 132 | [tool.hatch.envs.docs] 133 | # documentation 134 | features = ["docs"] 135 | 136 | [tool.hatch.envs.docs.scripts] 137 | build = "mkdocs build" 138 | serve = "mkdocs serve" 139 | 140 | [tool.hatch.envs.format] 141 | skip-install = true 142 | dependencies = [ 143 | "ruff", 144 | "pyproject-fmt" 145 | ] 146 | 147 | [tool.hatch.envs.format.scripts] 148 | code = """ 149 | pyproject-fmt pyproject.toml 150 | ruff --fix-only . 151 | ruff format . 152 | """ 153 | 154 | [tool.ruff] 155 | # formatting cause linting sucks 156 | cache-dir = "build/.cache/ruff" 157 | line-length = 100 158 | extend-include = ["*.ipynb"] 159 | 160 | [tool.ruff.lint] 161 | isort.known-first-party = ["importnb"] 162 | ignore = [ 163 | "D203", 164 | "D213", 165 | "COM812", 166 | "ISC001", 167 | ] 168 | select = [ 169 | "A", 170 | "ANN", 171 | "ARG", 172 | "B", 173 | "BLE", 174 | "C4", 175 | "C90", 176 | "COM", 177 | "D", 178 | "DJ", 179 | "DTZ", 180 | "E", 181 | "EM", 182 | "ERA", 183 | "EXE", 184 | "F", 185 | "FBT", 186 | "G", 187 | "I", 188 | "ICN", 189 | "INP", 190 | "ISC", 191 | "N", 192 | "NPY", 193 | "PD", 194 | "PGH", 195 | "PIE", 196 | "PL", 197 | "PT", 198 | "PTH", 199 | "PYI", 200 | "Q", 201 | "RET", 202 | "RSE", 203 | "RUF", 204 | "S", 205 | "SIM", 206 | "SLF", 207 | "T10", 208 | "T20", 209 | "TCH", 210 | "TID", 211 | "TRY", 212 | "UP", 213 | "W", 214 | "YTT", 215 | ] 216 | [tool.ruff.lint.per-file-ignores] 217 | "test_cli.py" = ["D415", "D207", "D208"] 218 | 219 | [tool.pytest.ini_options] 220 | cache_dir = "build/.cache/pytest" 221 | addopts = [ 222 | "-vv", 223 | "--tb=long", 224 | "--color=yes", 225 | "-ppytester", 226 | ] 227 | filterwarnings = [ 228 | "error", 229 | ] 230 | 231 | [tool.coverage.run] 232 | data_file = "build/.coverage" 233 | [tool.coverage.report] 234 | omit = ["_json_parser.py"] 235 | [tool.coverage.html] 236 | directory = "docs/coverage" 237 | show_contexts = true 238 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # `importnb` imports notebooks as python modules. 2 | 3 | if you're here, then there is a chance you have a notebook (`.ipynb`) in a directory saved as `Untitled.ipynb`. it is just sitting there, but what if it could be used as a python module? `importnb` is here to answer that question. 4 | 5 | 6 | ## basic example 7 | use `importnb`'s `Notebook` finder and loader to import notebooks as modules 8 | 9 | # with the new api 10 | from importnb import imports 11 | with imports("ipynb"): 12 | import Untitled 13 | 14 | # with the explicit api 15 | from importnb import imports 16 | with Notebook(): 17 | import Untitled 18 | 19 | 20 | 21 | 22 | ### What does this snippet do? 23 | 24 | > the snippet begins `with` a context manager that modifies the files python can discover. 25 | it will find the `Untitled.ipynb` notebook and import it as a module with `__name__` `Untitled`. 26 | the `__file__` description will have `.ipynb` as an extension. 27 | 28 | maybe when we give notebooks new life they eventually earn a better name than `Untitled`? 29 | 30 | ## run a notebook as a script 31 | 32 | the `importnb` command line interface mimics python's. it permits running notebooks files, modules, and raw json data. 33 | 34 | the commands below execute a notebook module and file respectively. 35 | 36 | importnb -m Untitled # call the Untitled module as __main__ 37 | importnb Untitled.ipynb # call the Untitled file as __main__ 38 | 39 | ## installing `importnb` 40 | 41 | use either `pip` or `conda/mamba` 42 | 43 | pip install importnb 44 | conda install -cconda-forge importnb 45 | mamba install -cconda-forge importnb 46 | 47 | 48 | 49 | ## `importnb` features 50 | 51 | * `importnb.Notebook` offers parameters to customize how modules are imported 52 | * imports Jupyter notebooks as python modules 53 | * fuzzy finding conventions for finding files that are not valid python names 54 | * works with top-level await statements 55 | * integration with `pytest` 56 | * extensible machinery and entry points 57 | * translates Jupyter notebook files (ie `.ipynb` files) line-for-line to python source providing natural error messages 58 | * command line interface for running notebooks as python scripts 59 | * has no required dependencies 60 | 61 | ### customizing parameters 62 | 63 | the `Notebook` object has a few features that can be toggled: 64 | 65 | * `lazy:bool=False` lazy load the module, the namespace is populated when the module is access the first time. 66 | * `position:int=0` the relative position of the import loader in the `sys.path_hooks` 67 | * `fuzzy:bool=True` use fuzzy searching syntax when underscores are encountered. 68 | * `include_markdown_docstring:bool=True` markdown blocks preceding function/class defs become docstrings. 69 | * `include_magic:bool=True` ignore any ipython magic syntaxes 70 | * `only_defs:bool=False` import only function and class definitions. ignore intermediate * expressions. 71 | * `no_magic:bool=False` execute `IPython` magic statements from the loader. 72 | 73 | these features are defined in the `importnb.loader.Interface` class and they can be controlled throught the command line interface. 74 | 75 | ### importing notebooks 76 | 77 | the primary goal of this library is to make it easy to reuse python code in notebooks. below are a few ways to invoke python's import system within the context manager. 78 | 79 | with importnb.imports("ipynb"): 80 | import Untitled 81 | import Untitled as nb 82 | __import__("Untitled") 83 | from importlib import import_module 84 | import_module("Untitled") 85 | 86 | #### import data files 87 | 88 | there is support for discovering data files. when discovered, data from disk on loaded and stored on the module with rich reprs. 89 | 90 | with importnb.imports("toml", "json", "yaml"): 91 | pass 92 | 93 | all the available entry points are found with 94 | 95 | from importnb.entry_points import list_aliases 96 | list_aliases() 97 | 98 | #### loading directly from file 99 | 100 | Untitled = Notebook.load("Untitled.ipynb") 101 | 102 | 103 | ### fuzzy finding 104 | 105 | often notebooks have names that are not valid python files names that are restricted alphanumeric characters and an `_`. the `importnb` fuzzy finder converts python's import convention into globs that will find modules matching specific patters. consider the statement: 106 | 107 | with importnb.Notebook(): 108 | import U_titl__d # U*titl**d.ipynb 109 | 110 | `importnb` translates `U_titl__d` to a glob format that matches the pattern `U*titl**d.ipynb` when searching for the source. that means that `importnb` should fine `Untitled.ipynb` as the source for the import[^unless]. 111 | 112 | with importnb.Notebook(): 113 | import _ntitled # *ntitled.ipynb 114 | import __d # **d.ipynb 115 | import U__ # U**.ipynb 116 | 117 | a primary motivation for this feature is name notebooks as if they were blog posts using the `YYYY-MM-DD-title-here.ipynb` convention. there are a few ways we could this file explicitly. the fuzzy finder syntax could like any of the following: 118 | 119 | with importnb.Notebook(): 120 | import __title_here 121 | import YYYY_MM_DD_title_here 122 | import __MM_DD_title_here 123 | 124 | #### fuzzy name ambiguity 125 | 126 | it is possible that a fuzzy import may be ambiguous are return multiple files. 127 | the `importnb` fuzzy finder will prefer the most recently changed file. 128 | 129 | ambiguity can be avoided by using more explicit fuzzy imports that will reduce collisions. 130 | another option is use python's explicit import functions. 131 | 132 | 133 | with importnb.Notebook(): 134 | __import__("YYYY-MM-DD-title-here") 135 | import_module("YYYY-MM-DD-title-here") 136 | 137 | 138 | #### importing your most recently changed notebook 139 | 140 | an outcome of resolving the most recently changed is that you can import your most recent notebook with: 141 | 142 | import __ # **.ipynb 143 | 144 | ### integrations 145 | 146 | #### `pytest` 147 | 148 | since `importnb` transforms notebooks to python documents we can use these as source for tests. 149 | `importnb`s `pytest` extension is not fancy, it only allows for conventional pytest test discovery. 150 | 151 | `nbval` is alternative testing tools that validates notebook outputs. this style is near to using notebooks as `doctest` while `importnb` primarily adds the ability to write `unittest`s in notebooks. adding tests to notebooks help preserve them over time. 152 | 153 | #### extensible 154 | 155 | the `importnb.Notebook` machinery is extensible. it allows other file formats to be used. for example, `pidgy` uses `importnb` to import `markdown` files as compiled python code. 156 | 157 | class MyLoader(importnb.Notebook): pass 158 | 159 | 160 | --- 161 | 162 | ## developer 163 | 164 | ```bash 165 | pip install -e. # install in development mode 166 | hatch run test:cov # test 167 | ``` 168 | 169 | * `importnb` uses `hatch` for testing in python and `IPython` 170 | 171 | --- 172 | 173 | ## appendix 174 | ### line-for-line translation and natural error messages 175 | 176 | a challenge with Jupyter notebooks is that they are `json` data. this poses problems: 177 | 178 | 1. every valid line of code in a Jupyter notebook is a quoted `json` string 179 | 2. `json` parsers don't have a reason to return line numbers. 180 | 181 | #### the problem with quoted code 182 | 183 | #### line-for-line `json` parser 184 | 185 | python's `json` module is not pluggable in the way we need to find line numbers. since `importnb` is meant to be dependency free on installation we couldn't look to any other packages like `ujson` or `json5`. 186 | 187 | the need for line numbers is enough that we ship a standalone `json` grammar parser. to do this without extra dependencies we use the `lark` grammar package at build time: 188 | * we've defined a `json.g`ramar 189 | * we use `hatch` hooks to invoke `lark-standalone` that generates a standalone parser for the grammar. the generated file is shipped with the package. 190 | * this code is licensed under the Mozilla Public License 2.0 191 | 192 | the result of `importnb` is `json` data translated into vertically sparse, valid python code. 193 | 194 | #### reproducibility caution with the fuzzy finder 195 | 196 | ⚠️ fuzzy finding is not reproducible as your system will change over time. in python, "explicit is better than implicit" so defining strong fuzzy strings is best practice if you MUST use esotric names. an alternative option is to use the `importlib.import_module` machinery 197 | 198 | 199 | [pip]: # 200 | [conda]: # 201 | [mamba]: # 202 | [pidgy]: # 203 | -------------------------------------------------------------------------------- /docs/test_importnb.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import inspect 3 | import json 4 | import linecache 5 | import sys 6 | from importlib import reload 7 | from importlib.util import find_spec 8 | from pathlib import Path 9 | from shutil import copyfile, rmtree 10 | from types import FunctionType 11 | 12 | from pytest import fixture, mark, raises 13 | 14 | import importnb 15 | from importnb import Notebook, get_ipython, imports 16 | from importnb.loader import VERSION 17 | 18 | CLOBBER = ("Untitled42", "my_package", "__42", "__ed42", "__d42") 19 | 20 | HERE = locals().get("__file__", None) 21 | HERE = (Path(HERE).parent if HERE else Path()).absolute() 22 | 23 | sys.path.insert(0, str(HERE)) 24 | 25 | IPY = bool(get_ipython()) 26 | print(88, IPY) 27 | ipy = mark.skipif(not IPY, reason="""Not IPython.""") 28 | 29 | 30 | @fixture(scope="session") 31 | def ref(): 32 | return Notebook.load_file(HERE / "Untitled42.ipynb") 33 | 34 | 35 | @fixture() 36 | def clean(): 37 | yield 38 | unimport(CLOBBER) 39 | 40 | 41 | @fixture() 42 | def package(ref): 43 | package = HERE / "my_package" 44 | package.mkdir(parents=True, exist_ok=True) 45 | target = package / "my_module.ipynb" 46 | copyfile(ref.__file__, package / target) 47 | yield package 48 | target.unlink() 49 | rmtree(package) 50 | 51 | 52 | @fixture() 53 | def minified(ref): 54 | minified = Path(HERE / "minified.ipynb") 55 | with open(ref.__file__) as f, open(minified, "w") as o: 56 | json.dump(json.load(f), o, separators=(",", ":")) 57 | 58 | yield 59 | minified.unlink() 60 | 61 | 62 | @fixture() 63 | def untitled_py(ref): 64 | py = Path(ref.__file__).with_suffix(".py") 65 | py.touch() 66 | yield 67 | py.unlink() 68 | 69 | 70 | def cant_reload(m): 71 | with raises(ImportError): 72 | reload(m) 73 | 74 | 75 | def unimport(ns): 76 | """Unimport a module namespace""" 77 | from sys import modules, path_importer_cache 78 | 79 | for module in [x for x in modules if x.startswith(ns)]: 80 | del modules[module] 81 | 82 | path_importer_cache.clear() 83 | 84 | 85 | def test_version(): 86 | assert importnb.__version__ 87 | 88 | 89 | def test_ref(ref): 90 | assert ref.__file__.endswith(".ipynb") 91 | 92 | 93 | def test_finder(): 94 | assert not find_spec("Untitled42") 95 | with Notebook(): 96 | assert find_spec("Untitled42") 97 | 98 | 99 | def test_basic(clean, ref): 100 | with Notebook(): 101 | import Untitled42 102 | 103 | assert ref is not Untitled42 104 | assert Untitled42.__file__ == ref.__file__ 105 | assert isinstance(Untitled42.__loader__, Notebook) 106 | with Notebook(): 107 | assert reload(Untitled42) 108 | 109 | 110 | def test_load_module(clean, ref): 111 | m = Notebook.load_module("Untitled42") 112 | assert m.__file__ == ref.__file__ 113 | cant_reload(m) 114 | 115 | 116 | def test_load_module_package(clean, package): 117 | m = Notebook.load_module("my_package.my_module") 118 | 119 | 120 | def test_load_file(clean, ref): 121 | m = Notebook.load_file("docs/Untitled42.ipynb") 122 | assert ref.__file__.endswith(str(Path(m.__file__))) 123 | cant_reload(m) 124 | 125 | 126 | def test_load_code(clean): 127 | assert Notebook.load_code(""), "can't load an empty notebook" 128 | body = Path("docs/Untitled42.ipynb").read_text() 129 | m = Notebook.load_code(body) 130 | cant_reload(m) 131 | 132 | 133 | def test_package(clean, package): 134 | with Notebook(): 135 | import my_package.my_module 136 | 137 | assert hasattr(my_package, "__path__") 138 | with raises(ModuleNotFoundError): 139 | # we can't find a spec for a notebook without the notebook loader context 140 | reload(my_package.my_module) 141 | 142 | with Notebook(): 143 | reload(my_package.my_module) 144 | 145 | 146 | @mark.parametrize("magic", [True, False]) 147 | def test_no_magic(capsys, clean, magic, ref): 148 | with Notebook(no_magic=not magic): 149 | import Untitled42 150 | 151 | assert Untitled42 152 | 153 | stdout = capsys.readouterr()[0] 154 | if IPY: 155 | if magic: 156 | assert ref.magic_slug.rstrip() in stdout 157 | else: 158 | assert ref.magic_slug.rstrip() not in stdout 159 | 160 | 161 | @mark.parametrize("defs", [True, False]) 162 | def test_defs_only(defs, ref): 163 | known_defs = [ 164 | k for k, v in vars(ref).items() if k[0] != "_" and isinstance(v, (type, FunctionType)) 165 | ] 166 | not_defs = [k for k, v in vars(ref).items() if k[0] != "_" and isinstance(v, (str,))] 167 | with Notebook(include_non_defs=not defs): 168 | import Untitled42 169 | 170 | assert all(hasattr(Untitled42, k) for k in known_defs) 171 | 172 | if defs: 173 | assert not any(hasattr(Untitled42, k) for k in not_defs) 174 | 175 | 176 | def test_fuzzy_finder(clean, ref, capsys): 177 | outs = [] 178 | with Notebook(): 179 | import __ed42 180 | 181 | assert __ed42 182 | 183 | outs.append(capsys.readouterr()) 184 | import __d42 185 | 186 | assert __d42 187 | 188 | outs.append(capsys.readouterr()) 189 | import __42 190 | 191 | assert __42 192 | 193 | outs.append(capsys.readouterr()) 194 | import __42 195 | 196 | assert __42 197 | 198 | outs.append(capsys.readouterr()) 199 | import __42 as nb 200 | 201 | assert nb 202 | 203 | outs.append(capsys.readouterr()) 204 | 205 | assert outs[0] == outs[1] == outs[2] 206 | assert not any([outs[3].out, outs[3].err] + [outs[4].out, outs[4].err]) 207 | 208 | 209 | def test_fuzzy_finder_conflict(clean, ref): 210 | try: 211 | with Notebook(): 212 | spec = find_spec("__d42") 213 | assert find_spec("__d42") 214 | 215 | new = HERE / "d42.ipynb" 216 | new.write_text("{}") 217 | spec2 = find_spec("__d42") 218 | 219 | assert spec.loader.path != spec2.loader.path 220 | finally: 221 | with Notebook(): 222 | new.unlink() 223 | spec3 = find_spec("__d42") 224 | assert spec.loader.path == spec3.loader.path 225 | 226 | 227 | def test_minified_json(ref, minified): 228 | with Notebook(): 229 | import minified as minned 230 | 231 | example_source = inspect.getsource(minned.function_with_a_markdown_docstring) 232 | assert example_source 233 | 234 | 235 | def test_docstrings(clean, ref): 236 | with Notebook(): 237 | import Untitled42 as nb 238 | 239 | assert nb 240 | assert nb.function_with_a_markdown_docstring.__doc__ 241 | assert nb.class_with_a_python_docstring.__doc__ 242 | assert nb.function_with_a_markdown_docstring.__doc__ 243 | 244 | assert nb.__doc__ == ref.__doc__ 245 | assert ( 246 | nb.function_with_a_markdown_docstring.__doc__ 247 | == ref.function_with_a_markdown_docstring.__doc__ 248 | ) 249 | assert nb.class_with_a_python_docstring.__doc__ == ref.class_with_a_python_docstring.__doc__ 250 | assert nb.class_with_a_markdown_docstring.__doc__ == ref.class_with_a_markdown_docstring.__doc__ 251 | 252 | assert ast.parse( 253 | inspect.getsource(nb.function_with_a_markdown_docstring), 254 | ), """The source is invalid""" 255 | 256 | # the line cache isnt json, it is python 257 | with raises(getattr(json, "JSONDecodeError", ValueError)): 258 | json.loads("".join(linecache.cache[nb.__file__][2])) 259 | 260 | assert inspect.getsource(nb).strip() == "".join(linecache.cache[nb.__file__][2]).strip() 261 | 262 | 263 | def test_python_file_takes_precedent(clean, ref, untitled_py): 264 | with Notebook(): 265 | import Untitled42 266 | assert Untitled42.__file__.endswith(".py") 267 | 268 | 269 | def test_lazy(capsys, clean): 270 | """Use stdout to test this depsite there probably being a better way""" 271 | with Notebook(lazy=True): 272 | import Untitled42 as module 273 | assert not capsys.readouterr()[0], capsys.readouterr()[0] 274 | module.slug, "The function gets executed here" 275 | assert capsys.readouterr()[0] 276 | 277 | 278 | @ipy 279 | def test_import_ipy(): 280 | """Import ipy scripts, this won't really work without ipython.""" 281 | with Notebook(): 282 | import ascript 283 | 284 | assert ascript.msg 285 | 286 | 287 | @ipy 288 | def test_cli(clean): 289 | with Notebook(): 290 | import Untitled42 as module 291 | __import__("subprocess").check_call( 292 | f"ipython -m {module.__name__}".split(), 293 | cwd=str(Path(module.__file__).parent), 294 | ) 295 | __import__("subprocess").check_call( 296 | f"ipython -m importnb -- {module.__file__}".split(), 297 | cwd=str(Path(module.__file__).parent), 298 | ) 299 | 300 | 301 | @mark.skipif(VERSION < (3, 8), reason="async not supported in 3.7") 302 | @mark.filterwarnings("ignore::DeprecationWarning") 303 | def test_top_level_async(): 304 | with Notebook(): 305 | import async_cells 306 | 307 | assert async_cells 308 | 309 | 310 | def test_data_loaders(pytester): 311 | some_random_data = {"top": [{}]} 312 | 313 | import io 314 | import json 315 | 316 | import tomli_w 317 | from ruamel.yaml import YAML 318 | 319 | yaml = YAML(typ="safe", pure=True) 320 | 321 | sys.path.insert(0, str(pytester._path)) 322 | pytester.makefile(".json", json_data=json.dumps(some_random_data)) 323 | pytester.makefile(".toml", toml_data=tomli_w.dumps(some_random_data)) 324 | y = io.StringIO() 325 | yaml.dump(some_random_data, y) 326 | pytester.makefile(".yaml", yaml_data=y.getvalue()) 327 | 328 | with imports("json", "yaml", "toml"): 329 | import json_data 330 | import toml_data 331 | import yaml_data 332 | assert json_data.__file__.endswith(".json") 333 | assert toml_data.__file__.endswith(".toml") 334 | assert yaml_data.__file__.endswith(".yaml") 335 | -------------------------------------------------------------------------------- /LICENSE-MPL-2.0: -------------------------------------------------------------------------------- 1 | Mozilla Public License Version 2.0 2 | ================================== 3 | 4 | 1. Definitions 5 | -------------- 6 | 7 | 1.1. "Contributor" 8 | means each individual or legal entity that creates, contributes to 9 | the creation of, or owns Covered Software. 10 | 11 | 1.2. "Contributor Version" 12 | means the combination of the Contributions of others (if any) used 13 | by a Contributor and that particular Contributor's Contribution. 14 | 15 | 1.3. "Contribution" 16 | means Covered Software of a particular Contributor. 17 | 18 | 1.4. "Covered Software" 19 | means Source Code Form to which the initial Contributor has attached 20 | the notice in Exhibit A, the Executable Form of such Source Code 21 | Form, and Modifications of such Source Code Form, in each case 22 | including portions thereof. 23 | 24 | 1.5. "Incompatible With Secondary Licenses" 25 | means 26 | 27 | (a) that the initial Contributor has attached the notice described 28 | in Exhibit B to the Covered Software; or 29 | 30 | (b) that the Covered Software was made available under the terms of 31 | version 1.1 or earlier of the License, but not also under the 32 | terms of a Secondary License. 33 | 34 | 1.6. "Executable Form" 35 | means any form of the work other than Source Code Form. 36 | 37 | 1.7. "Larger Work" 38 | means a work that combines Covered Software with other material, in 39 | a separate file or files, that is not Covered Software. 40 | 41 | 1.8. "License" 42 | means this document. 43 | 44 | 1.9. "Licensable" 45 | means having the right to grant, to the maximum extent possible, 46 | whether at the time of the initial grant or subsequently, any and 47 | all of the rights conveyed by this License. 48 | 49 | 1.10. "Modifications" 50 | means any of the following: 51 | 52 | (a) any file in Source Code Form that results from an addition to, 53 | deletion from, or modification of the contents of Covered 54 | Software; or 55 | 56 | (b) any new file in Source Code Form that contains any Covered 57 | Software. 58 | 59 | 1.11. "Patent Claims" of a Contributor 60 | means any patent claim(s), including without limitation, method, 61 | process, and apparatus claims, in any patent Licensable by such 62 | Contributor that would be infringed, but for the grant of the 63 | License, by the making, using, selling, offering for sale, having 64 | made, import, or transfer of either its Contributions or its 65 | Contributor Version. 66 | 67 | 1.12. "Secondary License" 68 | means either the GNU General Public License, Version 2.0, the GNU 69 | Lesser General Public License, Version 2.1, the GNU Affero General 70 | Public License, Version 3.0, or any later versions of those 71 | licenses. 72 | 73 | 1.13. "Source Code Form" 74 | means the form of the work preferred for making modifications. 75 | 76 | 1.14. "You" (or "Your") 77 | means an individual or a legal entity exercising rights under this 78 | License. For legal entities, "You" includes any entity that 79 | controls, is controlled by, or is under common control with You. For 80 | purposes of this definition, "control" means (a) the power, direct 81 | or indirect, to cause the direction or management of such entity, 82 | whether by contract or otherwise, or (b) ownership of more than 83 | fifty percent (50%) of the outstanding shares or beneficial 84 | ownership of such entity. 85 | 86 | 2. License Grants and Conditions 87 | -------------------------------- 88 | 89 | 2.1. Grants 90 | 91 | Each Contributor hereby grants You a world-wide, royalty-free, 92 | non-exclusive license: 93 | 94 | (a) under intellectual property rights (other than patent or trademark) 95 | Licensable by such Contributor to use, reproduce, make available, 96 | modify, display, perform, distribute, and otherwise exploit its 97 | Contributions, either on an unmodified basis, with Modifications, or 98 | as part of a Larger Work; and 99 | 100 | (b) under Patent Claims of such Contributor to make, use, sell, offer 101 | for sale, have made, import, and otherwise transfer either its 102 | Contributions or its Contributor Version. 103 | 104 | 2.2. Effective Date 105 | 106 | The licenses granted in Section 2.1 with respect to any Contribution 107 | become effective for each Contribution on the date the Contributor first 108 | distributes such Contribution. 109 | 110 | 2.3. Limitations on Grant Scope 111 | 112 | The licenses granted in this Section 2 are the only rights granted under 113 | this License. No additional rights or licenses will be implied from the 114 | distribution or licensing of Covered Software under this License. 115 | Notwithstanding Section 2.1(b) above, no patent license is granted by a 116 | Contributor: 117 | 118 | (a) for any code that a Contributor has removed from Covered Software; 119 | or 120 | 121 | (b) for infringements caused by: (i) Your and any other third party's 122 | modifications of Covered Software, or (ii) the combination of its 123 | Contributions with other software (except as part of its Contributor 124 | Version); or 125 | 126 | (c) under Patent Claims infringed by Covered Software in the absence of 127 | its Contributions. 128 | 129 | This License does not grant any rights in the trademarks, service marks, 130 | or logos of any Contributor (except as may be necessary to comply with 131 | the notice requirements in Section 3.4). 132 | 133 | 2.4. Subsequent Licenses 134 | 135 | No Contributor makes additional grants as a result of Your choice to 136 | distribute the Covered Software under a subsequent version of this 137 | License (see Section 10.2) or under the terms of a Secondary License (if 138 | permitted under the terms of Section 3.3). 139 | 140 | 2.5. Representation 141 | 142 | Each Contributor represents that the Contributor believes its 143 | Contributions are its original creation(s) or it has sufficient rights 144 | to grant the rights to its Contributions conveyed by this License. 145 | 146 | 2.6. Fair Use 147 | 148 | This License is not intended to limit any rights You have under 149 | applicable copyright doctrines of fair use, fair dealing, or other 150 | equivalents. 151 | 152 | 2.7. Conditions 153 | 154 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted 155 | in Section 2.1. 156 | 157 | 3. Responsibilities 158 | ------------------- 159 | 160 | 3.1. Distribution of Source Form 161 | 162 | All distribution of Covered Software in Source Code Form, including any 163 | Modifications that You create or to which You contribute, must be under 164 | the terms of this License. You must inform recipients that the Source 165 | Code Form of the Covered Software is governed by the terms of this 166 | License, and how they can obtain a copy of this License. You may not 167 | attempt to alter or restrict the recipients' rights in the Source Code 168 | Form. 169 | 170 | 3.2. Distribution of Executable Form 171 | 172 | If You distribute Covered Software in Executable Form then: 173 | 174 | (a) such Covered Software must also be made available in Source Code 175 | Form, as described in Section 3.1, and You must inform recipients of 176 | the Executable Form how they can obtain a copy of such Source Code 177 | Form by reasonable means in a timely manner, at a charge no more 178 | than the cost of distribution to the recipient; and 179 | 180 | (b) You may distribute such Executable Form under the terms of this 181 | License, or sublicense it under different terms, provided that the 182 | license for the Executable Form does not attempt to limit or alter 183 | the recipients' rights in the Source Code Form under this License. 184 | 185 | 3.3. Distribution of a Larger Work 186 | 187 | You may create and distribute a Larger Work under terms of Your choice, 188 | provided that You also comply with the requirements of this License for 189 | the Covered Software. If the Larger Work is a combination of Covered 190 | Software with a work governed by one or more Secondary Licenses, and the 191 | Covered Software is not Incompatible With Secondary Licenses, this 192 | License permits You to additionally distribute such Covered Software 193 | under the terms of such Secondary License(s), so that the recipient of 194 | the Larger Work may, at their option, further distribute the Covered 195 | Software under the terms of either this License or such Secondary 196 | License(s). 197 | 198 | 3.4. Notices 199 | 200 | You may not remove or alter the substance of any license notices 201 | (including copyright notices, patent notices, disclaimers of warranty, 202 | or limitations of liability) contained within the Source Code Form of 203 | the Covered Software, except that You may alter any license notices to 204 | the extent required to remedy known factual inaccuracies. 205 | 206 | 3.5. Application of Additional Terms 207 | 208 | You may choose to offer, and to charge a fee for, warranty, support, 209 | indemnity or liability obligations to one or more recipients of Covered 210 | Software. However, You may do so only on Your own behalf, and not on 211 | behalf of any Contributor. You must make it absolutely clear that any 212 | such warranty, support, indemnity, or liability obligation is offered by 213 | You alone, and You hereby agree to indemnify every Contributor for any 214 | liability incurred by such Contributor as a result of warranty, support, 215 | indemnity or liability terms You offer. You may include additional 216 | disclaimers of warranty and limitations of liability specific to any 217 | jurisdiction. 218 | 219 | 4. Inability to Comply Due to Statute or Regulation 220 | --------------------------------------------------- 221 | 222 | If it is impossible for You to comply with any of the terms of this 223 | License with respect to some or all of the Covered Software due to 224 | statute, judicial order, or regulation then You must: (a) comply with 225 | the terms of this License to the maximum extent possible; and (b) 226 | describe the limitations and the code they affect. Such description must 227 | be placed in a text file included with all distributions of the Covered 228 | Software under this License. Except to the extent prohibited by statute 229 | or regulation, such description must be sufficiently detailed for a 230 | recipient of ordinary skill to be able to understand it. 231 | 232 | 5. Termination 233 | -------------- 234 | 235 | 5.1. The rights granted under this License will terminate automatically 236 | if You fail to comply with any of its terms. However, if You become 237 | compliant, then the rights granted under this License from a particular 238 | Contributor are reinstated (a) provisionally, unless and until such 239 | Contributor explicitly and finally terminates Your grants, and (b) on an 240 | ongoing basis, if such Contributor fails to notify You of the 241 | non-compliance by some reasonable means prior to 60 days after You have 242 | come back into compliance. Moreover, Your grants from a particular 243 | Contributor are reinstated on an ongoing basis if such Contributor 244 | notifies You of the non-compliance by some reasonable means, this is the 245 | first time You have received notice of non-compliance with this License 246 | from such Contributor, and You become compliant prior to 30 days after 247 | Your receipt of the notice. 248 | 249 | 5.2. If You initiate litigation against any entity by asserting a patent 250 | infringement claim (excluding declaratory judgment actions, 251 | counter-claims, and cross-claims) alleging that a Contributor Version 252 | directly or indirectly infringes any patent, then the rights granted to 253 | You by any and all Contributors for the Covered Software under Section 254 | 2.1 of this License shall terminate. 255 | 256 | 5.3. In the event of termination under Sections 5.1 or 5.2 above, all 257 | end user license agreements (excluding distributors and resellers) which 258 | have been validly granted by You or Your distributors under this License 259 | prior to termination shall survive termination. 260 | 261 | ************************************************************************ 262 | * * 263 | * 6. Disclaimer of Warranty * 264 | * ------------------------- * 265 | * * 266 | * Covered Software is provided under this License on an "as is" * 267 | * basis, without warranty of any kind, either expressed, implied, or * 268 | * statutory, including, without limitation, warranties that the * 269 | * Covered Software is free of defects, merchantable, fit for a * 270 | * particular purpose or non-infringing. The entire risk as to the * 271 | * quality and performance of the Covered Software is with You. * 272 | * Should any Covered Software prove defective in any respect, You * 273 | * (not any Contributor) assume the cost of any necessary servicing, * 274 | * repair, or correction. This disclaimer of warranty constitutes an * 275 | * essential part of this License. No use of any Covered Software is * 276 | * authorized under this License except under this disclaimer. * 277 | * * 278 | ************************************************************************ 279 | 280 | ************************************************************************ 281 | * * 282 | * 7. Limitation of Liability * 283 | * -------------------------- * 284 | * * 285 | * Under no circumstances and under no legal theory, whether tort * 286 | * (including negligence), contract, or otherwise, shall any * 287 | * Contributor, or anyone who distributes Covered Software as * 288 | * permitted above, be liable to You for any direct, indirect, * 289 | * special, incidental, or consequential damages of any character * 290 | * including, without limitation, damages for lost profits, loss of * 291 | * goodwill, work stoppage, computer failure or malfunction, or any * 292 | * and all other commercial damages or losses, even if such party * 293 | * shall have been informed of the possibility of such damages. This * 294 | * limitation of liability shall not apply to liability for death or * 295 | * personal injury resulting from such party's negligence to the * 296 | * extent applicable law prohibits such limitation. Some * 297 | * jurisdictions do not allow the exclusion or limitation of * 298 | * incidental or consequential damages, so this exclusion and * 299 | * limitation may not apply to You. * 300 | * * 301 | ************************************************************************ 302 | 303 | 8. Litigation 304 | ------------- 305 | 306 | Any litigation relating to this License may be brought only in the 307 | courts of a jurisdiction where the defendant maintains its principal 308 | place of business and such litigation shall be governed by laws of that 309 | jurisdiction, without reference to its conflict-of-law provisions. 310 | Nothing in this Section shall prevent a party's ability to bring 311 | cross-claims or counter-claims. 312 | 313 | 9. Miscellaneous 314 | ---------------- 315 | 316 | This License represents the complete agreement concerning the subject 317 | matter hereof. If any provision of this License is held to be 318 | unenforceable, such provision shall be reformed only to the extent 319 | necessary to make it enforceable. Any law or regulation which provides 320 | that the language of a contract shall be construed against the drafter 321 | shall not be used to construe this License against a Contributor. 322 | 323 | 10. Versions of the License 324 | --------------------------- 325 | 326 | 10.1. New Versions 327 | 328 | Mozilla Foundation is the license steward. Except as provided in Section 329 | 10.3, no one other than the license steward has the right to modify or 330 | publish new versions of this License. Each version will be given a 331 | distinguishing version number. 332 | 333 | 10.2. Effect of New Versions 334 | 335 | You may distribute the Covered Software under the terms of the version 336 | of the License under which You originally received the Covered Software, 337 | or under the terms of any subsequent version published by the license 338 | steward. 339 | 340 | 10.3. Modified Versions 341 | 342 | If you create software not governed by this License, and you want to 343 | create a new license for such software, you may create and use a 344 | modified version of this License if you rename the license and remove 345 | any references to the name of the license steward (except to note that 346 | such modified license differs from this License). 347 | 348 | 10.4. Distributing Source Code Form that is Incompatible With Secondary 349 | Licenses 350 | 351 | If You choose to distribute Source Code Form that is Incompatible With 352 | Secondary Licenses under the terms of this version of the License, the 353 | notice described in Exhibit B of this License must be attached. 354 | 355 | Exhibit A - Source Code Form License Notice 356 | ------------------------------------------- 357 | 358 | This Source Code Form is subject to the terms of the Mozilla Public 359 | License, v. 2.0. If a copy of the MPL was not distributed with this 360 | file, You can obtain one at http://mozilla.org/MPL/2.0/. 361 | 362 | If it is not possible or desirable to put the notice in a particular 363 | file, then You may include the notice in a location (such as a LICENSE 364 | file in a relevant directory) where a recipient would be likely to look 365 | for such a notice. 366 | 367 | You may add additional accurate notices of copyright ownership. 368 | 369 | Exhibit B - "Incompatible With Secondary Licenses" Notice 370 | --------------------------------------------------------- 371 | 372 | This Source Code Form is "Incompatible With Secondary Licenses", as 373 | defined by the Mozilla Public License, v. 2.0. 374 | -------------------------------------------------------------------------------- /docs/Untitled42.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# `importnb` test specification\n", 8 | "\n", 9 | "this notebook is written to test many of the features of `importnb`.\n", 10 | "\n", 11 | "these features in this notebook test:\n", 12 | "* basic finding and loading\n", 13 | "* filtering magics, classes and functions\n", 14 | "* lazy loading\n", 15 | "* the command line interface" 16 | ] 17 | }, 18 | { 19 | "cell_type": "markdown", 20 | "metadata": {}, 21 | "source": [ 22 | "## a sentinel for execution" 23 | ] 24 | }, 25 | { 26 | "cell_type": "markdown", 27 | "metadata": {}, 28 | "source": [ 29 | "the `slug` below is used to measure that a module has been executed,\n", 30 | "we specifically use this expression to measure the lazy importing system." 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 1, 36 | "metadata": {}, 37 | "outputs": [ 38 | { 39 | "name": "stdout", 40 | "output_type": "stream", 41 | "text": [ 42 | "i was printed from and my name is __main__\n" 43 | ] 44 | } 45 | ], 46 | "source": [ 47 | "from pathlib import Path\n", 48 | "\n", 49 | "from importnb import get_ipython\n", 50 | "\n", 51 | "where = \"\"\n", 52 | "if \"__file__\" in locals():\n", 53 | " where = Path(__file__).as_posix()\n", 54 | "\n", 55 | "slug = \"i was printed from {where} \\\n", 56 | "and my name is {__name__}\"\n", 57 | "print(slug.format(**locals()))" 58 | ] 59 | }, 60 | { 61 | "cell_type": "markdown", 62 | "metadata": {}, 63 | "source": [ 64 | "## implicit markdown docstrings" 65 | ] 66 | }, 67 | { 68 | "cell_type": "markdown", 69 | "metadata": {}, 70 | "source": [ 71 | "there is a strict separation of code and non-code in notebooks.\n", 72 | "to encourage more/better documentation `importnb` will use a markdown\n", 73 | "cell preceeding a function as a docstring. \n", 74 | "as a result, the `function_with_a_markdown` docstring will have this markdown cell for a value." 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": 2, 80 | "metadata": {}, 81 | "outputs": [], 82 | "source": [ 83 | "def function_with_a_markdown_docstring():\n", 84 | " return # function_with_a_markdown has a docstring defined by the preceeding markdown cell" 85 | ] 86 | }, 87 | { 88 | "cell_type": "markdown", 89 | "metadata": {}, 90 | "source": [ 91 | "the same convention holds for classes and async functions." 92 | ] 93 | }, 94 | { 95 | "cell_type": "code", 96 | "execution_count": 3, 97 | "metadata": {}, 98 | "outputs": [], 99 | "source": [ 100 | "class class_with_a_markdown_docstring:\n", 101 | " ... # my docstring is the cell above." 102 | ] 103 | }, 104 | { 105 | "cell_type": "markdown", 106 | "metadata": {}, 107 | "source": [ 108 | "this is not a docstring for `class_with_a_string` because it defines its own." 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": 4, 114 | "metadata": {}, 115 | "outputs": [], 116 | "source": [ 117 | "class class_with_a_python_docstring:\n", 118 | " \"\"\"when a class defines its own docstring the preceeding cell is ignored.\"\"\"" 119 | ] 120 | }, 121 | { 122 | "cell_type": "markdown", 123 | "metadata": {}, 124 | "source": [ 125 | "## cell magics" 126 | ] 127 | }, 128 | { 129 | "cell_type": "code", 130 | "execution_count": 5, 131 | "metadata": {}, 132 | "outputs": [ 133 | { 134 | "name": "stdout", 135 | "output_type": "stream", 136 | "text": [ 137 | "i'm only show when cell magics are active.\n" 138 | ] 139 | } 140 | ], 141 | "source": [ 142 | "%%python\n", 143 | "print(\"i'm only show when cell magics are active.\")" 144 | ] 145 | }, 146 | { 147 | "cell_type": "code", 148 | "execution_count": 6, 149 | "metadata": {}, 150 | "outputs": [], 151 | "source": [ 152 | "if get_ipython():\n", 153 | " magic_slug = \"i'm only show when cell magics are active.\"\n", 154 | " if __import__(\"sys\").platform == \"win32\":\n", 155 | " magic_slug += \"\\n\"\n", 156 | "else:\n", 157 | " magic_slug = f\"this was printed from the module named {__name__}\"\n", 158 | " print(magic_slug)" 159 | ] 160 | }, 161 | { 162 | "cell_type": "markdown", 163 | "metadata": {}, 164 | "source": [ 165 | "## notebooks as scripts" 166 | ] 167 | }, 168 | { 169 | "cell_type": "markdown", 170 | "metadata": {}, 171 | "source": [ 172 | "the main block is a python convention we can apply in notebooks imported by importnb." 173 | ] 174 | }, 175 | { 176 | "cell_type": "code", 177 | "execution_count": 7, 178 | "metadata": {}, 179 | "outputs": [], 180 | "source": [ 181 | "def get_parser():\n", 182 | " from argparse import REMAINDER, ArgumentParser\n", 183 | "\n", 184 | " parser = ArgumentParser(\"test_parser\")\n", 185 | " parser.add_argument(\"--\", nargs=REMAINDER, dest=\"args\")\n", 186 | " return parser" 187 | ] 188 | }, 189 | { 190 | "cell_type": "code", 191 | "execution_count": 8, 192 | "metadata": {}, 193 | "outputs": [], 194 | "source": [ 195 | "def main(argv=None):\n", 196 | " parser = get_parser()\n", 197 | " print(\"the parser namespace is\", parser.parse_args(argv))" 198 | ] 199 | }, 200 | { 201 | "cell_type": "markdown", 202 | "metadata": {}, 203 | "source": [ 204 | "### notebooks as `doit` tasks\n", 205 | "\n", 206 | "[`doit`](https://pydoit.org/) is powerful alternative to makefiles for running development tasks.\n", 207 | "the `importnb` command line provides support for `doit` conventions, but does not provide the dependency;\n", 208 | "you the `doit` are responsible for that." 209 | ] 210 | }, 211 | { 212 | "cell_type": "markdown", 213 | "metadata": {}, 214 | "source": [ 215 | "this the docstring for the `echo` task that echos hello." 216 | ] 217 | }, 218 | { 219 | "cell_type": "code", 220 | "execution_count": 9, 221 | "metadata": {}, 222 | "outputs": [], 223 | "source": [ 224 | "def task_echo():\n", 225 | " return dict(actions=[\"echo hello\"])" 226 | ] 227 | }, 228 | { 229 | "cell_type": "code", 230 | "execution_count": 10, 231 | "metadata": {}, 232 | "outputs": [], 233 | "source": [ 234 | "import sys\n", 235 | "\n", 236 | "if __name__ == \"__main__\":\n", 237 | " if \"__file__\" in locals():\n", 238 | " # run this notebook like it is a cli\n", 239 | " if \"pytest\" not in sys.modules:\n", 240 | " if \"doit\" not in sys.modules:\n", 241 | " main(sys.argv[1:])" 242 | ] 243 | }, 244 | { 245 | "cell_type": "markdown", 246 | "metadata": {}, 247 | "source": [ 248 | "## data loaders\n", 249 | "\n", 250 | "data loaders can import other file formats. we can hide loading logic underneath `import` statements." 251 | ] 252 | }, 253 | { 254 | "cell_type": "code", 255 | "execution_count": 11, 256 | "metadata": {}, 257 | "outputs": [ 258 | { 259 | "data": { 260 | "application/json": { 261 | "cells": [ 262 | { 263 | "cell_type": "markdown", 264 | "metadata": {}, 265 | "source": [ 266 | "# `importnb` test specification\n", 267 | "\n", 268 | "this notebook is written to test many of the features of `importnb`.\n", 269 | "\n", 270 | "these features in this notebook test:\n", 271 | "* basic finding and loading\n", 272 | "* filtering magics, classes and functions\n", 273 | "* lazy loading\n", 274 | "* the command line interface" 275 | ] 276 | }, 277 | { 278 | "cell_type": "markdown", 279 | "metadata": {}, 280 | "source": [ 281 | "## a sentinel for execution" 282 | ] 283 | }, 284 | { 285 | "cell_type": "markdown", 286 | "metadata": {}, 287 | "source": [ 288 | "the `slug` below is used to measure that a module has been executed,\n", 289 | "we specifically use this expression to measure the lazy importing system." 290 | ] 291 | }, 292 | { 293 | "cell_type": "code", 294 | "execution_count": 1, 295 | "metadata": {}, 296 | "outputs": [ 297 | { 298 | "name": "stdout", 299 | "output_type": "stream", 300 | "text": [ 301 | "i was printed from and my name is __main__\n" 302 | ] 303 | } 304 | ], 305 | "source": [ 306 | "from importnb import get_ipython\n", 307 | "from pathlib import Path\n", 308 | "where = \"\"\n", 309 | "if \"__file__\" in locals():\n", 310 | " where = Path(__file__).as_posix()\n", 311 | "\n", 312 | "slug = \"i was printed from {where} \\\n", 313 | "and my name is {__name__}\"\n", 314 | "print(slug.format(**locals()))" 315 | ] 316 | }, 317 | { 318 | "cell_type": "markdown", 319 | "metadata": {}, 320 | "source": [ 321 | "## implicit markdown docstrings" 322 | ] 323 | }, 324 | { 325 | "cell_type": "markdown", 326 | "metadata": {}, 327 | "source": [ 328 | "there is a strict separation of code and non-code in notebooks.\n", 329 | "to encourage more/better documentation `importnb` will use a markdown\n", 330 | "cell preceeding a function as a docstring. \n", 331 | "as a result, the `function_with_a_markdown` docstring will have this markdown cell for a value." 332 | ] 333 | }, 334 | { 335 | "cell_type": "code", 336 | "execution_count": 2, 337 | "metadata": {}, 338 | "outputs": [], 339 | "source": [ 340 | "def function_with_a_markdown_docstring():\n", 341 | " return # function_with_a_markdown has a docstring defined by the preceeding markdown cell" 342 | ] 343 | }, 344 | { 345 | "cell_type": "markdown", 346 | "metadata": {}, 347 | "source": [ 348 | "the same convention holds for classes and async functions." 349 | ] 350 | }, 351 | { 352 | "cell_type": "code", 353 | "execution_count": 3, 354 | "metadata": {}, 355 | "outputs": [], 356 | "source": [ 357 | "class class_with_a_markdown_docstring:\n", 358 | " ... # my docstring is the cell above. " 359 | ] 360 | }, 361 | { 362 | "cell_type": "markdown", 363 | "metadata": {}, 364 | "source": [ 365 | "this is not a docstring for `class_with_a_string` because it defines its own." 366 | ] 367 | }, 368 | { 369 | "cell_type": "code", 370 | "execution_count": 4, 371 | "metadata": {}, 372 | "outputs": [], 373 | "source": [ 374 | "class class_with_a_python_docstring:\n", 375 | " \"\"\"when a class defines its own docstring the preceeding cell is ignored.\"\"\"" 376 | ] 377 | }, 378 | { 379 | "cell_type": "markdown", 380 | "metadata": {}, 381 | "source": [ 382 | "## cell magics" 383 | ] 384 | }, 385 | { 386 | "cell_type": "code", 387 | "execution_count": 5, 388 | "metadata": {}, 389 | "outputs": [ 390 | { 391 | "name": "stdout", 392 | "output_type": "stream", 393 | "text": [ 394 | "i'm only show when cell magics are active.\n" 395 | ] 396 | } 397 | ], 398 | "source": [ 399 | "%%python\n", 400 | "print(\"i'm only show when cell magics are active.\")" 401 | ] 402 | }, 403 | { 404 | "cell_type": "code", 405 | "execution_count": 6, 406 | "metadata": {}, 407 | "outputs": [], 408 | "source": [ 409 | "if get_ipython():\n", 410 | " magic_slug = \"i'm only show when cell magics are active.\"\n", 411 | " if __import__('sys').platform == \"win32\": magic_slug += \"\\n\"\n", 412 | "else:\n", 413 | " magic_slug = f\"this was printed from the module named {__name__}\"\n", 414 | " print(magic_slug)" 415 | ] 416 | }, 417 | { 418 | "cell_type": "markdown", 419 | "metadata": {}, 420 | "source": [ 421 | "## notebooks as scripts" 422 | ] 423 | }, 424 | { 425 | "cell_type": "markdown", 426 | "metadata": {}, 427 | "source": [ 428 | "the main block is a python convention we can apply in notebooks imported by importnb." 429 | ] 430 | }, 431 | { 432 | "cell_type": "code", 433 | "execution_count": 7, 434 | "metadata": {}, 435 | "outputs": [], 436 | "source": [ 437 | "def get_parser():\n", 438 | " from argparse import ArgumentParser, REMAINDER\n", 439 | " parser = ArgumentParser(\"test_parser\")\n", 440 | " parser.add_argument(\"--\", nargs=REMAINDER, dest=\"args\")\n", 441 | " return parser" 442 | ] 443 | }, 444 | { 445 | "cell_type": "code", 446 | "execution_count": 8, 447 | "metadata": {}, 448 | "outputs": [], 449 | "source": [ 450 | "def main(argv=None):\n", 451 | " parser = get_parser()\n", 452 | " print(\"the parser namespace is\", parser.parse_args(argv))" 453 | ] 454 | }, 455 | { 456 | "cell_type": "markdown", 457 | "metadata": {}, 458 | "source": [ 459 | "### notebooks as `doit` tasks\n", 460 | "\n", 461 | "[`doit`](https://pydoit.org/) is powerful alternative to makefiles for running development tasks.\n", 462 | "the `importnb` command line provides support for `doit` conventions, but does not provide the dependency;\n", 463 | "you the `doit` are responsible for that." 464 | ] 465 | }, 466 | { 467 | "cell_type": "markdown", 468 | "metadata": {}, 469 | "source": [ 470 | "this the docstring for the `echo` task that echos hello." 471 | ] 472 | }, 473 | { 474 | "cell_type": "code", 475 | "execution_count": 9, 476 | "metadata": {}, 477 | "outputs": [], 478 | "source": [ 479 | "def task_echo():\n", 480 | " return dict(actions=[\"echo hello\"])" 481 | ] 482 | }, 483 | { 484 | "cell_type": "code", 485 | "execution_count": 10, 486 | "metadata": {}, 487 | "outputs": [], 488 | "source": [ 489 | "import sys\n", 490 | "if __name__ == \"__main__\":\n", 491 | " if \"__file__\" in locals():\n", 492 | " # run this notebook like it is a cli\n", 493 | " if \"pytest\" not in sys.modules:\n", 494 | " if \"doit\" not in sys.modules:\n", 495 | " main(sys.argv[1:])" 496 | ] 497 | }, 498 | { 499 | "cell_type": "markdown", 500 | "metadata": {}, 501 | "source": [ 502 | "## data loaders\n", 503 | "\n", 504 | "data loaders can import other file formats. we can hide loading logic underneath `import` statements." 505 | ] 506 | }, 507 | { 508 | "cell_type": "markdown", 509 | "metadata": {}, 510 | "source": [ 511 | "if get_ipython() and not where:\n", 512 | " from importnb import loaders\n", 513 | " display(loaders.Json.load_file(\"Untitled42.ipynb\"))" 514 | ] 515 | } 516 | ], 517 | "metadata": { 518 | "kernelspec": { 519 | "display_name": "Python [conda env:root] *", 520 | "language": "python", 521 | "name": "conda-root-py" 522 | }, 523 | "language_info": { 524 | "codemirror_mode": { 525 | "name": "ipython", 526 | "version": 3 527 | }, 528 | "file_extension": ".py", 529 | "mimetype": "text/x-python", 530 | "name": "python", 531 | "nbconvert_exporter": "python", 532 | "pygments_lexer": "ipython3", 533 | "version": "3.9.13" 534 | }, 535 | "vscode": { 536 | "interpreter": { 537 | "hash": "6624ee388a1c346f3d0811b591fe9e170807496b8a5fea1a5f5986a819dc2334" 538 | } 539 | } 540 | }, 541 | "nbformat": 4, 542 | "nbformat_minor": 4 543 | }, 544 | "text/plain": [ 545 | "" 546 | ] 547 | }, 548 | "metadata": { 549 | "application/json": { 550 | "expanded": false, 551 | "root": "" 552 | } 553 | }, 554 | "output_type": "display_data" 555 | } 556 | ], 557 | "source": [ 558 | "if get_ipython() and not where:\n", 559 | " from importnb import loaders\n", 560 | "\n", 561 | " display(loaders.Json.load_file(\"Untitled42.ipynb\"))" 562 | ] 563 | }, 564 | { 565 | "cell_type": "code", 566 | "execution_count": null, 567 | "metadata": {}, 568 | "outputs": [], 569 | "source": [] 570 | } 571 | ], 572 | "metadata": { 573 | "kernelspec": { 574 | "display_name": "Python [conda env:root] *", 575 | "language": "python", 576 | "name": "conda-root-py" 577 | }, 578 | "language_info": { 579 | "codemirror_mode": { 580 | "name": "ipython", 581 | "version": 3 582 | }, 583 | "file_extension": ".py", 584 | "mimetype": "text/x-python", 585 | "name": "python", 586 | "nbconvert_exporter": "python", 587 | "pygments_lexer": "ipython3", 588 | "version": "3.9.13" 589 | }, 590 | "vscode": { 591 | "interpreter": { 592 | "hash": "6624ee388a1c346f3d0811b591fe9e170807496b8a5fea1a5f5986a819dc2334" 593 | } 594 | } 595 | }, 596 | "nbformat": 4, 597 | "nbformat_minor": 4 598 | } 599 | -------------------------------------------------------------------------------- /src/importnb/loader.py: -------------------------------------------------------------------------------- 1 | """# `loader` 2 | 3 | the loading machinery for notebooks style documents, and less. 4 | notebooks combine code, markdown, and raw cells to create a complete document. 5 | the importnb loader provides an interface for transforming these objects to valid python. 6 | """ 7 | 8 | 9 | import ast 10 | import inspect 11 | import re 12 | import shlex 13 | import sys 14 | import textwrap 15 | from contextlib import contextmanager 16 | from dataclasses import asdict, dataclass, field 17 | from functools import partial 18 | from importlib import _bootstrap as bootstrap 19 | from importlib import reload 20 | from importlib._bootstrap import _init_module_attrs, _requires_builtin 21 | from importlib._bootstrap_external import FileFinder, decode_source 22 | from importlib.machinery import SourceFileLoader 23 | from importlib.util import LazyLoader, find_spec 24 | from pathlib import Path 25 | from types import ModuleType 26 | 27 | from . import get_ipython 28 | from .decoder import LineCacheNotebookDecoder, quote 29 | from .docstrings import update_docstring 30 | from .finder import FileModuleSpec, FuzzyFinder, get_loader_details, get_loader_index 31 | 32 | __all__ = "Notebook", "reload" 33 | 34 | VERSION = sys.version_info.major, sys.version_info.minor 35 | 36 | MAGIC = re.compile(r"^\s*%{2}", re.MULTILINE) 37 | ALLOW_TOP_LEVEL_AWAIT = getattr(ast, "PyCF_ALLOW_TOP_LEVEL_AWAIT", 0x0) 38 | 39 | 40 | def _get_co_flags_set(co_flags): 41 | """Return a deconstructed set of code flags from a code object.""" 42 | flags = set() 43 | for i in range(12): 44 | flag = 1 << i 45 | if co_flags & flag: 46 | flags.add(flag) 47 | co_flags ^= flag 48 | if not co_flags: 49 | break 50 | else: 51 | flags.intersection_update(flags) 52 | return flags 53 | 54 | 55 | class SourceModule(ModuleType): 56 | def __fspath__(self): 57 | return self.__file__ 58 | 59 | 60 | @dataclass 61 | class Interface: 62 | """a configuration python importing interface""" 63 | 64 | name: str = None 65 | path: str = None 66 | lazy: bool = False 67 | extensions: tuple = field(default_factory=[".ipy", ".ipynb"].copy) 68 | include_fuzzy_finder: bool = True 69 | include_markdown_docstring: bool = True 70 | include_non_defs: bool = True 71 | include_await: bool = True 72 | module_type: ModuleType = field(default=SourceModule) 73 | no_magic: bool = False 74 | 75 | _loader_hook_position: int = field(default=0, repr=False) 76 | 77 | def __new__(cls, name=None, path=None, **kwargs): 78 | kwargs.update(name=name, path=path) 79 | self = super().__new__(cls) 80 | self.__init__(**kwargs) 81 | return self 82 | 83 | 84 | class Loader(Interface, SourceFileLoader): 85 | """The simplest implementation of a Notebook Source File Loader. 86 | This class breaks down the loading process into finer steps. 87 | """ 88 | 89 | extensions: tuple = field(default_factory=[".py"].copy) 90 | 91 | @property 92 | def loader(self): 93 | """Generate a new loader based on the state of an existing loader.""" 94 | loader = type(self) 95 | if self.lazy: 96 | loader = LazyLoader.factory(loader) 97 | # Strip the leading underscore from slots 98 | params = asdict(self) 99 | params.pop("name") 100 | params.pop("path") 101 | return partial(loader, **params) 102 | 103 | @property 104 | def finder(self): 105 | """Generate a new finder based on the state of an existing loader""" 106 | return self.include_fuzzy_finder and FuzzyFinder or FileFinder 107 | 108 | def raw_to_source(self, source): 109 | """Transform a string from a raw file to python source.""" 110 | if self.path and self.path.endswith(".ipynb"): 111 | # when we encounter notebooks we apply different transformers to the diff cell types 112 | return LineCacheNotebookDecoder( 113 | code=self.code, 114 | raw=self.raw, 115 | markdown=self.markdown, 116 | ).decode(source, self.path) 117 | 118 | # for a normal file we just apply the code transformer. 119 | return self.code(source) 120 | 121 | def source_to_nodes(self, source, path="", *, _optimize=-1): 122 | """Parse source string as python ast""" 123 | flags = ast.PyCF_ONLY_AST 124 | return bootstrap._call_with_frames_removed( 125 | compile, 126 | source, 127 | path, 128 | "exec", 129 | flags=flags, 130 | dont_inherit=True, 131 | optimize=_optimize, 132 | ) 133 | 134 | def nodes_to_code(self, nodes, path="", *, _optimize=-1): 135 | """Compile ast nodes to python code object""" 136 | flags = ALLOW_TOP_LEVEL_AWAIT 137 | return bootstrap._call_with_frames_removed( 138 | compile, 139 | nodes, 140 | path, 141 | "exec", 142 | flags=flags, 143 | dont_inherit=True, 144 | optimize=_optimize, 145 | ) 146 | 147 | def source_to_code(self, source, path="", *, _optimize=-1): 148 | """Tangle python source to compiled code by: 149 | 1. parsing the source as ast nodes 150 | 2. compiling the ast nodes as python code 151 | """ 152 | nodes = self.source_to_nodes(source, path, _optimize=_optimize) 153 | return self.nodes_to_code(nodes, path, _optimize=_optimize) 154 | 155 | def get_data(self, path): 156 | """get_data injects an input transformation before the raw text. 157 | 158 | this method allows notebook json to be transformed line for line into vertically sparse python code. 159 | """ 160 | return self.raw_to_source(decode_source(super().get_data(self.path))) 161 | 162 | def create_module(self, spec): 163 | """An overloaded create_module method injecting fuzzy finder setup up logic.""" 164 | module = self.module_type(str(spec.name)) 165 | _init_module_attrs(spec, module) 166 | if self.name: 167 | module.__name__ = self.name 168 | 169 | if module.__file__.endswith((".ipynb", ".ipy")): 170 | module.get_ipython = get_ipython 171 | 172 | if getattr(spec, "alias", None): 173 | # put a fuzzy spec on the modules to avoid re importing it. 174 | # there is a funky trick you do with the fuzzy finder where you 175 | # load multiple versions with different finders. 176 | 177 | sys.modules[spec.alias] = module 178 | 179 | return module 180 | 181 | def exec_module(self, module): 182 | """Execute the module.""" 183 | # importlib uses module.__name__, but when running modules as __main__ name will change. 184 | # this approach uses the original name on the spec. 185 | try: 186 | code = self.get_code(module.__spec__.name) 187 | 188 | # from importlib 189 | if code is None: 190 | raise ImportError( 191 | f"cannot load module {module.__name__!r} when " "get_code() returns None", 192 | ) 193 | 194 | if inspect.CO_COROUTINE not in _get_co_flags_set(code.co_flags): 195 | # if there isn't any async non sense then we proceed with convention. 196 | bootstrap._call_with_frames_removed(exec, code, module.__dict__) 197 | else: 198 | self.aexec_module_sync(module) 199 | 200 | except BaseException as e: 201 | alias = getattr(module.__spec__, "alias", None) 202 | if alias: 203 | sys.modules.pop(alias, None) 204 | 205 | raise e 206 | 207 | def aexec_module_sync(self, module): 208 | if "anyio" in sys.modules: 209 | __import__("anyio").run(self.aexec_module, module) 210 | else: 211 | from asyncio import get_event_loop 212 | 213 | get_event_loop().run_until_complete(self.aexec_module(module)) 214 | 215 | async def aexec_module(self, module): 216 | """An async exec_module method permitting top-level await.""" 217 | # there is so redudancy in this approach, but it starts getting asynchier. 218 | nodes = self.source_to_nodes(self.get_data(self.path)) 219 | 220 | # iterate through the nodes and compile individual statements 221 | for node in nodes.body: 222 | co = bootstrap._call_with_frames_removed( 223 | compile, 224 | ast.Module([node], []), 225 | module.__file__, 226 | "exec", 227 | flags=ALLOW_TOP_LEVEL_AWAIT, 228 | ) 229 | if inspect.CO_COROUTINE in _get_co_flags_set(co.co_flags): 230 | # when something async is encountered we compile it with the single flag 231 | # this lets us use eval to retreive our coroutine. 232 | co = bootstrap._call_with_frames_removed( 233 | compile, 234 | ast.Interactive([node]), 235 | module.__file__, 236 | "single", 237 | flags=ALLOW_TOP_LEVEL_AWAIT, 238 | ) 239 | await bootstrap._call_with_frames_removed( 240 | eval, 241 | co, 242 | module.__dict__, 243 | module.__dict__, 244 | ) 245 | else: 246 | bootstrap._call_with_frames_removed(exec, co, module.__dict__, module.__dict__) 247 | 248 | def code(self, str): 249 | return dedent(str) 250 | 251 | @classmethod 252 | @_requires_builtin 253 | def is_package(cls, fullname): 254 | """Return False as built-in modules are never packages.""" 255 | if "." not in fullname: 256 | return True 257 | return super().is_package(fullname) 258 | 259 | def __enter__(self): 260 | path_id, loader_id, details = get_loader_index(".py") 261 | for _, e in details: 262 | if all(map(e.__contains__, self.extensions)): 263 | self._loader_hook_position = None 264 | return self 265 | else: 266 | self._loader_hook_position = loader_id + 1 267 | details.insert(self._loader_hook_position, (self.loader, self.extensions)) 268 | sys.path_hooks[path_id] = self.finder.path_hook(*details) 269 | sys.path_importer_cache.clear() 270 | return self 271 | 272 | def __exit__(self, *excepts): 273 | if self._loader_hook_position is not None: 274 | path_id, details = get_loader_details() 275 | details.pop(self._loader_hook_position) 276 | sys.path_hooks[path_id] = self.finder.path_hook(*details) 277 | sys.path_importer_cache.clear() 278 | 279 | @classmethod 280 | def load_file(cls, filename, main=True, **kwargs): 281 | """Import a notebook as a module from a filename. 282 | 283 | dir: The directory to load the file from. 284 | main: Load the module in the __main__ context. 285 | 286 | >>> assert Notebook.load_file('foo.ipynb') 287 | """ 288 | name = main and "__main__" or filename 289 | loader = cls(name, str(filename), **kwargs) 290 | spec = FileModuleSpec(name, loader, origin=loader.path) 291 | module = loader.create_module(spec) 292 | loader.exec_module(module) 293 | return module 294 | 295 | @classmethod 296 | def load_module(cls, module, main=False, **kwargs): 297 | """Import a notebook as a module. 298 | 299 | main: Load the module in the __main__ context. 300 | 301 | >>> assert Notebook.load_module('foo') 302 | """ 303 | with cls() as loader: 304 | spec = find_spec(module) 305 | module = spec.loader.create_module(spec) 306 | if main: 307 | sys.modules["__main__"] = module 308 | module.__name__ = "__main__" 309 | spec.loader.exec_module(module) 310 | return module 311 | 312 | @classmethod 313 | def load_argv(cls, argv=None, *, parser=None): 314 | """Load a module based on python arguments 315 | 316 | load a notebook from its file name 317 | >>> Notebook.load_argv("foo.ipynb --arg abc") 318 | 319 | load the same notebook from a module alias. 320 | >>> Notebook.load_argv("-m foo --arg abc") 321 | """ 322 | if parser is None: 323 | parser = cls.get_argparser() 324 | 325 | if argv is None: 326 | from sys import argv 327 | 328 | argv = argv[1:] 329 | 330 | if isinstance(argv, str): 331 | argv = shlex.split(argv) 332 | 333 | parsed_args = parser.parse_args(argv) 334 | module = cls.load_ns(parsed_args) 335 | if module is None: 336 | return parser.print_help() 337 | 338 | return module 339 | 340 | @classmethod 341 | def load_ns(cls, ns): 342 | """Load a module from a namespace, used when loading module from sys.argv parameters.""" 343 | if ns.tasks: 344 | # i don't quite why we need to do this here, but we do. so don't move it 345 | from doit.cmd_base import ModuleTaskLoader 346 | from doit.doit_cmd import DoitMain 347 | 348 | if ns.code: 349 | with main_argv(sys.argv[0], ns.args): 350 | result = cls.load_code(ns.code) 351 | elif ns.module: 352 | if ns.dir: 353 | if ns.dir not in sys.path: 354 | sys.path.insert(0, ns.dir) 355 | elif "" in sys.path: 356 | pass 357 | else: 358 | sys.path.insert(0, "") 359 | with main_argv(ns.module, ns.args): 360 | result = cls.load_module(ns.module, main=True) 361 | elif ns.file: 362 | where = Path(ns.dir, ns.file) if ns.dir else Path(ns.file) 363 | with main_argv(str(where), ns.args): 364 | result = cls.load_file(ns.file) 365 | else: 366 | return None 367 | if ns.tasks: 368 | DoitMain(ModuleTaskLoader(result)).run(ns.args or ["help"]) 369 | return result 370 | 371 | @classmethod 372 | def load_code(cls, code, argv=None, mod_name=None, script_name=None, main=False): 373 | """Load a module from raw source code""" 374 | from runpy import _run_module_code 375 | 376 | self = cls() 377 | name = main and "__main__" or mod_name or "" 378 | 379 | return _dict_module( 380 | _run_module_code(self.raw_to_source(code), mod_name=name, script_name=script_name), 381 | ) 382 | 383 | @staticmethod 384 | def get_argparser(parser=None): 385 | from argparse import REMAINDER, ArgumentParser 386 | 387 | from importnb import __version__ 388 | 389 | if parser is None: 390 | parser = ArgumentParser("importnb", description="run notebooks as python code") 391 | parser.add_argument("file", nargs="?", help="run a file") 392 | parser.add_argument("args", nargs=REMAINDER, help="arguments to pass to script") 393 | parser.add_argument("-m", "--module", help="run a module") 394 | parser.add_argument("-c", "--code", help="run raw code") 395 | parser.add_argument("-d", "--dir", help="path to run script in") 396 | parser.add_argument("-t", "--tasks", action="store_true", help="run doit tasks") 397 | parser.add_argument( 398 | "--version", action="version", version=__version__, help="display the importnb version" 399 | ) 400 | return parser 401 | 402 | 403 | def comment(str): 404 | return textwrap.indent(str, "# ") 405 | 406 | 407 | class DefsOnly(ast.NodeTransformer): 408 | INCLUDE = ast.Import, ast.ImportFrom, ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef 409 | 410 | def visit_Module(self, node): 411 | args = ([x for x in node.body if isinstance(x, self.INCLUDE)],) 412 | if VERSION >= (3, 8): 413 | args += (node.type_ignores,) 414 | return ast.Module(*args) 415 | 416 | 417 | class Notebook(Loader): 418 | """Notebook is a user friendly file finder and module loader for notebook source code. 419 | 420 | > Remember, restart and run all or it didn't happen. 421 | 422 | Notebook provides several useful options. 423 | 424 | * Lazy module loading. A module is executed the first time it is used in a script. 425 | """ 426 | 427 | def markdown(self, str): 428 | return quote(str) 429 | 430 | def raw(self, str): 431 | return comment(str) 432 | 433 | def visit(self, nodes): 434 | if self.include_non_defs: 435 | return nodes 436 | return DefsOnly().visit(nodes) 437 | 438 | def code(self, str): 439 | if self.no_magic: 440 | if MAGIC.match(str): 441 | return comment(str) 442 | return super().code(str) 443 | 444 | def source_to_nodes(self, source, path="", *, _optimize=-1): 445 | nodes = super().source_to_nodes(source, path) 446 | if self.include_markdown_docstring: 447 | nodes = update_docstring(nodes) 448 | nodes = self.visit(nodes) 449 | return ast.fix_missing_locations(nodes) 450 | 451 | def raw_to_source(self, source): 452 | """Transform a string from a raw file to python source.""" 453 | if self.path and self.path.endswith(".ipynb"): 454 | # when we encounter notebooks we apply different transformers to the diff cell types 455 | return LineCacheNotebookDecoder( 456 | code=self.code, 457 | raw=self.raw, 458 | markdown=self.markdown, 459 | ).decode(source, self.path) 460 | 461 | # for a normal file we just apply the code transformer. 462 | return self.code(source) 463 | 464 | 465 | def _dict_module(ns): 466 | m = ModuleType(ns.get("__name__"), ns.get("__doc__")) 467 | m.__dict__.update(ns) 468 | return m 469 | 470 | 471 | @contextmanager 472 | def main_argv(prog, args=None): 473 | if args is not None: 474 | args = [prog] + list(args) 475 | prior, sys.argv = sys.argv, args 476 | yield 477 | if args is not None: 478 | sys.argv = prior 479 | 480 | 481 | try: 482 | import IPython 483 | from IPython.core.inputsplitter import IPythonInputSplitter 484 | 485 | dedent = IPythonInputSplitter( 486 | line_input_checker=False, 487 | physical_line_transforms=[ 488 | IPython.core.inputsplitter.leading_indent(), 489 | IPython.core.inputsplitter.ipy_prompt(), 490 | IPython.core.inputsplitter.cellmagic(end_on_blank_line=False), 491 | ], 492 | ).transform_cell 493 | except ModuleNotFoundError: 494 | 495 | def dedent(body): 496 | from textwrap import dedent, indent 497 | 498 | if MAGIC.match(body): 499 | return indent(body, "# ") 500 | return dedent(body) 501 | --------------------------------------------------------------------------------