├── .bumpversion.cfg ├── .eslintrc.js ├── .github └── workflows │ ├── binder-on-pr.yml │ ├── build.yml │ └── python.yml ├── .gitignore ├── .lintstagedrc.js ├── .prettierignore ├── .prettierrc ├── LICENSE ├── MANIFEST.in ├── README.md ├── binder ├── environment.yml └── postBuild ├── docs └── internal │ └── black_with_precommit │ ├── .pre-commit-config.yaml │ └── README.md ├── example ├── cutnpaste.ipynb ├── nested.hdf5 ├── nested_data_ext.hdf5 ├── nested_int.hdf5 ├── nested_int_data_ext.hdf5 ├── non_simple_entities.h5 ├── snippet.ipynb └── tutorial_animation.gif ├── jupyter-config └── jupyter_notebook_config.d │ └── jupyterlab_hdf.json ├── jupyterlab_hdf ├── __init__.py ├── _version.py ├── api │ └── api.yaml ├── attrs.py ├── baseHandler.py ├── config.py ├── contents.py ├── data.py ├── exception.py ├── meta.py ├── responses.py ├── snippet.py ├── tests │ ├── test_attrs.py │ ├── test_contents.py │ ├── test_data.py │ ├── test_meta.py │ ├── test_meta_links.py │ └── utils.py └── util.py ├── package.json ├── pyproject.toml ├── release.py ├── scratch ├── dataset.ai ├── genNested.py ├── nested-contents.ipynb ├── nested-dataset.ipynb └── nested-meta.ipynb ├── setup.cfg ├── setup.py ├── setupbase.py ├── src ├── AttributeViewer.tsx ├── browser.ts ├── complex.ts ├── contents.ts ├── dataregistry.ts ├── dataset.ts ├── exception.tsx ├── hdf.ts ├── index.ts ├── slice.ts ├── toolbar.tsx └── utils.ts ├── style ├── AttributeViewer.css ├── bad.svg ├── dataset.svg ├── hdf.svg ├── hdf_large.png └── index.css ├── tsconfig.eslint.json ├── tsconfig.json ├── tsconfigbase.json └── yarn.lock /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 1.3.0 3 | commit = True 4 | message = Bump master to {new_version} 5 | 6 | [bumpversion:file:package.json] 7 | 8 | [bumpversion:file:jupyterlab_hdf/_version.py] 9 | 10 | [bumpversion:file:jupyterlab_hdf/api/api.yaml] 11 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | extends: [ 3 | 'eslint:recommended', 4 | 'plugin:@typescript-eslint/eslint-recommended', 5 | 'plugin:@typescript-eslint/recommended', 6 | // 'plugin:react/recommended', 7 | // 'plugin:jest/recommended', 8 | 'prettier', 9 | 'prettier/@typescript-eslint', 10 | ], 11 | ignorePatterns: [ 12 | '**/.autoversion', 13 | '**/*.d.ts', 14 | '**/.github', 15 | '**/.history', 16 | '**/dist', 17 | '**/lib', 18 | '**/node_modules', 19 | ], 20 | parser: '@typescript-eslint/parser', 21 | parserOptions: { 22 | project: 'tsconfig.eslint.json', 23 | sourceType: 'module', 24 | }, 25 | plugins: [ 26 | '@typescript-eslint', 27 | // 'jest', 28 | ], 29 | rules: { 30 | '@typescript-eslint/no-floating-promises': ['warn', { ignoreVoid: true }], 31 | '@typescript-eslint/naming-convention': [ 32 | 'error', 33 | { 34 | selector: 'interface', 35 | format: ['PascalCase'], 36 | custom: { 37 | regex: '^I[A-Z]', 38 | match: true, 39 | }, 40 | }, 41 | ], 42 | '@typescript-eslint/no-unused-vars': ['warn', { args: 'none' }], 43 | '@typescript-eslint/no-use-before-define': 'off', 44 | '@typescript-eslint/camelcase': 'off', 45 | '@typescript-eslint/no-explicit-any': 'off', 46 | '@typescript-eslint/no-non-null-assertion': 'off', 47 | '@typescript-eslint/no-namespace': 'off', 48 | '@typescript-eslint/interface-name-prefix': 'off', 49 | '@typescript-eslint/explicit-function-return-type': 'off', 50 | 
'@typescript-eslint/ban-ts-comment': ['warn', { 'ts-ignore': true }], 51 | '@typescript-eslint/ban-types': 'warn', 52 | '@typescript-eslint/no-non-null-asserted-optional-chain': 'warn', 53 | '@typescript-eslint/no-var-requires': 'off', 54 | '@typescript-eslint/no-empty-interface': 'off', 55 | '@typescript-eslint/triple-slash-reference': 'warn', 56 | '@typescript-eslint/no-inferrable-types': 'off', 57 | // 'jest/no-conditional-expect': 'warn', 58 | // 'jest/valid-title': 'warn', 59 | 'no-inner-declarations': 'off', 60 | 'no-prototype-builtins': 'off', 61 | 'no-control-regex': 'warn', 62 | 'no-undef': 'warn', 63 | 'no-case-declarations': 'warn', 64 | 'no-useless-escape': 'off', 65 | 'prefer-const': 'off', 66 | // 'react/prop-types': 'warn' 67 | }, 68 | env: { 69 | browser: true, 70 | node: true, 71 | }, 72 | // settings: { 73 | // react: { 74 | // version: 'detect' 75 | // } 76 | // } 77 | }; 78 | -------------------------------------------------------------------------------- /.github/workflows/binder-on-pr.yml: -------------------------------------------------------------------------------- 1 | # Reference https://mybinder.readthedocs.io/en/latest/howto/gh-actions-badges.html 2 | name: Binder Badge 3 | on: 4 | pull_request_target: 5 | types: [opened] 6 | 7 | permissions: 8 | pull-requests: write 9 | 10 | jobs: 11 | binder: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: comment on PR with Binder link 15 | uses: actions/github-script@v1 16 | with: 17 | github-token: ${{secrets.GITHUB_TOKEN}} 18 | script: | 19 | var PR_HEAD_USERREPO = process.env.PR_HEAD_USERREPO; 20 | var PR_HEAD_REF = process.env.PR_HEAD_REF; 21 | github.issues.createComment({ 22 | issue_number: context.issue.number, 23 | owner: context.repo.owner, 24 | repo: context.repo.repo, 25 | body: `[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/${PR_HEAD_USERREPO}/${PR_HEAD_REF}?urlpath=lab) :point_left: Launch a binder notebook on branch _${PR_HEAD_USERREPO}/${PR_HEAD_REF}_` 26 | }) 27 | env: 28 | PR_HEAD_REF: ${{ github.event.pull_request.head.ref }} 29 | PR_HEAD_USERREPO: ${{ github.event.pull_request.head.repo.full_name }} 30 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: 4 | push: 5 | branches: main 6 | pull_request: 7 | branches: '*' 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | strategy: 13 | matrix: 14 | python: [3.7, 3.8, 3.9] 15 | fail-fast: false 16 | steps: 17 | - name: Checkout 18 | uses: actions/checkout@v2 19 | 20 | - name: Install node 21 | uses: actions/setup-node@v2 22 | with: 23 | node-version: '16.x' 24 | 25 | - name: Install Python 26 | uses: actions/setup-python@v2 27 | with: 28 | python-version: ${{ matrix.python }} 29 | 30 | - name: Install dependencies 31 | run: python -m pip install jupyterlab~=3.0 32 | 33 | - name: Build the extension 34 | run: | 35 | jlpm 36 | jlpm build 37 | pip install . 
38 | jupyter labextension install .
39 | jupyter lab build
40 | jupyter serverextension list 1>serverextensions 2>&1
41 | cat serverextensions | grep "jupyterlab_hdf.*OK"
42 | jupyter labextension list 1>labextensions 2>&1
43 | cat labextensions | grep "@jupyterlab/hdf5.*OK"
44 | python -m jupyterlab.browser_check
45 | 
-------------------------------------------------------------------------------- /.github/workflows/python.yml: --------------------------------------------------------------------------------
1 | name: pytest
2 | 
3 | on: [push, pull_request]
4 | 
5 | jobs:
6 | pytest:
7 | name: pytest jupyterlab_hdf
8 | strategy:
9 | matrix:
10 | python: [3.7, 3.8, 3.9]
11 | fail-fast: false
12 | timeout-minutes: 120
13 | runs-on: ubuntu-latest
14 | steps:
15 | - uses: actions/checkout@v2
16 | - name: Set up Python
17 | uses: actions/setup-python@v1
18 | with:
19 | python-version: ${{ matrix.python }}
20 | 
21 | - name: Cache pip on Linux
22 | uses: actions/cache@v2
23 | if: startsWith(runner.os, 'Linux')
24 | with:
25 | path: ~/.cache/pip
26 | key: ${{ runner.os }}-pip-${{ matrix.python }}-${{ hashFiles('**/requirements.txt', 'setup.py') }}
27 | restore-keys: |
28 | ${{ runner.os }}-pip-${{ matrix.python }}
29 | 
30 | - name: Install dependencies
31 | run: |
32 | python -m pip install --upgrade pip
33 | python -m pip install -e .[dev]
34 | 
35 | - name: Test with pytest
36 | run: |
37 | python -m pytest jupyterlab_hdf
38 | 
-------------------------------------------------------------------------------- /.gitignore: --------------------------------------------------------------------------------
1 | .autoversion
2 | lib/
3 | node_modules/
4 | 
5 | # build/test cruft
6 | *.bundle.*
7 | .cache
8 | *.py[co]
9 | .pytest_cache
10 | __pycache__
11 | *.egg-info
12 | *.tsbuildinfo
13 | 
14 | # python distribution stuff
15 | build
16 | dist
17 | MANIFEST
18 | 
19 | # scratch cruft
20 | /scratch/*.cutie
21 | /scratch/*.h5
22 | /scratch/*.hdf5
23 | /scratch/tutorial
24 | /scratch/Untitled*
25 | 
26 | # temp/system files
27 | *~
28 | *.bak
29 | .ipynb_checkpoints
30 | .DS_Store
31 | \#*#
32 | .#*
33 | 
34 | # jetbrains IDE stuff
35 | *.iml
36 | .idea/
37 | 
38 | # ms IDE stuff
39 | *.code-workspace
40 | .history
41 | .vscode
42 | 
-------------------------------------------------------------------------------- /.lintstagedrc.js: --------------------------------------------------------------------------------
1 | const escape = require('shell-quote').quote;
2 | const fs = require('fs');
3 | const isWin = process.platform === 'win32';
4 | 
5 | const escapeFileNames = filenames =>
6 | filenames
7 | .filter(filename => fs.existsSync(filename))
8 | .map(filename => `"${isWin ? 
filename : escape([filename])}"`) 9 | .join(' '); 10 | 11 | module.exports = { 12 | '**/*{.css,.json,.md}': filenames => { 13 | const escapedFileNames = escapeFileNames(filenames); 14 | return [`prettier --write ${escapedFileNames}`]; 15 | }, 16 | '**/*{.ts,.tsx,.js,.jsx}': filenames => { 17 | const escapedFileNames = escapeFileNames(filenames); 18 | return [ 19 | `prettier --write ${escapedFileNames}`, 20 | `eslint --fix ${escapedFileNames}`, 21 | ]; 22 | }, 23 | 'jupyterlab_hdf/**/*.py': filenames => { 24 | const escapedFileNames = escapeFileNames(filenames); 25 | return [`black ${escapedFileNames}`]; 26 | }, 27 | 'setup.py': filenames => { 28 | const escapedFileNames = escapeFileNames(filenames); 29 | return [`black ${escapedFileNames}`]; 30 | }, 31 | }; 32 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | **/.autoversion 2 | **/.github 3 | **/.history 4 | **/.pytest_cache 5 | **/.vscode 6 | **/build 7 | **/dist 8 | **/lib 9 | **/node_modules 10 | **/package.json 11 | **/static 12 | tests/**/coverage 13 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "arrowParens": "avoid" 4 | } 5 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2019 Project Jupyter Contributors 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | 
-------------------------------------------------------------------------------- /MANIFEST.in: --------------------------------------------------------------------------------
1 | include LICENSE
2 | include README.*
3 | include *.md
4 | include package.json
5 | include setupbase.py
6 | 
7 | exclude scratch
-------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------
1 | [![PyPI version][pypi-badge]][pypi]
2 | [![npm_version][npm-badge]][npm]
3 | 
4 | [interactive api docs][redoc]
5 | 
6 | # jupyterlab-hdf5
7 | 
8 | ## Important Notice
9 | 
10 | [jupyterlab-hdf5](.) is no longer actively maintained, and will not work with JupyterLab 4 or later. [jupyterlab-h5web](https://github.com/silx-kit/jupyterlab-h5web) is the recommended replacement.
11 | 
12 | ## Overview
13 | 
14 | Open and explore HDF5 files in JupyterLab. The extension can handle very large (TB-sized) files. New in release v0.5.0, jlab-hdf5 can now open datasets of any dimensionality, from 0 to 32. Any 0D, 1D, or 2D slab of any dataset can easily be selected and displayed using numpy-style index syntax.
15 | 
16 | ![hdf_preview][tutorial_animation]
17 | 
18 | Double clicking on an `.hdf5` file in the file browser will open it in a special HDF browser. You can then browse through the groups and open the datasets in the `.hdf5` file. All datasets will open read-only.
19 | 
20 | For the moment, the browser context menu does not work with `.hdf5` files/groups/datasets. Only double clicking is currently supported.
21 | 
22 | The extension is designed from the ground up to be as efficient as possible. Data will only be fetched as needed to create the visible display. This allows the extension to work with very large files (tested working up to the TB range).
23 | 
24 | ## Installation
25 | 
26 | ```bash
27 | pip install jupyterlab_hdf
28 | jupyter labextension install @jupyterlab/hdf5
29 | ```
30 | 
31 | This will install both the server extension and the labextension needed by this plugin.
32 | 
33 | You can also install the labextension via JupyterLab's extension manager GUI. Keep in mind that if you use the lab extension GUI, you'll still need to install the `jupyterlab_hdf` server extension via `pip`.
34 | 
35 | ### Compression filters
36 | 
37 | The extension supports all compression filters supported by h5py: https://docs.h5py.org/en/stable/high/dataset.html#filter-pipeline.
38 | 
39 | To enable support for additional filters such as [blosc](https://github.com/Blosc/hdf5-blosc) or [bitshuffle](https://github.com/kiyo-masui/bitshuffle), you need to install [hdf5plugin](https://pypi.org/project/hdf5plugin/) in addition to the extension:
40 | 
41 | ```bash
42 | pip install hdf5plugin
43 | ```
44 | 
45 | ## Development
46 | 
47 | For a development install, clone the repository and then run the following in the repo dir:
48 | 
49 | ```bash
50 | pip install -e .[dev]
51 | jlpm build:dev
52 | ```
53 | 
54 | To watch for/rebuild on changes to this extension's source code, run:
55 | 
56 | ```bash
57 | jlpm run build:watch
58 | ```
59 | 
60 | ## What's in this extension
61 | 
62 | This extension has two main parts: an hdf5 filebrowser plugin, and an hdf5 dataset file type plugin.
63 | 
64 | ### HDF5 Filebrowser
65 | 
66 | Allows you to navigate an `.hdf5` file's groups as though they were directories in a filesystem. Any `.hdf5` file on a user's system can be opened by entering its path (relative to the JupyterLab home directory) in the box at the top of the browser.
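
If you don't have an `.hdf5` file handy, a small one to explore can be generated with `h5py` (a minimal sketch; the file name and dataset layout here are arbitrary):

```python
import h5py
import numpy as np

# create a file containing one group that holds a single 3D dataset
with h5py.File('example.hdf5', 'w') as f:
    grp = f.create_group('mygroup')
    grp.create_dataset('mydata', data=np.arange(13 * 5 * 17).reshape(13, 5, 17))
```

Save the file anywhere under the JupyterLab home directory, then double click on it in the file browser to open it in the HDF browser.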
67 | 
68 | #### Note on link resolution
69 | 
70 | HDF5 files can contain links that point to entities in the same file (soft links) or to entities in a different file (external links). By default, the extension does not resolve such links.
71 | 
72 | Link resolution must be enabled explicitly by setting the config field `HdfConfig.resolve_links` to `True`. For this, there are two possibilities:
73 | 
74 | - Set the config field when launching JupyterLab:
75 | 
76 | ```
77 | jupyter lab --HdfConfig.resolve_links=True
78 | ```
79 | 
80 | - Add the following line to [your notebook configuration file](https://jupyter-notebook.readthedocs.io/en/stable/config_overview.html#configure-nbserver)
81 | 
82 | ```
83 | c.HdfConfig.resolve_links = True
84 | ```
85 | 
86 | Note that this will only resolve valid links. Broken links (e.g. links to a non-existent entity) will still appear as links.
87 | 
88 | ### HDF5 dataset file type
89 | 
90 | When you open a dataset using the hdf5 filebrowser, a document will open that displays the contents of the dataset via a grid.
91 | 
92 | [pypi-badge]: https://badge.fury.io/py/jupyterlab-hdf.svg
93 | [pypi]: https://badge.fury.io/py/jupyterlab-hdf
94 | [npm-badge]: https://badge.fury.io/js/%40jupyterlab%2Fhdf5.svg
95 | [npm]: https://badge.fury.io/js/%40jupyterlab%2Fhdf5
96 | [redoc]: https://jupyterlab.github.io/jupyterlab-hdf5/
97 | [swagger]: https://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyterlab/jupyterlab-hdf5/master/jupyterlab_hdf/api/api.yaml
98 | [tutorial_animation]: https://raw.githubusercontent.com/jupyterlab/jupyterlab-hdf5/master/example/tutorial_animation.gif
99 | 
-------------------------------------------------------------------------------- /binder/environment.yml: --------------------------------------------------------------------------------
1 | channels:
2 | - conda-forge
3 | dependencies:
4 | - jupyterlab=3
5 | - nodejs=15.8
6 | # Python Kernel
7 | - python=3.7
-------------------------------------------------------------------------------- /binder/postBuild: --------------------------------------------------------------------------------
1 | #!/bin/bash
2 | mkdir -p /tmp/yarn
3 | export YARN_CACHE_FOLDER="/tmp/yarn"
4 | 
5 | python --version
6 | 
7 | jupyter lab clean --all
8 | 
9 | pip install .[dev]
10 | 
11 | jupyter --version
12 | 
13 | jlpm
14 | jlpm build
15 | jupyter labextension install . --no-build
16 | jupyter lab build --debug
-------------------------------------------------------------------------------- /docs/internal/black_with_precommit/.pre-commit-config.yaml: --------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/psf/black
3 | rev: 20.8b1
4 | hooks:
5 | - id: black
6 | # ref: https://stackoverflow.com/a/58459499/425458
7 | # ref: https://github.com/pre-commit/pre-commit/issues/747
8 | entry: bash -c 'black "$@"; git add -u' --
9 | language_version: python # Should be a command that runs python3.6+
-------------------------------------------------------------------------------- /docs/internal/black_with_precommit/README.md: --------------------------------------------------------------------------------
1 | # Run Black Python formatter using pre-commit
2 | 
3 | Currently, Black runs with the rest of the lint fixes as part of lint-staged. This is not standard.
4 | 
5 | The standard way to run Black is via pre-commit. This can be accomplished by moving the `.pre-commit-config.yaml` file from this dir to the root of this repo.
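
If this setup is adopted, the hook can then be enabled with pre-commit's standard workflow (assuming the `pre-commit` package is installed):

```bash
pip install pre-commit
pre-commit install
```

After `pre-commit install`, Black will run automatically against staged Python files on each `git commit`.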
6 | -------------------------------------------------------------------------------- /example/cutnpaste.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "%matplotlib inline\n", 10 | "\n", 11 | "import h5py\n", 12 | "from matplotlib import pyplot as plt\n", 13 | "import pandas as pd" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": null, 19 | "metadata": {}, 20 | "outputs": [], 21 | "source": [ 22 | "from io import StringIO\n", 23 | "\n", 24 | "d = '''paste_data_here'''\n", 25 | "\n", 26 | "pd.read_csv(StringIO(d), sep='\\s+')" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "metadata": {}, 32 | "source": [ 33 | "### Pre-filled example" 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": 2, 39 | "metadata": { 40 | "collapsed": true, 41 | "jupyter": { 42 | "outputs_hidden": true, 43 | "source_hidden": true 44 | } 45 | }, 46 | "outputs": [ 47 | { 48 | "data": { 49 | "text/html": [ 50 | "
\n", 51 | "\n", 64 | "\n", 65 | " \n", 66 | " \n", 67 | " \n", 68 | " \n", 69 | " \n", 70 | " \n", 71 | " \n", 72 | " \n", 73 | " \n", 74 | " \n", 75 | " \n", 76 | " \n", 77 | " \n", 78 | " \n", 79 | " \n", 80 | " \n", 81 | " \n", 82 | " \n", 83 | " \n", 84 | " \n", 85 | " \n", 86 | " \n", 87 | " \n", 88 | " \n", 89 | " \n", 90 | " \n", 91 | " \n", 92 | " \n", 93 | " \n", 94 | " \n", 95 | " \n", 96 | " \n", 97 | " \n", 98 | " \n", 99 | " \n", 100 | " \n", 101 | " \n", 102 | " \n", 103 | " \n", 104 | " \n", 105 | " \n", 106 | " \n", 107 | " \n", 108 | " \n", 109 | " \n", 110 | " \n", 111 | " \n", 112 | " \n", 113 | " \n", 114 | " \n", 115 | " \n", 116 | " \n", 117 | " \n", 118 | " \n", 119 | " \n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | " \n", 124 | " \n", 125 | " \n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | " \n", 138 | " \n", 139 | " \n", 140 | " \n", 141 | " \n", 142 | " \n", 143 | " \n", 144 | " \n", 145 | " \n", 146 | " \n", 147 | " \n", 148 | " \n", 149 | " \n", 150 | " \n", 151 | " \n", 152 | " \n", 153 | " \n", 154 | " \n", 155 | " \n", 156 | " \n", 157 | " \n", 158 | " \n", 159 | " \n", 160 | " \n", 161 | " \n", 162 | " \n", 163 | " \n", 164 | " \n", 165 | " \n", 166 | " \n", 167 | " \n", 168 | " \n", 169 | " \n", 170 | " \n", 171 | " \n", 172 | " \n", 173 | " \n", 174 | " \n", 175 | " \n", 176 | " \n", 177 | "
91866634613742585
0412937654651613761
1510536728826766967
2546155905424734894
3853683869324563
4271568456811165945
5827354883445344242
67383026950237175
7840307158183779847
8707767547261625903
95707142810182195
10674192132409251319
\n", 178 | "
" 179 | ], 180 | "text/plain": [ 181 | " 91 866 634 613 742 585\n", 182 | "0 412 937 654 651 613 761\n", 183 | "1 510 536 728 826 766 967\n", 184 | "2 546 155 905 424 734 894\n", 185 | "3 853 683 869 32 45 63\n", 186 | "4 271 568 456 811 165 945\n", 187 | "5 827 354 883 445 344 242\n", 188 | "6 738 302 695 0 237 175\n", 189 | "7 840 307 158 183 779 847\n", 190 | "8 707 767 547 261 625 903\n", 191 | "9 570 7 142 810 182 195\n", 192 | "10 674 192 132 409 251 319" 193 | ] 194 | }, 195 | "execution_count": 2, 196 | "metadata": {}, 197 | "output_type": "execute_result" 198 | } 199 | ], 200 | "source": [ 201 | "from io import StringIO\n", 202 | "\n", 203 | "d = '''91\t866\t634\t613\t742\t585\n", 204 | "412\t937\t654\t651\t613\t761\n", 205 | "510\t536\t728\t826\t766\t967\n", 206 | "546\t155\t905\t424\t734\t894\n", 207 | "853\t683\t869\t32\t45\t63\n", 208 | "271\t568\t456\t811\t165\t945\n", 209 | "827\t354\t883\t445\t344\t242\n", 210 | "738\t302\t695\t0\t237\t175\n", 211 | "840\t307\t158\t183\t779\t847\n", 212 | "707\t767\t547\t261\t625\t903\n", 213 | "570\t7\t142\t810\t182\t195\n", 214 | "674\t192\t132\t409\t251\t319'''\n", 215 | "\n", 216 | "pd.read_csv(StringIO(d), sep='\\s+')" 217 | ] 218 | }, 219 | { 220 | "cell_type": "code", 221 | "execution_count": null, 222 | "metadata": {}, 223 | "outputs": [], 224 | "source": [] 225 | } 226 | ], 227 | "metadata": { 228 | "kernelspec": { 229 | "display_name": "Python 3", 230 | "language": "python", 231 | "name": "python3" 232 | }, 233 | "language_info": { 234 | "codemirror_mode": { 235 | "name": "ipython", 236 | "version": 3 237 | }, 238 | "file_extension": ".py", 239 | "mimetype": "text/x-python", 240 | "name": "python", 241 | "nbconvert_exporter": "python", 242 | "pygments_lexer": "ipython3", 243 | "version": "3.7.4" 244 | } 245 | }, 246 | "nbformat": 4, 247 | "nbformat_minor": 4 248 | } 249 | -------------------------------------------------------------------------------- /example/nested.hdf5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyterlab/jupyterlab-hdf5/31ef7b732dbfe98c85d54dfab0f28699a1ab6f62/example/nested.hdf5 -------------------------------------------------------------------------------- /example/nested_data_ext.hdf5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyterlab/jupyterlab-hdf5/31ef7b732dbfe98c85d54dfab0f28699a1ab6f62/example/nested_data_ext.hdf5 -------------------------------------------------------------------------------- /example/nested_int.hdf5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyterlab/jupyterlab-hdf5/31ef7b732dbfe98c85d54dfab0f28699a1ab6f62/example/nested_int.hdf5 -------------------------------------------------------------------------------- /example/nested_int_data_ext.hdf5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyterlab/jupyterlab-hdf5/31ef7b732dbfe98c85d54dfab0f28699a1ab6f62/example/nested_int_data_ext.hdf5 -------------------------------------------------------------------------------- /example/non_simple_entities.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyterlab/jupyterlab-hdf5/31ef7b732dbfe98c85d54dfab0f28699a1ab6f62/example/non_simple_entities.h5 -------------------------------------------------------------------------------- 
/example/tutorial_animation.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyterlab/jupyterlab-hdf5/31ef7b732dbfe98c85d54dfab0f28699a1ab6f62/example/tutorial_animation.gif -------------------------------------------------------------------------------- /jupyter-config/jupyter_notebook_config.d/jupyterlab_hdf.json: -------------------------------------------------------------------------------- 1 | { 2 | "NotebookApp": { 3 | "nbserver_extensions": { 4 | "jupyterlab_hdf": true 5 | } 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /jupyterlab_hdf/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) Jupyter Development Team. 4 | # Distributed under the terms of the Modified BSD License. 5 | 6 | from ._version import __version__ 7 | 8 | from notebook.utils import url_path_join 9 | 10 | from .attrs import HdfAttrsHandler 11 | from .contents import HdfContentsHandler 12 | from .data import HdfDataHandler 13 | from .meta import HdfMetaHandler 14 | from .snippet import HdfSnippetHandler 15 | 16 | path_regex = r'(?P(?:(?:/[^/]+)+|/?))' 17 | 18 | 19 | def _jupyter_server_extension_paths(): 20 | return [{'module': 'jupyterlab_hdf'}] 21 | 22 | 23 | def _load_handlers(notebook_dir, web_app): 24 | # Prepend the base_url so that it works in a jupyterhub setting 25 | base_url = web_app.settings['base_url'] if 'base_url' in web_app.settings else '/' 26 | 27 | _handlerDict = dict(( 28 | ('attrs', HdfAttrsHandler), 29 | ('contents', HdfContentsHandler), 30 | ('data', HdfDataHandler), 31 | ('meta', HdfMetaHandler), 32 | ('snippet', HdfSnippetHandler), 33 | )) 34 | 35 | handlers = [ 36 | (url_path_join(base_url, 'hdf', ep, '(.*)'), handler, {'notebook_dir': notebook_dir}) 37 | for ep,handler in _handlerDict.items() 38 | ] 39 | 40 | web_app.add_handlers('.*$', handlers) 41 | 42 | 43 | def load_jupyter_server_extension(nb_server_app): 44 | """ 45 | Called when the extension is loaded. 46 | 47 | Args: 48 | nb_server_app (NotebookApp): handle to the Notebook webserver instance. 49 | """ 50 | notebook_dir = nb_server_app.root_dir if hasattr(nb_server_app, 'root_dir') else nb_server_app.notebook_dir 51 | _load_handlers(notebook_dir, nb_server_app.web_app) 52 | -------------------------------------------------------------------------------- /jupyterlab_hdf/_version.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) Jupyter Development Team. 4 | # Distributed under the terms of the Modified BSD License. 5 | 6 | __version__ = "1.3.0" 7 | -------------------------------------------------------------------------------- /jupyterlab_hdf/api/api.yaml: -------------------------------------------------------------------------------- 1 | # Copyright (c) Jupyter Development Team. 2 | # Distributed under the terms of the Modified BSD License. 3 | # 4 | # jupyterLab_hdf : HDF5 api for Jupyter/Jupyterlab 5 | 6 | openapi: 3.0.0 7 | info: 8 | title: JupyterLab HDF5 proxy 9 | description: 'Proxies HDF5 API requests from JupyterLab to HDF5.' 10 | version: 1.3.0 11 | 12 | servers: 13 | - url: http://{host}:{port}/ 14 | description: 'a jupyter server running the jupyterlab_hdf serverextension. 
Defaults to "localhost:8888"' 15 | variables: 16 | host: 17 | default: 'localhost' 18 | description: 'the address of the host on which your jupyter server is running. Defaults to "localhost"' 19 | port: 20 | default: '8888' 21 | description: 'the port on which your jupyter server is running. Defaults to "8888"' 22 | 23 | paths: 24 | /hdf/attrs/{fpath}: 25 | parameters: 26 | - $ref: '#/components/parameters/fpath' 27 | - $ref: '#/components/parameters/uri' 28 | - $ref: '#/components/parameters/attr_keys' 29 | get: 30 | description: 'get the attributes of an hdf object' 31 | summary: 'get the attributes of an hdf object' 32 | responses: 33 | '200': 34 | $ref: '#/components/responses/attrs' 35 | '400': 36 | $ref: '#/components/responses/400' 37 | '401': 38 | $ref: '#/components/responses/401' 39 | '403': 40 | $ref: '#/components/responses/403' 41 | '500': 42 | $ref: '#/components/responses/500' 43 | 44 | /hdf/contents/{fpath}: 45 | parameters: 46 | - $ref: '#/components/parameters/fpath' 47 | - $ref: '#/components/parameters/uri' 48 | - $ref: '#/components/parameters/ixstr' 49 | - $ref: '#/components/parameters/min_ndim' 50 | get: 51 | description: 'get the contents of an hdf object' 52 | summary: 'get the contents of an hdf object' 53 | responses: 54 | '200': 55 | $ref: '#/components/responses/contents' 56 | '400': 57 | $ref: '#/components/responses/400' 58 | '401': 59 | $ref: '#/components/responses/401' 60 | '403': 61 | $ref: '#/components/responses/403' 62 | '500': 63 | $ref: '#/components/responses/500' 64 | 65 | /hdf/data/{fpath}: 66 | parameters: 67 | - $ref: '#/components/parameters/fpath' 68 | - $ref: '#/components/parameters/uri' 69 | - $ref: '#/components/parameters/ixstr' 70 | - $ref: '#/components/parameters/subixstr' 71 | - $ref: '#/components/parameters/min_ndim' 72 | get: 73 | description: 'get raw array data from one hdf dataset, as a json blob' 74 | summary: 'get data from an hdf dataset' 75 | responses: 76 | '200': 77 | $ref: '#/components/responses/data' 78 | '400': 79 | $ref: '#/components/responses/400' 80 | '401': 81 | $ref: '#/components/responses/401' 82 | '403': 83 | $ref: '#/components/responses/403' 84 | '500': 85 | $ref: '#/components/responses/500' 86 | 87 | /hdf/meta/{fpath}: 88 | parameters: 89 | - $ref: '#/components/parameters/fpath' 90 | - $ref: '#/components/parameters/uri' 91 | - $ref: '#/components/parameters/ixstr' 92 | - $ref: '#/components/parameters/min_ndim' 93 | get: 94 | description: 'get the metadata of an hdf object. 
If the object is a dataset and the ixstr parameter is provided, all shape-related metadata will be for the slab specified by ixstr' 95 | summary: 'get the metadata of an hdf object' 96 | responses: 97 | '200': 98 | $ref: '#/components/responses/meta' 99 | '400': 100 | $ref: '#/components/responses/400' 101 | '401': 102 | $ref: '#/components/responses/401' 103 | '403': 104 | $ref: '#/components/responses/403' 105 | '500': 106 | $ref: '#/components/responses/500' 107 | 108 | /hdf/snippet/{fpath}: 109 | parameters: 110 | - $ref: '#/components/parameters/fpath' 111 | - $ref: '#/components/parameters/uri' 112 | - $ref: '#/components/parameters/ixstr' 113 | - $ref: '#/components/parameters/subixstr' 114 | get: 115 | description: 'get a Python snippet that fetches the hdf dataset or group pointed to by the path and uri' 116 | summary: 'get a Python snippet that fetches an hdf dataset or group' 117 | responses: 118 | '200': 119 | $ref: '#/components/responses/py_snippet' 120 | '400': 121 | $ref: '#/components/responses/400' 122 | '401': 123 | $ref: '#/components/responses/401' 124 | '403': 125 | $ref: '#/components/responses/403' 126 | '500': 127 | $ref: '#/components/responses/500' 128 | 129 | components: 130 | examples: 131 | dataset_contents: 132 | description: 'example contents of a dataset object' 133 | value: 134 | { 135 | 'content': 136 | { 137 | 'attributes': 138 | [ 139 | { 'name': 'attr1', 'dtype': '|O', 'shape': [] }, 140 | { 'name': 'attr2', 'dtype': 'i8', 143 | 'labels': 144 | [ 145 | { 'start': 0, 'stop': 13, 'step': 1 }, 146 | { 'start': 0, 'stop': 5, 'step': 1 }, 147 | { 'start': 0, 'stop': 17, 'step': 1 }, 148 | ], 149 | 'name': 'foo', 150 | 'ndim': 3, 151 | 'shape': [13, 5, 17], 152 | 'size': 1105, 153 | 'type': 'dataset', 154 | }, 155 | 'name': 'foo', 156 | 'type': 'dataset', 157 | 'uri': '/able/foo', 158 | } 159 | group_contents: 160 | description: 'example contents of a group object' 161 | value: 162 | [ 163 | { 'name': 'baker', 'type': 'group', 'uri': '/able/baker' }, 164 | { 'name': 'foo', 'type': 'dataset', 'uri': '/able/foo' }, 165 | { 'name': 'bar', 'type': 'dataset', 'uri': '/able/bar' }, 166 | ] 167 | data_1d: 168 | description: 'a 1D chunk of raw array data' 169 | value: [11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26] 170 | data_2d: 171 | description: 'a 2D chunk of raw array data' 172 | value: 173 | [[11, 12, 13, 14], [15, 16, 17, 18], [19, 20, 21, 22], [23, 24, 25, 26]] 174 | data_4d: 175 | description: 'a 4D chunk of raw array data' 176 | value: 177 | [ 178 | [[[11, 12], [13, 14]], [[15, 16], [17, 18]]], 179 | [[[19, 20], [21, 22]], [[23, 24], [25, 26]]], 180 | ] 181 | dataset_meta: 182 | description: 'metadata for dataset of shape `[13, 5, 17]`' 183 | value: 184 | { 185 | 'attributes': 186 | [ 187 | { 'name': 'attr1', 'dtype': '|O', 'shape': [] }, 188 | { 'name': 'attr2', 'dtype': 'i8', 191 | 'labels': 192 | [ 193 | { 'start': 0, 'stop': 13, 'step': 1 }, 194 | { 'start': 0, 'stop': 5, 'step': 1 }, 195 | { 'start': 0, 'stop': 17, 'step': 1 }, 196 | ], 197 | 'name': 'foo', 198 | 'ndim': 3, 199 | 'shape': [13, 5, 17], 200 | 'size': 1105, 201 | 'type': 'dataset', 202 | } 203 | dataset_meta_w_ixstr: 204 | description: 'metadata for dataset of shape `[13, 5, 17]`, given an ixstr of `"2:11, 3, 1:16"`' 205 | value: 206 | { 207 | 'attributes': 208 | [ 209 | { 'name': 'attr1', 'dtype': '|O', 'shape': [] }, 210 | { 'name': 'attr2', 'dtype': 'i8', 213 | 'labels': 214 | [ 215 | { 'start': 2, 'stop': 11, 'step': 1 }, 216 | { 'start': 3, 'stop': 4, 'step': 1 }, 
217 | { 'start': 1, 'stop': 16, 'step': 1 },
218 | ],
219 | 'name': 'foo',
220 | 'ndim': 2,
221 | 'shape': [9, 15],
222 | 'size': 135,
223 | 'type': 'dataset',
224 | }
225 | group_meta:
226 | description: 'metadata for group'
227 | value:
228 | {
229 | 'attributes': [],
230 | 'children':
231 | [
232 | {
233 | 'name': 'dataset_1',
234 | 'dtype': 'f4', 'shape': [10] }],
244 | },
245 | ],
246 | 'name': 'beta',
247 | 'type': 'group',
248 | }
249 | child_group_meta:
250 | description: 'metadata for child group'
251 | value: { 'attributes': [], 'name': 'beta', 'type': 'group' }
252 | dataset_py_snippet:
253 | description: 'python snippet for dataset'
254 | value: "with h5py.File('/Users/alice/git/jupyterlab-hdf/example/nested_int.hdf5', 'r') as f:\n dataset = f['/leaf01/data01']"
255 | group_py_snippet:
256 | description: 'python snippet for group'
257 | value: "with h5py.File('/Users/alice/git/jupyterlab-hdf/example/nested_int.hdf5', 'r') as f:\n group = f['/leaf01']"
258 | 
259 | parameters:
260 | fpath:
261 | name: fpath
262 | in: path
263 | required: true
264 | description: 'path on disk to an HDF5 file'
265 | schema:
266 | type: string
267 | format: uri
268 | ixstr:
269 | name: ixstr
270 | in: query
271 | required: false
272 | description: 'index specifying which ND slab of a dataset to consider when fetching data. Uses numpy-style index syntax'
273 | schema:
274 | type: string
275 | subixstr:
276 | name: subixstr
277 | in: query
278 | required: false
279 | description: 'index specifying which chunk (of the ND slab specified by ixstr) of a dataset to fetch. Uses numpy-style index syntax. The count of slices in ixstr and subixstr should match'
280 | schema:
281 | type: string
282 | min_ndim:
283 | name: min_ndim
284 | in: query
285 | required: false
286 | description: 'if set, all shape-related metadata and array data fetched from a dataset will be promoted to have at least this many dimensions'
287 | schema:
288 | type: number
289 | attr_keys:
290 | name: attr_keys
291 | in: query
292 | required: false
293 | description: 'keys of the attributes to fetch. If not set, all attributes will be fetched'
294 | schema:
295 | type: array
296 | uri:
297 | name: uri
298 | in: query
299 | required: true
300 | description: 'path within an HDF5 file to a specific group or dataset'
301 | schema:
302 | type: string
303 | 
304 | responses:
305 | '400':
306 | description: 'the request was malformed; url should be of the format `"fpath?uri=uri"`'
307 | '401':
308 | description: 'the request did not specify a file that `h5py` could understand'
309 | '403':
310 | description: 'the request specified a file that does not exist'
311 | '500':
312 | description: 'found and opened file, error getting contents from object specified by the uri'
313 | attrs:
314 | description: 'attributes of an arbitrary hdf object, as a dictionary'
315 | content:
316 | application/json:
317 | schema:
318 | $ref: '#/components/schemas/attrs'
319 | contents:
320 | description: "data representing an arbitrary hdf object, in the format required by the jupyterlab `Contents` stack. If object is a dataset, basic information (including metadata) about that dataset will be returned as a dict.
If object is a group, then basic information (but not metadata) about that group's children will be returned as an array of dicts" 321 | content: 322 | application/json: 323 | schema: 324 | oneOf: 325 | - $ref: '#/components/schemas/dataset_contents' 326 | - type: array 327 | items: 328 | $ref: '#/components/schemas/contents' 329 | examples: 330 | 'contents of a dataset': 331 | $ref: '#/components/examples/dataset_contents' 332 | 'contents of a group': 333 | $ref: '#/components/examples/group_contents' 334 | data: 335 | description: 'a chunk of raw array data from an hdf dataset. May be of any dimensionality' 336 | content: 337 | application/json: 338 | schema: 339 | $ref: '#/components/schemas/data' 340 | examples: 341 | '1D data': 342 | $ref: '#/components/examples/data_1d' 343 | '2D data': 344 | $ref: '#/components/examples/data_2d' 345 | '4D data': 346 | $ref: '#/components/examples/data_4d' 347 | meta: 348 | description: 'metadata of an arbitrary hdf object, as a dictionary' 349 | content: 350 | application/json: 351 | schema: 352 | $ref: '#/components/schemas/meta' 353 | examples: 354 | 'metadata for dataset': 355 | $ref: '#/components/examples/dataset_meta' 356 | 'metadata for dataset, given an ixstr': 357 | $ref: '#/components/examples/dataset_meta_w_ixstr' 358 | 'metadata for group': 359 | $ref: '#/components/examples/group_meta' 360 | py_snippet: 361 | description: 'python code snippet' 362 | content: 363 | application/json: 364 | schema: 365 | $ref: '#/components/schemas/py_snippet' 366 | examples: 367 | 'python snippet for dataset': 368 | $ref: '#/components/examples/dataset_py_snippet' 369 | 'python snippet for group': 370 | $ref: '#/components/examples/group_py_snippet' 371 | 372 | schemas: 373 | attrs: 374 | description: 'attributes of an arbitrary hdf object, as a dictionary' 375 | type: object 376 | additionalProperties: true 377 | attr_meta: 378 | description: 'metadata of an attribute' 379 | required: [name, dtype, shape] 380 | type: object 381 | properties: 382 | name: 383 | description: 'name of an attribute' 384 | type: string 385 | dtype: 386 | description: 'datatype of an attribute' 387 | type: string 388 | shape: 389 | description: 'shape of an attribute' 390 | type: array 391 | items: 392 | type: number 393 | dataset_contents: 394 | description: 'a basic description of an hdf dataset, in the format required by the jupyterlab `Contents` stack' 395 | required: [name, type, uri] 396 | type: object 397 | properties: 398 | content: 399 | $ref: '#/components/schemas/dataset_meta' 400 | name: 401 | description: 'object name (ie last part of uri)' 402 | type: string 403 | type: 404 | description: 'the string literal `"dataset"`' 405 | enum: ['dataset'] 406 | type: string 407 | uri: 408 | description: 'full uri pointing to the object' 409 | type: string 410 | group_contents: 411 | description: 'a basic description of an hdf group, in the format required by the jupyterlab `Contents` stack' 412 | required: [name, type, uri] 413 | type: object 414 | properties: 415 | name: 416 | description: 'object name (ie last part of uri)' 417 | type: string 418 | type: 419 | description: 'the string literal `"group"`' 420 | enum: ['group'] 421 | type: string 422 | uri: 423 | description: 'full uri pointing to the object' 424 | type: string 425 | contents: 426 | description: 'data representing an arbitrary hdf object, in the format required by the jupyterlab `Contents` stack' 427 | discriminator: 428 | propertyName: type 429 | mapping: 430 | dataset: 
'#/components/schemas/dataset_contents' 431 | group: '#/components/schemas/group_contents' 432 | oneOf: 433 | - $ref: '#/components/schemas/dataset_contents' 434 | - $ref: '#/components/schemas/group_contents' 435 | data: 436 | description: 'a chunk of raw array data from an hdf dataset. May be of any dimensionality' 437 | type: array 438 | items: 439 | oneOf: 440 | - type: number 441 | - $ref: '#/components/schemas/data' 442 | dataset_meta: 443 | description: 'metadata of an hdf dataset, as a dictionary' 444 | required: [attributes, dtype, labels, name, ndim, shape, size, type] 445 | type: object 446 | properties: 447 | attributes: 448 | description: 'metadata of the attributes of a dataset' 449 | type: array 450 | items: 451 | $ref: '#/components/schemas/attr_meta' 452 | dtype: 453 | description: 'datatype of an hdf dataset' 454 | type: string 455 | labels: 456 | description: 'ranges that label the indices of an hdf dataset, given as an array of slices' 457 | type: array 458 | items: 459 | $ref: '#/components/schemas/slice' 460 | name: 461 | description: 'name of hdf dataset' 462 | type: string 463 | ndim: 464 | description: 'count of dimensions of an hdf dataset' 465 | type: number 466 | shape: 467 | description: 'shape of an hdf dataset' 468 | type: array 469 | items: 470 | type: number 471 | size: 472 | description: 'count of entries of an hdf dataset' 473 | type: number 474 | type: 475 | description: 'the string literal `"dataset"`' 476 | enum: ['dataset'] 477 | type: string 478 | group_meta: 479 | description: 'metadata of an hdf group, as a dictionary' 480 | required: [attributes, children, name, type] 481 | type: object 482 | properties: 483 | attributes: 484 | description: 'metadata of the attributes of the group' 485 | type: array 486 | items: 487 | $ref: '#/components/schemas/attr_meta' 488 | children: 489 | description: 'metadata of the children of the group' 490 | type: array 491 | items: 492 | oneOf: 493 | - $ref: '#/components/schemas/dataset_meta' 494 | - $ref: '#/components/schemas/child_group_meta' 495 | name: 496 | description: 'name of hdf group' 497 | type: string 498 | type: 499 | description: 'the string literal `"group"`' 500 | enum: ['group'] 501 | type: string 502 | child_group_meta: 503 | description: 'metadata of a child hdf group (does not include children)' 504 | required: [attributes, children, name, type] 505 | type: object 506 | properties: 507 | attributes: 508 | description: 'metadata of the attributes of the group' 509 | type: array 510 | items: 511 | $ref: '#/components/schemas/attr_meta' 512 | name: 513 | description: 'name of hdf group' 514 | type: string 515 | type: 516 | description: 'the string literal `"group"`' 517 | enum: ['group'] 518 | type: string 519 | meta: 520 | description: 'the metadata of an hdf object' 521 | discriminator: 522 | propertyName: type 523 | mapping: 524 | dataset: '#/components/schemas/dataset_meta' 525 | group: '#/components/schemas/group_meta' 526 | oneOf: 527 | - $ref: '#/components/schemas/dataset_meta' 528 | - $ref: '#/components/schemas/group_meta' 529 | py_snippet: 530 | description: 'python code snippet' 531 | type: string 532 | slice: 533 | description: 'python-style slice' 534 | required: [start, stop, step] 535 | type: object 536 | properties: 537 | start: 538 | description: 'first index of the slice' 539 | type: number 540 | stop: 541 | description: 'one past the last index of the slice' 542 | type: number 543 | step: 544 | description: 'step of the slice' 545 | type: number 546 | 
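
# Illustrative example (not part of the spec above): a request against the
# data endpoint. The file and uri are taken from the py_snippet examples in
# this spec; the ixstr value is arbitrary and assumes the dataset has at
# least one dimension:
#
#   GET http://localhost:8888/hdf/data/example/nested_int.hdf5?uri=/leaf01/data01&ixstr=0:10
#
# The response body would be a JSON array holding the slab of the dataset at
# `/leaf01/data01` selected by the numpy-style index `[0:10]`.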
-------------------------------------------------------------------------------- /jupyterlab_hdf/attrs.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) Jupyter Development Team. 4 | # Distributed under the terms of the Modified BSD License. 5 | 6 | from .baseHandler import HdfFileManager, HdfBaseHandler 7 | 8 | __all__ = ["HdfAttrsManager", "HdfAttrsHandler"] 9 | 10 | ## manager 11 | class HdfAttrsManager(HdfFileManager): 12 | """Implements HDF5 attributes handling""" 13 | 14 | def _getResponse(self, responseObj, attr_keys=None, **kwargs): 15 | return responseObj.attributes(attr_keys) 16 | 17 | 18 | ## handler 19 | class HdfAttrsHandler(HdfBaseHandler): 20 | """A handler for HDF5 attributes""" 21 | 22 | managerClass = HdfAttrsManager 23 | -------------------------------------------------------------------------------- /jupyterlab_hdf/baseHandler.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) Jupyter Development Team. 4 | # Distributed under the terms of the Modified BSD License. 5 | 6 | from typing import Union 7 | import h5py 8 | import os 9 | import traceback 10 | from h5grove.encoders import orjson_encode 11 | from h5grove.models import LinkResolution 12 | from h5grove.utils import NotFoundError 13 | from tornado import web 14 | from tornado.httpclient import HTTPError 15 | 16 | from notebook.base.handlers import APIHandler 17 | from notebook.utils import url_path_join 18 | 19 | from .config import HdfConfig 20 | from .exception import JhdfError 21 | from .responses import create_response 22 | from .util import jsonize 23 | 24 | __all__ = ["HdfBaseManager", "HdfFileManager", "HdfBaseHandler"] 25 | 26 | 27 | ## manager 28 | class HdfBaseManager: 29 | """Base class for implementing HDF5 handling""" 30 | 31 | def __init__(self, log, notebook_dir): 32 | self.log = log 33 | self.notebook_dir = notebook_dir 34 | 35 | def _get(self, f, uri, **kwargs): 36 | raise NotImplementedError 37 | 38 | def get(self, relfpath, uri, **kwargs): 39 | def _handleErr(code: int, msg: Union[str, dict]): 40 | extra = dict( 41 | ( 42 | ("relfpath", relfpath), 43 | ("uri", uri), 44 | *kwargs.items(), 45 | ) 46 | ) 47 | 48 | if isinstance(msg, dict): 49 | # encode msg as json 50 | msg["debugVars"] = {**msg.get("debugVars", {}), **extra} 51 | msg = orjson_encode(msg).decode() 52 | else: 53 | msg = "\n".join((msg, ", ".join(f"{key}: {val}" for key, val in extra.items()))) 54 | 55 | self.log.error(msg) 56 | raise HTTPError(code, msg) 57 | 58 | if not relfpath: 59 | msg = f"The request was malformed; fpath should not be empty." 60 | _handleErr(400, msg) 61 | 62 | fpath = url_path_join(self.notebook_dir, relfpath) 63 | 64 | if not os.path.exists(fpath): 65 | msg = f"The request specified a file that does not exist." 
66 | _handleErr(403, msg) 67 | else: 68 | try: 69 | # test opening the file with h5py 70 | with h5py.File(fpath, "r"): 71 | pass 72 | except Exception: 73 | msg = f"The request did not specify a file that `h5py` could understand.\n" f"Error: {traceback.format_exc()}" 74 | _handleErr(401, msg) 75 | try: 76 | result = self._get(fpath, uri, **kwargs) 77 | except JhdfError as e: 78 | msg = e.args[0] 79 | msg["traceback"] = traceback.format_exc() 80 | msg["type"] = "JhdfError" 81 | _handleErr(400, msg) 82 | except NotFoundError as e: 83 | _handleErr(404, str(e)) 84 | except Exception: 85 | msg = f"Found and opened file, error getting contents from object specified by the uri.\n" f"Error: {traceback.format_exc()}" 86 | _handleErr(500, msg) 87 | 88 | return result 89 | 90 | 91 | class HdfFileManager(HdfBaseManager): 92 | """Implements base HDF5 file handling""" 93 | 94 | def __init__(self, log, notebook_dir, resolve_links): 95 | super().__init__(log, notebook_dir) 96 | self.resolve_links = resolve_links 97 | 98 | def _get(self, fpath, uri, **kwargs): 99 | with h5py.File(fpath, "r") as f: 100 | return self._getFromFile(f, uri, **kwargs) 101 | 102 | def _getFromFile(self, f, uri, **kwargs): 103 | return jsonize(self._getResponse(create_response(f, uri, self.resolve_links), **kwargs)) 104 | 105 | def _getResponse(self, responseObj, **kwargs): 106 | raise NotImplementedError 107 | 108 | 109 | ## handler 110 | class HdfBaseHandler(APIHandler): 111 | managerClass = None 112 | 113 | """Base class for HDF5 api handlers 114 | """ 115 | 116 | def initialize(self, notebook_dir): 117 | if self.managerClass is None: 118 | raise NotImplementedError 119 | 120 | self.notebook_dir = notebook_dir 121 | hdf_config = HdfConfig(config=self.config) 122 | self.manager = self.managerClass(log=self.log, notebook_dir=notebook_dir, resolve_links=LinkResolution.ONLY_VALID if hdf_config.resolve_links else LinkResolution.NONE) 123 | 124 | @web.authenticated 125 | async def get(self, path): 126 | """Based on an api request, get either the contents of a group or a 127 | slice of a dataset and return it as serialized JSON. 
128 | """ 129 | uri = "/" + self.get_query_argument("uri").lstrip("/") 130 | itemss = () 131 | 132 | # get any query parameter vals 133 | _kws = ("min_ndim", "ixstr", "subixstr") 134 | _vals = (self.get_query_argument(kw, default=None) for kw in _kws) 135 | itemss += (zip(_kws, _vals),) 136 | 137 | # get any repeated query parameter array vals 138 | _array_kws = ("attr_keys",) 139 | _array_vals = (self.get_query_arguments(kw) or None for kw in _array_kws) 140 | itemss += (zip(_array_kws, _array_vals),) 141 | 142 | # filter all of the collected params and vals into a kwargs dict 143 | kwargs = {k: v if v else None for items in itemss for k, v in items} 144 | 145 | # do any needed type conversions of param vals 146 | _num_kws = ("min_ndim",) 147 | for k in (k for k in _num_kws if kwargs[k] is not None): 148 | kwargs[k] = int(kwargs[k]) 149 | 150 | try: 151 | self.finish(orjson_encode(self.manager.get(path, uri, **kwargs), default=jsonize)) 152 | except HTTPError as err: 153 | self.set_status(err.code) 154 | response = err.response.body if err.response else str(err.code) 155 | self.finish("\n".join((response, err.message))) 156 | 157 | # def getQueryArguments(self, key, func=None): 158 | # if func is not None: 159 | # return [func(x) for x in self.get_query_argument(key).split(',')] if key in self.request.query_arguments else None 160 | # else: 161 | # return [x for x in self.get_query_argument(key).split(',')] if key in self.request.query_arguments else None 162 | -------------------------------------------------------------------------------- /jupyterlab_hdf/config.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) Jupyter Development Team. 4 | # Distributed under the terms of the Modified BSD License. 5 | 6 | from traitlets.config import Configurable 7 | from traitlets.traitlets import Bool 8 | 9 | 10 | class HdfConfig(Configurable): 11 | resolve_links = Bool(False, config=True, help=("Whether soft and external links should be resolved when exploring HDF5 files.")) 12 | -------------------------------------------------------------------------------- /jupyterlab_hdf/contents.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) Jupyter Development Team. 4 | # Distributed under the terms of the Modified BSD License. 5 | from .baseHandler import HdfFileManager, HdfBaseHandler 6 | 7 | __all__ = ["HdfContentsManager", "HdfContentsHandler"] 8 | 9 | ## manager 10 | class HdfContentsManager(HdfFileManager): 11 | """Implements HDF5 contents handling""" 12 | 13 | def _getResponse(self, responseObj, ixstr=None, min_ndim=None, **kwargs): 14 | return responseObj.contents(content=True, ixstr=ixstr, min_ndim=min_ndim) 15 | 16 | 17 | ## handler 18 | class HdfContentsHandler(HdfBaseHandler): 19 | """A handler for HDF5 contents""" 20 | 21 | managerClass = HdfContentsManager 22 | -------------------------------------------------------------------------------- /jupyterlab_hdf/data.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) Jupyter Development Team. 4 | # Distributed under the terms of the Modified BSD License. 
5 | 6 | from .baseHandler import HdfFileManager, HdfBaseHandler 7 | 8 | 9 | __all__ = ["HdfDataManager", "HdfDataHandler"] 10 | 11 | ## manager 12 | class HdfDataManager(HdfFileManager): 13 | """Implements HDF5 data handling""" 14 | 15 | def _getResponse(self, responseObj, ixstr=None, subixstr=None, min_ndim=None, **kwargs): 16 | # # DEBUG: uncomment for logging 17 | # from .util import dsetContentDict, parseSubindex 18 | # logd = dsetContentDict(f[uri], ixstr=ixstr) 19 | # logd['subixstr'] = subixstr 20 | # if subixstr is not None: 21 | # logd['ixcompound'] = parseSubindex(ixstr, subixstr, f[uri].shape) 22 | # self.log.info('{}'.format(logd)) 23 | 24 | return responseObj.data(ixstr=ixstr, subixstr=subixstr, min_ndim=min_ndim) 25 | 26 | 27 | ## handler 28 | class HdfDataHandler(HdfBaseHandler): 29 | """A handler for HDF5 data""" 30 | 31 | managerClass = HdfDataManager 32 | -------------------------------------------------------------------------------- /jupyterlab_hdf/exception.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) Jupyter Development Team. 4 | # Distributed under the terms of the Modified BSD License. 5 | 6 | __all__ = ['JhdfError'] 7 | 8 | class JhdfError(Exception): 9 | pass 10 | -------------------------------------------------------------------------------- /jupyterlab_hdf/meta.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) Jupyter Development Team. 4 | # Distributed under the terms of the Modified BSD License. 5 | 6 | from .baseHandler import HdfFileManager, HdfBaseHandler 7 | 8 | __all__ = ["HdfMetaManager", "HdfMetaHandler"] 9 | 10 | ## manager 11 | class HdfMetaManager(HdfFileManager): 12 | """Implements HDF5 metadata handling""" 13 | 14 | def _getResponse(self, responseObj, ixstr=None, min_ndim=None, **kwargs): 15 | return responseObj.metadata(ixstr=ixstr, min_ndim=min_ndim) 16 | 17 | 18 | ## handler 19 | class HdfMetaHandler(HdfBaseHandler): 20 | """A handler for HDF5 metadata""" 21 | 22 | managerClass = HdfMetaManager 23 | -------------------------------------------------------------------------------- /jupyterlab_hdf/responses.py: -------------------------------------------------------------------------------- 1 | from typing import Generic, TypeVar 2 | from h5grove.content import DatasetContent, EntityContent, ExternalLinkContent, GroupContent, ResolvedEntityContent, SoftLinkContent 3 | from h5grove.utils import LinkError 4 | import h5py 5 | import h5grove 6 | from .util import attrMetaDict, dsetChunk, shapemeta, uriJoin 7 | 8 | 9 | H5GroveEntity = TypeVar("H5GroveEntity", DatasetContent, EntityContent, ExternalLinkContent, GroupContent, ResolvedEntityContent, SoftLinkContent) 10 | 11 | 12 | class EntityResponse(Generic[H5GroveEntity]): 13 | def __init__(self, h5grove_entity: H5GroveEntity): 14 | self.h5grove_entity = h5grove_entity 15 | 16 | def contents(self, content=False, ixstr=None, min_ndim=None): 17 | d = dict( 18 | ( 19 | ("name", self.name), 20 | ("uri", self.uri), 21 | ("type", self.type), 22 | ) 23 | ) 24 | if not content: 25 | return d 26 | 27 | return dict( 28 | ( 29 | ("content", self.metadata(ixstr=ixstr, min_ndim=min_ndim)), 30 | *d.items(), 31 | ) 32 | ) 33 | 34 | def metadata(self, **kwargs): 35 | return dict((("name", self.name), ("type", self.type))) 36 | 37 | @property 38 | def name(self): 39 | return self.h5grove_entity.name 40 | 41 | @property 42 | def uri(self): 43 | 
return self.h5grove_entity._path 44 | 45 | @property 46 | def type(self): 47 | return self.h5grove_entity.type 48 | 49 | 50 | class ExternalLinkResponse(EntityResponse[ExternalLinkContent]): 51 | def metadata(self, **kwargs): 52 | return dict( 53 | sorted( 54 | ( 55 | *super().metadata().items(), 56 | ("targetFile", self.h5grove_entity._target_file), 57 | ("targetUri", self.h5grove_entity._target_path), 58 | ) 59 | ) 60 | ) 61 | 62 | 63 | class SoftLinkResponse(EntityResponse[SoftLinkContent]): 64 | def metadata(self, **kwargs): 65 | return dict( 66 | sorted( 67 | ( 68 | *super().metadata().items(), 69 | ("targetUri", self.h5grove_entity._target_path), 70 | ) 71 | ) 72 | ) 73 | 74 | 75 | ResolvedH5GroveEntity = TypeVar("ResolvedH5GroveEntity", DatasetContent, GroupContent, ResolvedEntityContent) 76 | 77 | 78 | class ResolvedEntityResponse(EntityResponse[ResolvedH5GroveEntity]): 79 | @property 80 | def _hobj(self): 81 | return self.h5grove_entity._h5py_entity 82 | 83 | def attributes(self, attr_keys=None): 84 | return self.h5grove_entity.attributes(attr_keys) 85 | 86 | def metadata(self, **kwargs): 87 | attribute_names = sorted(self._hobj.attrs.keys()) 88 | return dict((*super().metadata().items(), ("attributes", [attrMetaDict(self._hobj.attrs.get_id(k)) for k in attribute_names]))) 89 | 90 | 91 | class DatasetResponse(ResolvedEntityResponse[DatasetContent]): 92 | def metadata(self, ixstr=None, min_ndim=None, is_child=False): 93 | d = super().metadata() 94 | shapekeys = ("shape",) if is_child else ("labels", "ndim", "shape", "size") 95 | smeta = {k: v for k, v in shapemeta(self._hobj.shape, self._hobj.size, ixstr=ixstr, min_ndim=min_ndim).items() if k in shapekeys} 96 | 97 | return dict( 98 | sorted( 99 | ( 100 | ("dtype", self._hobj.dtype.str), 101 | *d.items(), 102 | *smeta.items(), 103 | ) 104 | ) 105 | ) 106 | 107 | def data(self, ixstr=None, subixstr=None, min_ndim=None): 108 | return dsetChunk(self._hobj, ixstr=ixstr, subixstr=subixstr, min_ndim=min_ndim) 109 | 110 | 111 | class GroupResponse(ResolvedEntityResponse[GroupContent]): 112 | def __init__(self, h5grove_entity: GroupContent, resolve_links: bool): 113 | super().__init__(h5grove_entity) 114 | self.resolve_links = resolve_links 115 | 116 | def contents(self, content=False, ixstr=None, min_ndim=None): 117 | if not content: 118 | return super().contents(ixstr=ixstr, min_ndim=min_ndim) 119 | 120 | # Recurse one level 121 | return [ 122 | create_response(self.h5grove_entity._h5file, uriJoin(self.uri, suburi), self.resolve_links).contents( 123 | content=False, 124 | ixstr=ixstr, 125 | min_ndim=min_ndim, 126 | ) 127 | for suburi in self._hobj.keys() 128 | ] 129 | 130 | def metadata(self, is_child=False, **kwargs): 131 | if is_child: 132 | return super().metadata() 133 | 134 | return dict( 135 | sorted( 136 | ( 137 | ("children", [create_response(self.h5grove_entity._h5file, uriJoin(self.uri, suburi), self.resolve_links).metadata(is_child=True, **kwargs) for suburi in self._hobj.keys()]), 138 | *super().metadata().items(), 139 | ) 140 | ) 141 | ) 142 | 143 | 144 | def create_response(h5file: h5py.File, uri: str, resolve_links: bool): 145 | try: 146 | h5grove_entity = h5grove.create_content(h5file, uri, resolve_links) 147 | except LinkError: 148 | h5grove_entity = h5grove.create_content(h5file, uri, resolve_links=False) 149 | 150 | if isinstance(h5grove_entity, h5grove.content.ExternalLinkContent): 151 | return ExternalLinkResponse(h5grove_entity) 152 | if isinstance(h5grove_entity, h5grove.content.SoftLinkContent): 153 | return
SoftLinkResponse(h5grove_entity) 154 | if isinstance(h5grove_entity, h5grove.content.DatasetContent): 155 | return DatasetResponse(h5grove_entity) 156 | elif isinstance(h5grove_entity, h5grove.content.GroupContent): 157 | return GroupResponse(h5grove_entity, resolve_links) 158 | else: 159 | return ResolvedEntityResponse(h5grove_entity) 160 | -------------------------------------------------------------------------------- /jupyterlab_hdf/snippet.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) Jupyter Development Team. 4 | # Distributed under the terms of the Modified BSD License. 5 | 6 | import h5py 7 | 8 | from .baseHandler import HdfBaseManager, HdfBaseHandler 9 | from .util import hobjType 10 | 11 | __all__ = ['HdfSnippetManager', 'HdfSnippetHandler'] 12 | 13 | ## snippet templates 14 | dsetTemplate = '''with h5py.File('{fpath}', 'r') as f: 15 | dataset = f['{uri}']''' 16 | 17 | groupTemplate = '''with h5py.File('{fpath}', 'r') as f: 18 | group = f['{uri}']''' 19 | 20 | ixTemplate = '''[{ixstr}]''' 21 | 22 | ## manager 23 | class HdfSnippetManager(HdfBaseManager): 24 | """Implements HDF5 contents handling 25 | """ 26 | def _get(self, fpath, uri, ixstr=None, subixstr=None, **kwargs): 27 | with h5py.File(fpath, 'r') as f: 28 | tipe = hobjType(f[uri]) 29 | 30 | if tipe == 'dataset': 31 | return ''.join(( 32 | dsetTemplate.format(fpath=fpath, uri=uri), 33 | ixTemplate.format(ixstr=ixstr) if ixstr is not None else '', 34 | ixTemplate.format(ixstr=subixstr) if ixstr is not None and subixstr is not None else '', 35 | )) 36 | elif tipe == 'group': 37 | return groupTemplate.format(fpath=fpath, uri=uri) 38 | else: 39 | raise ValueError('the `hdf/snippet` endpoint currently only supports Dataset and Group objects') 40 | 41 | ## handler 42 | class HdfSnippetHandler(HdfBaseHandler): 43 | """A handler for HDF5 contents 44 | """ 45 | managerClass = HdfSnippetManager 46 | -------------------------------------------------------------------------------- /jupyterlab_hdf/tests/test_attrs.py: -------------------------------------------------------------------------------- 1 | import h5py 2 | import numpy as np 3 | import os 4 | from jupyterlab_hdf.tests.utils import ServerTest 5 | 6 | 7 | class TestAttrs(ServerTest): 8 | def setUp(self): 9 | super().setUp() 10 | 11 | with h5py.File(os.path.join(self.notebook_dir, "test_file.h5"), "w") as h5file: 12 | # Group with no attributes 13 | h5file.create_group("group_without_attrs") 14 | 15 | # Group with simple attributes 16 | attr_grp = h5file.create_group("group_with_attrs") 17 | attr_grp.attrs["string_attr"] = "I am a group" 18 | attr_grp.attrs["number_attr"] = 5676 19 | attr_grp.attrs["float64_attr"] = np.float64(3.1417) 20 | 21 | # Dataset with non-simple attributes 22 | attr_dset = h5file.create_dataset("dataset_with_attrs", shape=()) 23 | attr_dset.attrs["bool_attr"] = False 24 | attr_dset.attrs["list_attr"] = [0, 1, 2] 25 | attr_dset.attrs["complex_attr"] = 1 + 2j 26 | 27 | def test_group_without_attrs(self): 28 | response = self.tester.get(["attrs", "test_file.h5"], params={"uri": "/group_without_attrs"}) 29 | 30 | assert response.status_code == 200 31 | payload = response.json() 32 | assert payload == {} 33 | 34 | def test_group_with_simple_attrs(self): 35 | response = self.tester.get(["attrs", "test_file.h5"], params={"uri": "/group_with_attrs"}) 36 | 37 | assert response.status_code == 200 38 | payload = response.json() 39 | assert payload == {"string_attr": "I am a 
group", "number_attr": 5676, "float64_attr": 3.1417} 40 | 41 | def test_dset_with_non_simple_attrs(self): 42 | response = self.tester.get(["attrs", "test_file.h5"], params={"uri": "/dataset_with_attrs"}) 43 | 44 | assert response.status_code == 200 45 | payload = response.json() 46 | assert payload == {'bool_attr': False, 'list_attr': [0, 1, 2], 'complex_attr': [1, 2]} 47 | 48 | def test_one_attr_from_group(self): 49 | response = self.tester.get(["attrs", "test_file.h5"], params={"uri": "/group_with_attrs", "attr_keys": "string_attr"}) 50 | 51 | assert response.status_code == 200 52 | payload = response.json() 53 | assert payload == {"string_attr": "I am a group"} 54 | 55 | def test_two_attr_from_dset(self): 56 | response = self.tester.get(["attrs", "test_file.h5"], params={"uri": "/dataset_with_attrs", "attr_keys": ["bool_attr", "list_attr"]}) 57 | 58 | assert response.status_code == 200 59 | payload = response.json() 60 | assert payload == {"bool_attr": False, "list_attr": [0, 1, 2]} 61 | -------------------------------------------------------------------------------- /jupyterlab_hdf/tests/test_contents.py: -------------------------------------------------------------------------------- 1 | import h5py 2 | import os 3 | import numpy as np 4 | from jupyterlab_hdf.tests.utils import ServerTest 5 | 6 | 7 | class TestContents(ServerTest): 8 | def setUp(self): 9 | super().setUp() 10 | 11 | with h5py.File(os.path.join(self.notebook_dir, "test_file.h5"), "w") as h5file: 12 | # Empty group 13 | h5file.create_group("empty_group") 14 | 15 | # Group with 3 children 16 | grp = h5file.create_group("group_with_children") 17 | # A simple dataset 18 | grp["dataset_1"] = np.random.random((2, 3, 4)) 19 | # a group 20 | grp.create_group("nested_group") 21 | # and an external link 22 | grp["external"] = h5py.ExternalLink("another_file.h5", "path/in/the/file") 23 | 24 | def test_empty_group(self): 25 | response = self.tester.get(["contents", "test_file.h5"], params={"uri": "/empty_group"}) 26 | 27 | assert response.status_code == 200 28 | payload = response.json() 29 | assert payload == [] 30 | 31 | def test_group_with_children(self): 32 | response = self.tester.get(["contents", "test_file.h5"], params={"uri": "/group_with_children"}) 33 | 34 | assert response.status_code == 200 35 | payload = response.json() 36 | assert payload == [ 37 | dict((("name", "dataset_1"), ("type", "dataset"), ("uri", "/group_with_children/dataset_1"))), 38 | dict((("name", "external"), ("type", "external_link"), ("uri", "/group_with_children/external"))), 39 | dict((("name", "nested_group"), ("type", "group"), ("uri", "/group_with_children/nested_group"))), 40 | ] 41 | 42 | def test_full_dataset(self): 43 | uri = "/group_with_children/dataset_1" 44 | response = self.tester.get(["contents", "test_file.h5"], params={"uri": uri}) 45 | 46 | assert response.status_code == 200 47 | payload = response.json() 48 | assert payload == dict( 49 | ( 50 | ("name", "dataset_1"), 51 | ("type", "dataset"), 52 | ("uri", uri), 53 | ( 54 | "content", 55 | dict( 56 | ( 57 | ("attributes", []), 58 | ("dtype", "f8") 25 | 26 | def test_oneD_dataset(self): 27 | response = self.tester.get(["data", "test_file.h5"], params={"uri": "/oneD_dataset"}) 28 | 29 | assert response.status_code == 200 30 | payload = response.json() 31 | assert payload == ONE_D.tolist() 32 | 33 | def test_twoD_dataset(self): 34 | response = self.tester.get(["data", "test_file.h5"], params={"uri": "/twoD_dataset"}) 35 | 36 | assert response.status_code == 200 37 | payload = 
response.json() 38 | assert payload == TWO_D.tolist() 39 | 40 | def test_threeD_dataset(self): 41 | response = self.tester.get(["data", "test_file.h5"], params={"uri": "/threeD_dataset"}) 42 | 43 | assert response.status_code == 200 44 | payload = response.json() 45 | assert payload == THREE_D.tolist() 46 | 47 | def test_sliced_threeD_dataset(self): 48 | ixstr = ":,1:3, 2" 49 | sliced_dataset = THREE_D[:, 1:3, 2] 50 | response = self.tester.get(["data", "test_file.h5"], params={"uri": "/threeD_dataset", "ixstr": ixstr}) 51 | 52 | assert response.status_code == 200 53 | payload = response.json() 54 | assert payload == sliced_dataset.tolist() 55 | 56 | def test_complex_dataset(self): 57 | response = self.tester.get(["data", "test_file.h5"], params={"uri": "/complex"}) 58 | 59 | assert response.status_code == 200 60 | payload = response.json() 61 | # Complex are serialized as double-value array 62 | assert payload == [[c.real, c.imag] for c in COMPLEX] 63 | 64 | def test_complex_dataset_at_least2d(self): 65 | response = self.tester.get(["data", "test_file.h5"], params={"uri": "/complex", "min_ndim": 2, "ixstr": "0:2"}) 66 | 67 | assert response.status_code == 200 68 | payload = response.json() 69 | assert payload == [[[1, 1]], [[1, 2]]] 70 | 71 | def test_scalar_dataset(self): 72 | response = self.tester.get(["data", "test_file.h5"], params={"uri": "/scalar"}) 73 | 74 | assert response.status_code == 200 75 | payload = response.json() 76 | assert payload == SCALAR 77 | 78 | def test_scalar_dataset_at_least2d(self): 79 | response = self.tester.get(["data", "test_file.h5"], params={"uri": "/scalar", "min_ndim": 2}) 80 | 81 | assert response.status_code == 200 82 | payload = response.json() 83 | assert payload == [[SCALAR]] 84 | 85 | def test_empty_dataset(self): 86 | response = self.tester.get(["data", "test_file.h5"], params={"uri": "/empty"}) 87 | 88 | assert response.status_code == 200 89 | payload = response.json() 90 | assert payload is None 91 | -------------------------------------------------------------------------------- /jupyterlab_hdf/tests/test_meta.py: -------------------------------------------------------------------------------- 1 | import h5py 2 | import os 3 | import numpy as np 4 | from jupyterlab_hdf.tests.utils import ServerTest 5 | 6 | 7 | class TestMeta(ServerTest): 8 | def setUp(self): 9 | super().setUp() 10 | 11 | with h5py.File(os.path.join(self.notebook_dir, "test_file.h5"), "w") as h5file: 12 | # Empty group 13 | h5file.create_group("empty_group") 14 | 15 | # Group with 2 children 16 | grp = h5file.create_group("group_with_children") 17 | # A simple dataset 18 | grp["dataset_1"] = np.random.random((2, 3, 4)) 19 | # and a group with attributes 20 | attr_grp = grp.create_group("group_with_attrs") 21 | attr_grp.attrs["array_attr"] = np.arange(0, 1, 0.1, dtype=">f4") 22 | attr_grp.attrs["bool_attr"] = True 23 | attr_grp.attrs["complex_attr"] = np.complex64(1 + 2j) 24 | attr_grp.attrs["number_attr"] = np.int32(5676) 25 | attr_grp.attrs["string_attr"] = "I am a group" 26 | 27 | # Scalar dataset 28 | h5file["scalar"] = 56 29 | 30 | # Empty dataset 31 | h5file["empty"] = h5py.Empty(">f8") 32 | 33 | # External link 34 | h5file["external"] = h5py.ExternalLink("another_file.h5", "path/in/the/file") 35 | 36 | # Soft link 37 | h5file["soft"] = h5py.SoftLink("/scalar") 38 | 39 | def test_empty_group(self): 40 | response = self.tester.get(["meta", "test_file.h5"], params={"uri": "/empty_group"}) 41 | 42 | assert response.status_code == 200 43 | payload = response.json() 44 | 
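# NOTE: each tester.get(["meta", "test_file.h5"], params=...) call in this
# file exercises a REST route of the form `GET /hdf/meta/<fpath>?uri=<uri>`
# (route prefix inferred from the `hdf/...` endpoint naming used elsewhere
# in this package); the JSON payload is the entity metadata built by responses.py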
assert payload["name"] == "empty_group" 45 | assert payload["type"] == "group" 46 | assert payload["attributes"] == [] 47 | assert payload["children"] == [] 48 | 49 | def test_group_with_children(self): 50 | response = self.tester.get(["meta", "test_file.h5"], params={"uri": "/group_with_children"}) 51 | 52 | assert response.status_code == 200 53 | payload = response.json() 54 | assert payload["name"] == "group_with_children" 55 | assert payload["type"] == "group" 56 | assert payload["attributes"] == [] 57 | assert payload["children"] == [ 58 | {"name": "dataset_1", "dtype": "f4", "shape": [10]}, 64 | {"name": "bool_attr", "dtype": "|b1", "shape": []}, 65 | {"name": "complex_attr", "dtype": "f4", "shape": [10]}, 81 | {"name": "bool_attr", "dtype": "|b1", "shape": []}, 82 | {"name": "complex_attr", "dtype": "f8"), ("labels", None), ("name", "empty"), ("ndim", 0), ("shape", None), ("size", 0), ("type", "dataset"))) 127 | 128 | def test_external_link(self): 129 | response = self.tester.get(["meta", "test_file.h5"], params={"uri": "/external"}) 130 | 131 | assert response.status_code == 200 132 | payload = response.json() 133 | 134 | assert payload == dict((("name", "external"), ("targetFile", "another_file.h5"), ("targetUri", "path/in/the/file"), ("type", "external_link"))) 135 | 136 | def test_soft_link(self): 137 | response = self.tester.get(["meta", "test_file.h5"], params={"uri": "/soft"}) 138 | 139 | assert response.status_code == 200 140 | payload = response.json() 141 | 142 | assert payload == dict((("name", "soft"), ("targetUri", "/scalar"), ("type", "soft_link"))) 143 | -------------------------------------------------------------------------------- /jupyterlab_hdf/tests/test_meta_links.py: -------------------------------------------------------------------------------- 1 | import h5py 2 | import numpy as np 3 | import os 4 | from jupyterlab_hdf.tests.utils import ServerTestWithLinkResolution 5 | 6 | 7 | class TestMetaWithLinkResolution(ServerTestWithLinkResolution): 8 | def setUp(self): 9 | super().setUp() 10 | 11 | with h5py.File(os.path.join(self.notebook_dir, "target_file.h5"), "w") as h5file: 12 | # Group with 2 children 13 | grp = h5file.create_group("group_with_children") 14 | # A simple dataset 15 | grp["dataset_1"] = np.random.random((2, 3, 4)) 16 | # and a group with attributes 17 | attr_grp = grp.create_group("group_with_attrs") 18 | attr_grp.attrs["array_attr"] = np.arange(0, 1, 0.1, dtype=">f4") 19 | attr_grp.attrs["bool_attr"] = True 20 | attr_grp.attrs["complex_attr"] = np.complex64(1 + 2j) 21 | attr_grp.attrs["number_attr"] = np.int32(5676) 22 | attr_grp.attrs["string_attr"] = "I am a group" 23 | 24 | with h5py.File(os.path.join(self.notebook_dir, "test_file.h5"), "w") as h5file: 25 | h5file["scalar"] = 56 26 | 27 | h5file["soft"] = h5py.SoftLink("/scalar") 28 | h5file["broken_soft"] = h5py.SoftLink("/not_existing") 29 | 30 | h5file["broken_external"] = h5py.ExternalLink("target_file.h5", "/not/a/path") 31 | h5file["external"] = h5py.ExternalLink("target_file.h5", "/group_with_children") 32 | 33 | def test_broken_external_link(self): 34 | response = self.tester.get(["meta", "test_file.h5"], params={"uri": "/broken_external"}) 35 | 36 | assert response.status_code == 200 37 | payload = response.json() 38 | 39 | assert payload == dict((("name", "broken_external"), ("targetFile", "target_file.h5"), ("targetUri", "/not/a/path"), ("type", "external_link"))) 40 | 41 | def test_working_external_link(self): 42 | response = self.tester.get(["meta", "test_file.h5"], 
params={"uri": "/external"}) 43 | 44 | assert response.status_code == 200 45 | payload = response.json() 46 | assert payload["name"] == "external" 47 | assert payload["type"] == "group" 48 | assert payload["attributes"] == [] 49 | assert payload["children"] == [ 50 | {"name": "dataset_1", "dtype": "f4", "shape": [10]}, 56 | {"name": "bool_attr", "dtype": "|b1", "shape": []}, 57 | {"name": "complex_attr", "dtype": "= ndim``. A view is returned for array 34 | inputs. Dimensions are prepended if `pos` is 0, so for example, 35 | a 1-D array of shape ``(N,)`` with ``ndim=4`` becomes a view of 36 | shape ``(1, 1, 1, N)``. Dimensions are appended if `pos` is -1, 37 | so for example a 2-D array of shape ``(M, N)`` becomes a view of 38 | shape ``(M, N, 1, 1)`` when ``ndim=4``. 39 | """ 40 | ary = np.array(ary, copy=False, subok=True) 41 | if ary.ndim: 42 | pos = np.core.multiarray.normalize_axis_index(pos, ary.ndim + 1) 43 | extra = ndim - ary.ndim 44 | if extra > 0: 45 | ind = pos * (slice(None),) + extra * (None,) + (Ellipsis,) 46 | ary = ary[ind] 47 | return ary 48 | 49 | 50 | ## chunk handling 51 | def dsetChunk(dset, ixstr=None, subixstr=None, min_ndim=None): 52 | if ixstr is None: 53 | chunk = dset[...] 54 | elif subixstr is None: 55 | chunk = dset[parseIndex(ixstr)] 56 | else: 57 | validateSubindex(dset.shape, dset.size, ixstr, subixstr) 58 | chunk = dset[parseSubindex(dset.shape, dset.size, ixstr, subixstr)] 59 | 60 | if min_ndim is not None: 61 | chunk = atleast_nd(chunk, min_ndim, pos=-1) 62 | 63 | return chunk 64 | 65 | 66 | def hobjType(hobj): 67 | if isinstance(hobj, h5py.Dataset): 68 | return "dataset" 69 | elif isinstance(hobj, h5py.Group): 70 | return "group" 71 | else: 72 | return "other" 73 | 74 | 75 | def attrMetaDict(attrId): 76 | return dict( 77 | ( 78 | ("name", attrId.name), 79 | ("dtype", attrId.dtype.str), 80 | ("shape", attrId.shape), 81 | ) 82 | ) 83 | 84 | 85 | ## index parsing and handling 86 | class _Guard: 87 | def __init__(self): 88 | self.val = False 89 | 90 | def __call__(self): 91 | if self.val: 92 | return True 93 | else: 94 | self.val = True 95 | return False 96 | 97 | 98 | def parseIndex(node_or_string): 99 | """Safely evaluate an expression node or a string containing 100 | a (limited subset) of valid numpy index or slice expressions. 
101 | """ 102 | if isinstance(node_or_string, str): 103 | if "," not in node_or_string: 104 | # handle ndim <= 1 case 105 | node_or_string += "," 106 | node_or_string = ast.parse("dummy[{}]".format(node_or_string.lstrip(" \t")), mode="eval") 107 | if isinstance(node_or_string, ast.Expression): 108 | node_or_string = node_or_string.body 109 | if isinstance(node_or_string, ast.Subscript): 110 | node_or_string = node_or_string.slice 111 | 112 | def _raise_malformed_node(node): 113 | raise ValueError("malformed node or string: {}, {}".format(node, ast.dump(node))) 114 | 115 | def _raise_nested_tuple_node(node): 116 | raise ValueError("tuples inside of tuple indices are not supported: {}, {}".format(node, ast.dump(node))) 117 | 118 | # from cpy37, should work until they remove ast.Num (not until cpy310) 119 | def _convert_num(node): 120 | if isinstance(node, ast.Constant): 121 | if isinstance(node.value, (int, float, complex)): 122 | return node.value 123 | elif isinstance(node, ast.Num): 124 | # ast.Num was removed from ast grammar in cpy38 125 | return node.n # pragma: no cover 126 | _raise_malformed_node(node) 127 | 128 | def _convert_signed_num(node): 129 | if isinstance(node, ast.UnaryOp) and isinstance(node.op, (ast.UAdd, ast.USub)): 130 | operand = _convert_num(node.operand) 131 | if isinstance(node.op, ast.UAdd): 132 | return +operand 133 | else: 134 | return -operand 135 | return _convert_num(node) 136 | 137 | _nested_tuple_guard = _Guard() 138 | 139 | def _convert(node): 140 | if isinstance(node, ast.Tuple): 141 | if _nested_tuple_guard(): 142 | _raise_nested_tuple_node(node) 143 | 144 | return tuple(map(_convert, node.elts)) 145 | elif isinstance(node, ast.Slice): 146 | return slice( 147 | _convert(node.lower) if node.lower is not None else None, 148 | _convert(node.upper) if node.upper is not None else None, 149 | # for now, no step support 150 | # _convert(node.step) if node.step is not None else None, 151 | None, 152 | ) 153 | elif isinstance(node, ast.NameConstant) and node.value is None: 154 | # support for literal None in slices, eg 'slice(None, ...)' 155 | return None 156 | elif isinstance(node, ast.Ellipsis): 157 | # support for three dot '...' ellipsis syntax 158 | return ... 159 | elif isinstance(node, ast.Name) and node.id == "Ellipsis": 160 | # support for 'Ellipsis' ellipsis syntax 161 | return ... 
162 | elif isinstance(node, ast.Index): 163 | # ast.Index was removed from ast grammar in cpy39 164 | return _convert(node.value) # pragma: no cover 165 | elif isinstance(node, ast.ExtSlice): 166 | # ast.ExtSlice was removed from ast grammar in cpy39 167 | _nested_tuple_guard() # pragma: no cover 168 | return tuple(map(_convert, node.dims)) 169 | 170 | return _convert_signed_num(node) 171 | 172 | return _convert(node_or_string) 173 | 174 | 175 | def parseSubindex(shape, size, ixstr, subixstr): 176 | ix = parseIndex(ixstr) 177 | meta = shapemeta(shape, size, ixstr) 178 | subix = parseIndex(subixstr) 179 | 180 | ixcompound = list(ix) 181 | for d, dlabel, subdix in zip(meta["visdims"], meta["labels"], subix): 182 | start = dlabel.start + (subdix.start * dlabel.step) 183 | stop = dlabel.start + (min(subdix.stop, dlabel.stop // dlabel.step) * dlabel.step) # dlabel.start + (subdix.stop*dlabel.step) 184 | ixcompound[d] = slice(start, stop) 185 | 186 | return tuple(ixcompound) 187 | 188 | 189 | def shapemeta(shape, size, ixstr=None, min_ndim=None): 190 | if shape is None: 191 | return dict( 192 | ( 193 | ("labels", None), 194 | ("ndim", 0), 195 | ("shape", None), 196 | ("size", 0), 197 | ("visdims", None), 198 | ) 199 | ) 200 | 201 | if ixstr is None: 202 | ix = (slice(None),) * len(shape) 203 | else: 204 | ix = parseIndex(ixstr) 205 | 206 | ndimIx = len([dix for dix in ix if isinstance(dix, slice)]) 207 | 208 | promote = 0 if min_ndim is None else max(0, min_ndim - ndimIx) 209 | if promote: 210 | ix = (slice(None),) * promote + ix 211 | ndimIx += promote 212 | shape = (1,) * promote + shape 213 | 214 | visdimsIx = tuple(d for d, dix in enumerate(ix) if isinstance(dix, slice)) 215 | 216 | labelsIx = [slice(*ix[d].indices(shape[d])) for d in visdimsIx] 217 | shapeIx = [slicelen(ix[d], shape[d]) for d in visdimsIx] 218 | 219 | sizeIx = np.prod(shapeIx) if ndimIx else size 220 | 221 | return dict( 222 | ( 223 | ("labels", labelsIx), 224 | ("ndim", ndimIx), 225 | ("shape", shapeIx), 226 | ("size", sizeIx), 227 | ("visdims", visdimsIx), 228 | ) 229 | ) 230 | 231 | 232 | def slicelen(slyce, seqlen): 233 | """Based on https://stackoverflow.com/a/36188683""" 234 | start, stop, step = slyce.indices(seqlen) 235 | return max(0, (stop - start + (step - (1 if step > 0 else -1))) // step) 236 | 237 | 238 | def validateSubindex(shape, size, ixstr, subixstr): 239 | meta = shapemeta(shape, size, ixstr) 240 | subix = parseIndex(subixstr) 241 | 242 | if len(subix) != len(meta["visdims"]): 243 | msg = dict( 244 | ( 245 | ("message", "malformed subixstr: number of visible dimensions in index not equal to number of dimensions in subindex."), 246 | ("debugVars", {"ixstr": ixstr, "subix": subix, "subixstr": subixstr, "visdims": meta["visdims"]}), 247 | ) 248 | ) 249 | raise JhdfError(msg) 250 | 251 | 252 | ## json handling 253 | def jsonize(v): 254 | """Turns a value into a JSON serializable version""" 255 | if isinstance(v, (int, float, str)) or v is None: 256 | return v 257 | if isinstance(v, bytes): 258 | return v.decode() 259 | if isinstance(v, dict): 260 | return {k: jsonize(v) for k, v in v.items()} 261 | if isinstance(v, (list, tuple)): 262 | return [jsonize(i) for i in v] 263 | if isinstance(v, np.generic) or isinstance(v, np.ndarray): 264 | return jsonize(v.tolist()) 265 | if isinstance(v, slice): 266 | return dict( 267 | ( 268 | ("start", v.start), 269 | ("stop", v.stop), 270 | ("step", v.step), 271 | ) 272 | ) 273 | if isinstance(v, complex): 274 | return [v.real, v.imag] 275 | if isinstance(v, h5py.Empty): 
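# an h5py.Empty value marks a dataset or attribute with an HDF5 null
# dataspace; it carries no data, so it serializes to JSON null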
276 | return None 277 | raise TypeError("Cannot jsonize {}".format(type(v))) 278 | 279 | 280 | ## uri handling 281 | _emptyUriRe = re.compile("//") 282 | 283 | 284 | def uriJoin(*parts): 285 | return _emptyUriRe.sub("/", "/".join(parts)) 286 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@jupyterlab/hdf5", 3 | "version": "1.3.0", 4 | "description": "Open and explore (very large) HDF5 files in Jupyterlab.", 5 | "keywords": [ 6 | "jupyter", 7 | "jupyterlab", 8 | "jupyterlab-extension" 9 | ], 10 | "homepage": "https://github.com/jupyterlab/jupyterlab-hdf5", 11 | "bugs": { 12 | "url": "https://github.com/jupyterlab/jupyterlab-hdf5/issues" 13 | }, 14 | "repository": { 15 | "type": "git", 16 | "url": "https://github.com/jupyterlab/jupyterlab-hdf5.git" 17 | }, 18 | "license": "BSD-3-Clause", 19 | "author": "Project Jupyter", 20 | "files": [ 21 | "lib/**/*.{d.ts,eot,gif,html,jpg,js,js.map,json,png,svg,woff2,ttf}", 22 | "schema/**/*.{json,}", 23 | "src/**/*.{js,jsx,ts,tsx}", 24 | "style/**/*.{css,eot,gif,html,jpg,json,png,svg,woff2,ttf}" 25 | ], 26 | "main": "lib/index.js", 27 | "types": "lib/index.d.ts", 28 | "style": "style/index.css", 29 | "scripts": { 30 | "black:check": "black --check 'setup.py' 'jupyterlab_hdf'", 31 | "black:fix": "black 'setup.py' 'jupyterlab_hdf'", 32 | "build": "tsc", 33 | "build:dev": "jlpm build:integrity && jlpm run lab:install", 34 | "build:integrity": "jlpm install && jlpm build", 35 | "build:watch": "jlpm run build --watch", 36 | "clean": "rimraf lib && rimraf tsconfig.tsbuildinfo", 37 | "clean:more": "jlpm run clean && rimraf build && rimraf dist && rimraf package && rimraf *.tgz", 38 | "clean:slate": "jlpm run clean:more && rimraf node_modules", 39 | "deduplicate": "jlpm yarn-deduplicate -s fewer", 40 | "eslint:check": "eslint . --ext .js,.jsx,.ts,.tsx", 41 | "eslint:fix": "eslint . --ext .js,.jsx,.ts,.tsx --fix", 42 | "lab:install": "jupyter labextension link .", 43 | "lab:link": "jupyter labextension link . 
--no-build", 44 | "lab:uninstall": "jupyter labextension uninstall @jupyterlab/hdf5", 45 | "lab:unlink": "jupyter labextension uninstall @jupyterlab/hdf5 --no-build", 46 | "lint:check": "jlpm run eslint:check && jlpm run prettier:check && jlpm run black:check", 47 | "lint:fix": "jlpm run eslint:fix && jlpm run prettier:fix && jlpm run black:fix", 48 | "prepublishOnly": "jlpm build:integrity", 49 | "prettier:check": "prettier --check '**/*{.ts,.tsx,.js,.jsx,.css,.json,.md}'", 50 | "prettier:fix": "prettier --write '**/*{.ts,.tsx,.js,.jsx,.css,.json,.md}'", 51 | "watch": "tsc -w" 52 | }, 53 | "dependencies": { 54 | "@jupyterlab/application": "^3.0.0", 55 | "@jupyterlab/apputils": "^3.0.0", 56 | "@jupyterlab/coreutils": "^5.0.0", 57 | "@jupyterlab/docmanager": "^3.0.0", 58 | "@jupyterlab/docregistry": "^3.0.0", 59 | "@jupyterlab/filebrowser": "^3.0.0", 60 | "@jupyterlab/notebook": "^3.0.0", 61 | "@jupyterlab/services": "^6.0.0", 62 | "@lumino/algorithm": "^1.3.0", 63 | "@lumino/coreutils": "^1.5.0", 64 | "@lumino/datagrid": "^0.14.0", 65 | "@lumino/messaging": "^1.4.0", 66 | "@lumino/signaling": "^1.4.0", 67 | "@lumino/widgets": "^1.14.0" 68 | }, 69 | "devDependencies": { 70 | "@typescript-eslint/eslint-plugin": "^4.10.0", 71 | "@typescript-eslint/parser": "^4.10.0", 72 | "eslint": "^7.15.0", 73 | "eslint-config-prettier": "^7.0.0", 74 | "husky": "^3.0.1", 75 | "lint-staged": "^10.5.3", 76 | "prettier": "^2.2.1", 77 | "rimraf": "~2.6.2", 78 | "shell-quote": "^1.7.2", 79 | "typescript": "~4.1.0", 80 | "yarn-deduplicate": "^3.1.0" 81 | }, 82 | "publishConfig": { 83 | "access": "public" 84 | }, 85 | "husky": { 86 | "hooks": { 87 | "pre-commit": [ 88 | "lint-staged" 89 | ] 90 | } 91 | }, 92 | "jupyterlab": { 93 | "extension": true, 94 | "discovery": { 95 | "server": { 96 | "managers": [ 97 | "pip" 98 | ], 99 | "base": { 100 | "name": "jupyterlab_hdf" 101 | } 102 | } 103 | } 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 | line-length = 200 3 | -------------------------------------------------------------------------------- /release.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | description = """if run with no args, this script will: 4 | 1. determine the true project version from jupyterlab_hdf/_version.py 5 | 2. "version" 6 | - force the version in package.json to agree 7 | 3. "tag" 8 | - create a git version tag 9 | - push said tag upstream 10 | 4. "pypi" 11 | - build/bundle the labextension together with the python package 12 | - do the pypi release 13 | 5. 
"npmjs" 14 | - build the labextension 15 | - do the npmjs release 16 | """ 17 | 18 | import argparse as argpar 19 | import json 20 | import subprocess 21 | from packaging.version import parse 22 | 23 | from setupbase import get_version 24 | 25 | VERSION_FILE_PY = "jupyterlab_hdf/_version.py" 26 | 27 | 28 | def assertEqualVersion(): 29 | serverVersion = parse(serverExtensionVersion()) 30 | frontendVersion = parse(labExtensionVersion()) 31 | 32 | error_msg = "Frontend ({}) and server ({}) version do not match".format(frontendVersion, serverVersion) 33 | assert serverVersion == frontendVersion, error_msg 34 | 35 | 36 | def prepLabextensionBundle(): 37 | subprocess.run(["jlpm", "install"], check=True) 38 | subprocess.run(["jlpm", "clean:slate"], check=True) 39 | 40 | 41 | def tag(version, dry_run=False, kind=None): 42 | """git tagging""" 43 | kw = {"version": version, "kind": kind} 44 | tag = "{kind}_v{version}".format(**kw) if kind else "v{version}".format(**kw) 45 | 46 | if dry_run: 47 | print("Would tag: {}".format(tag)) 48 | else: 49 | subprocess.run(["git", "tag", "-a", tag], check=True) 50 | subprocess.run(["git", "push", "--tags"], check=True) 51 | 52 | 53 | def pypi(wheel=True, dry_run=False): 54 | """release on pypi""" 55 | if wheel: 56 | subprocess.run(["python", "-m", "pip", "install", "--upgrade", "setuptools", "wheel"], check=True) 57 | 58 | # build the source (sdist) and binary wheel (bdist_wheel) releases 59 | subprocess.run(["python", "setup.py", "sdist", "bdist_wheel"], check=True) 60 | else: 61 | # build just the source release 62 | subprocess.run(["python", "setup.py", "sdist"], check=True) 63 | 64 | if dry_run: 65 | # check the dist 66 | subprocess.run(["twine", "check", "dist/*"], check=True) 67 | else: 68 | # release to the production pypi server 69 | subprocess.run(["twine", "upload", "dist/*"], check=True) 70 | 71 | 72 | def npmjs(dry_run=False): 73 | """release on npmjs""" 74 | if dry_run: 75 | # dry run build and release 76 | subprocess.run(["npm", "publish", "--access", "public", "--dry-run"], check=True) 77 | else: 78 | # build and release 79 | subprocess.run(["npm", "publish", "--access", "public"], check=True) 80 | 81 | 82 | def labExtensionVersion(dry_run=False, version=None): 83 | if version: 84 | if "rc" in version: 85 | version, rc = version.split("rc") 86 | version = version + "-rc.{}".format(rc) 87 | 88 | force_ver_cmd = ["npm", "--no-git-tag-version", "version", version, "--force", "--allow-same-version"] 89 | force_ver_info = " ".join(force_ver_cmd) 90 | 91 | if dry_run: 92 | print("Would force npm version with: {}".format(force_ver_info)) 93 | else: 94 | # force the labextension version to match the supplied version 95 | print("> {}".format(force_ver_info)) 96 | subprocess.run(force_ver_cmd, check=True) 97 | else: 98 | # get single source of truth from the Typescript labextension 99 | with open("package.json") as f: 100 | info = json.load(f) 101 | 102 | version = info["version"] 103 | 104 | return version 105 | 106 | 107 | def serverExtensionVersion(): 108 | # get single source of truth from the Python serverextension 109 | return get_version(VERSION_FILE_PY) 110 | 111 | 112 | def doRelease(actions, dry_run=False): 113 | # treat the serverextension version as the "real" single source of truth 114 | version = serverExtensionVersion() 115 | 116 | if "version" in actions: 117 | # force the labextension version to agree with the serverextension version 118 | labExtensionVersion(dry_run=dry_run, version=version) 119 | 120 | if "tag" in actions: 121 | # tag 
with version and push the tag 122 | tag(dry_run=dry_run, version=version) 123 | 124 | if "pypi" in actions or "npmjs" in actions: 125 | # prep the build area for the labextension bundle 126 | prepLabextensionBundle() 127 | 128 | if "pypi" in actions: 129 | # release to pypi 130 | pypi(dry_run=dry_run) 131 | 132 | if "npmjs" in actions: 133 | # release to npmjs 134 | npmjs(dry_run=dry_run) 135 | 136 | 137 | def main(): 138 | parser = argpar.ArgumentParser(description=description) 139 | 140 | parser.add_argument("--dry-run", action="store_true", help="Performs a dry run of all release actions") 141 | parser.add_argument( 142 | "--actions", nargs="*", choices={"version", "tag", "pypi", "npmjs"}, default={"version", "tag", "pypi", "npmjs"}, help="optionally select a subset of the release actions to perform" 143 | ) 144 | 145 | parsed = vars(parser.parse_args()) 146 | actions = parsed["actions"] 147 | dry_run = parsed["dry_run"] 148 | 149 | doRelease(actions, dry_run=dry_run) 150 | 151 | 152 | if __name__ == "__main__": 153 | main() 154 | -------------------------------------------------------------------------------- /scratch/dataset.ai: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyterlab/jupyterlab-hdf5/31ef7b732dbfe98c85d54dfab0f28699a1ab6f62/scratch/dataset.ai -------------------------------------------------------------------------------- /scratch/genNested.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import h5py 3 | import numpy as np 4 | from pathlib import Path 5 | 6 | def genData(n, func=None, shape=None, umin=None, umax=None): 7 | if func is None: func = np.random.uniform 8 | if shape is None: shape = (1000, 1000) 9 | if umin is None: umin = 0 10 | if umax is None: umax = 1000 11 | 12 | print(shape) 13 | 14 | data = np.empty(shape) 15 | data[:, 0] = n 16 | data[0, :] = n 17 | data[1:, 1:] = func(umin, umax, size=np.array(shape) - 1) 18 | return data 19 | 20 | def genLeaf(group, data, ext=None, n=None): 21 | if n is None: n = 0 22 | 23 | leaf = group.create_group('leaf%02d' % (n+1)) 24 | 25 | # data = np.full(shape, n) 26 | dpath = ('data%02d' % n) if ext is None else str(Path('data%02d' % n).with_suffix(ext)) 27 | 28 | if data is None: 29 | data = genData(n)  # fall back to the genData defaults (`dataKwargs` was undefined here) 30 | group.create_dataset(dpath, data=data) 31 | 32 | return leaf 33 | 34 | def genNested(name, N=None, ext=None, fillRange=False, func=None, shape=None, suffix='.hdf5', umin=None, umax=None): 35 | if N is None: N = 5 36 | 37 | with h5py.File(Path(name).with_suffix(suffix), 'w') as f: 38 | group = f 39 | 40 | for n in range(N): 41 | if fillRange: 42 | data = np.arange(np.prod(shape)).reshape(shape) + n*.1 43 | else: 44 | data = genData(n, func=func, shape=shape, umin=umin, umax=umax) 45 | 46 | group = genLeaf(group, data, ext=ext, n=n) 47 | 48 | if __name__=='__main__': 49 | genNested('nested', shape=(10,10)) 50 | # genNested('nested_int_one_d', N=2, fillRange=True, shape=(1000,)*1) 51 | # genNested('nested_int_high_d', N=2, fillRange=True, shape=(50,)*4) 52 | -------------------------------------------------------------------------------- /scratch/nested-contents.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "%reload_ext autoreload\n", 10 | "%autoreload 2\n", 11 | "\n", 12 | "from collections import
namedtuple\n", 13 | "import h5py\n", 14 | "import os\n", 15 | "import re\n", 16 | "\n", 17 | "from genNested import genNested" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": 7, 23 | "metadata": {}, 24 | "outputs": [ 25 | { 26 | "name": "stdout", 27 | "output_type": "stream", 28 | "text": [ 29 | "data00.data\n", 30 | "data01.data\n", 31 | "data02.data\n", 32 | "data03.data\n", 33 | "data04.data\n" 34 | ] 35 | } 36 | ], 37 | "source": [ 38 | "genNested(ext='.data')" 39 | ] 40 | }, 41 | { 42 | "cell_type": "markdown", 43 | "metadata": {}, 44 | "source": [ 45 | "# misc" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": 11, 51 | "metadata": {}, 52 | "outputs": [ 53 | { 54 | "name": "stdout", 55 | "output_type": "stream", 56 | "text": [ 57 | "['__array__', '__bool__', '__class__', '__delattr__', '__dict__', '__dir__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__getitem__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__iter__', '__le__', '__len__', '__lt__', '__module__', '__ne__', '__new__', '__nonzero__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__setitem__', '__sizeof__', '__str__', '__subclasshook__', '__weakref__', '_d', '_dcpl', '_dxpl', '_e', '_filters', '_id', '_lapl', '_lcpl', '_local', 'astype', 'attrs', 'chunks', 'compression', 'compression_opts', 'dims', 'dtype', 'external', 'file', 'fillvalue', 'fletcher32', 'flush', 'id', 'is_virtual', 'len', 'maxshape', 'name', 'ndim', 'parent', 'read_direct', 'ref', 'refresh', 'regionref', 'resize', 'scaleoffset', 'shape', 'shuffle', 'size', 'value', 'virtual_sources', 'write_direct']\n", 58 | "{}\n", 59 | "float64\n", 60 | "2\n", 61 | "(1000, 1000)\n" 62 | ] 63 | } 64 | ], 65 | "source": [ 66 | "import h5py\n", 67 | "\n", 68 | "dsetPth='/leaf01/leaf02/data02.data'\n", 69 | "grpPth = '/leaf01/leaf02'\n", 70 | "\n", 71 | "with h5py.File('nested.hdf5', 'r') as f:\n", 72 | " dset = f[dsetPth]\n", 73 | " grp = f[grpPth]\n", 74 | " \n", 75 | " print(dir(dset))\n", 76 | " print(dict(*dset.attrs.items()))\n", 77 | " print(dset.dtype)\n", 78 | " print(dset.ndim)\n", 79 | " print(dset.shape)" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": 1, 85 | "metadata": {}, 86 | "outputs": [ 87 | { 88 | "name": "stdout", 89 | "output_type": "stream", 90 | "text": [ 91 | "float64\n", 92 | "['__bool__', '__class__', '__delattr__', '__dir__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__getitem__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__le__', '__len__', '__lt__', '__mul__', '__ne__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__rmul__', '__setattr__', '__setstate__', '__sizeof__', '__str__', '__subclasshook__', 'alignment', 'base', 'byteorder', 'char', 'descr', 'fields', 'flags', 'hasobject', 'isalignedstruct', 'isbuiltin', 'isnative', 'itemsize', 'kind', 'metadata', 'name', 'names', 'ndim', 'newbyteorder', 'num', 'shape', 'str', 'subdtype', 'type']\n" 93 | ] 94 | } 95 | ], 96 | "source": [ 97 | "import h5py\n", 98 | "\n", 99 | "dsetPth='/leaf01/leaf02/data02.data'\n", 100 | "grpPth = '/leaf01/leaf02'\n", 101 | "\n", 102 | "with h5py.File('nested.hdf5', 'r') as f:\n", 103 | " dset = f[dsetPth]\n", 104 | " grp = f[grpPth]\n", 105 | " \n", 106 | " dtype = dset.dtype\n", 107 | " print(dtype)\n", 108 | " print(dir(dtype))" 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": 8, 114 | "metadata": {}, 115 | "outputs": [ 116 | { 117 | "name": "stdout", 118 | "output_type": 
"stream", 119 | "text": [ 120 | "[('', '" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 2, 69 | "metadata": {}, 70 | "outputs": [], 71 | "source": [ 72 | "genNested('nested_int', func=np.random.randint, suffix='.h5', **genkws)" 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": 3, 78 | "metadata": {}, 79 | "outputs": [ 80 | { 81 | "ename": "TypeError", 82 | "evalue": "unsupported operand type(s) for ** or pow(): 'str' and 'dict'", 83 | "output_type": "error", 84 | "traceback": [ 85 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", 86 | "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", 87 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mgenNested\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'nested_data_ext'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mext\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'.data'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msuffix\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'.h5,'\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mgenkws\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", 88 | "\u001b[0;31mTypeError\u001b[0m: unsupported operand type(s) for ** or pow(): 'str' and 'dict'" 89 | ] 90 | } 91 | ], 92 | "source": [ 93 | "genNested('nested_data_ext', ext='.data', suffix='.h5,' **genkws)" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": 3, 99 | "metadata": {}, 100 | "outputs": [], 101 | "source": [ 102 | "# genNested('nested_int_one_d', N=2, fillRange=True, shape=(1000,)*1)\n", 103 | "genNested('nested_int_high_d', N=2, fillRange=True, shape=(40, 50, 60, 70))" 104 | ] 105 | }, 106 | { 107 | "cell_type": "markdown", 108 | "metadata": {}, 109 | "source": [ 110 | "
" 111 | ] 112 | }, 113 | { 114 | "cell_type": "code", 115 | "execution_count": 2, 116 | "metadata": {}, 117 | "outputs": [], 118 | "source": [ 119 | "def datasetDict(data):\n", 120 | " return dict([\n", 121 | " ('data', data)\n", 122 | " ])\n", 123 | "\n", 124 | "def apiSplit(apipath):\n", 125 | " fpath,uri = apipath.split('::')\n", 126 | " uri = '/' + uri.lstrip('/')\n", 127 | " return fpath,uri\n", 128 | "\n", 129 | "_emptyUriRe = re.compile('//')\n", 130 | "def uriJoin(*parts):\n", 131 | " return _emptyUriRe.sub('/', '/'.join(parts))" 132 | ] 133 | }, 134 | { 135 | "cell_type": "markdown", 136 | "metadata": {}, 137 | "source": [ 138 | "# get data as slice of dataset" 139 | ] 140 | }, 141 | { 142 | "cell_type": "code", 143 | "execution_count": 4, 144 | "metadata": {}, 145 | "outputs": [], 146 | "source": [ 147 | "def getDatasetHdf(dset, rslice, cslice):\n", 148 | " return [datasetDict(dset[rslice, cslice].tolist())]" 149 | ] 150 | }, 151 | { 152 | "cell_type": "code", 153 | "execution_count": 5, 154 | "metadata": {}, 155 | "outputs": [], 156 | "source": [ 157 | "uriPath='/leaf01/leaf02/data02'\n", 158 | "row = [0, 100]\n", 159 | "col = [300, 400]\n", 160 | "\n", 161 | "with h5py.File('nested.hdf5', 'r') as f:\n", 162 | " dsetDicts = getDatasetHdf(f[uriPath], slice(*row), slice(*col))" 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": 6, 168 | "metadata": {}, 169 | "outputs": [ 170 | { 171 | "name": "stdout", 172 | "output_type": "stream", 173 | "text": [ 174 | "[2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0]\n", 175 | "[354.0801205661843, 263.3045041027946, 915.3626615907067, 895.6892912969805, 199.078683331102, 936.5571186083694, 843.3847828064879, 529.9872286550377, 638.9560465999808, 244.6667289546418]\n", 176 | "[71.89232190411676, 127.3325714968102, 759.8493181720801, 239.95158737540612, 535.3828295801447, 471.41321741625364, 667.8442122960569, 916.7827814606713, 463.04704059719217, 26.19853606088851]\n" 177 | ] 178 | } 179 | ], 180 | "source": [ 181 | "print(dsetDicts[0]['data'][0][:10],\n", 182 | " dsetDicts[0]['data'][1][:10],\n", 183 | " dsetDicts[0]['data'][-1][:10],\n", 184 | " sep='\\n')" 185 | ] 186 | }, 187 | { 188 | "cell_type": "code", 189 | "execution_count": null, 190 | "metadata": {}, 191 | "outputs": [], 192 | "source": [] 193 | }, 194 | { 195 | "cell_type": "markdown", 196 | "metadata": {}, 197 | "source": [ 198 | "# misc" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": 12, 204 | "metadata": {}, 205 | "outputs": [ 206 | { 207 | "name": "stdout", 208 | "output_type": "stream", 209 | "text": [ 210 | "['_MutableMapping__marker', '__abstractmethods__', '__bool__', '__class__', '__contains__', '__delattr__', '__delitem__', '__dict__', '__dir__', '__doc__', '__enter__', '__eq__', '__exit__', '__format__', '__ge__', '__getattribute__', '__getitem__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__iter__', '__le__', '__len__', '__lt__', '__module__', '__ne__', '__new__', '__nonzero__', '__reduce__', '__reduce_ex__', '__repr__', '__reversed__', '__setattr__', '__setitem__', '__sizeof__', '__slots__', '__str__', '__subclasshook__', '__weakref__', '_abc_impl', '_d', '_e', '_gcpl_crt_order', '_id', '_lapl', '_lcpl', 'attrs', 'clear', 'close', 'copy', 'create_dataset', 'create_dataset_like', 'create_group', 'create_virtual_dataset', 'driver', 'fid', 'file', 'filename', 'flush', 'get', 'id', 'items', 'keys', 'libver', 'mode', 'move', 'name', 'parent', 'pop', 'popitem', 'ref', 'regionref', 'require_dataset', 
'require_group', 'setdefault', 'swmr_mode', 'update', 'userblock_size', 'values', 'visit', 'visititems']\n", 211 | "['__bool__', '__class__', '__contains__', '__copy__', '__delattr__', '__dir__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__iter__', '__le__', '__len__', '__lt__', '__ne__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__setstate__', '__sizeof__', '__str__', '__subclasshook__', '_close', 'close', 'fileno', 'get_access_plist', 'get_comment', 'get_create_plist', 'get_file_image', 'get_filesize', 'get_freespace', 'get_intent', 'get_linkval', 'get_mdc_config', 'get_mdc_hit_rate', 'get_mdc_size', 'get_num_objs', 'get_objname_by_idx', 'get_objtype_by_idx', 'get_vfd_handle', 'id', 'link', 'links', 'locked', 'move', 'name', 'reopen', 'reset_mdc_hit_rate_stats', 'set_comment', 'set_mdc_config', 'start_swmr_write', 'unlink', 'valid']\n", 212 | "['__bool__', '__class__', '__contains__', '__copy__', '__delattr__', '__dir__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__iter__', '__le__', '__len__', '__lt__', '__ne__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__setstate__', '__sizeof__', '__str__', '__subclasshook__', '_close', 'close', 'fileno', 'get_comment', 'get_create_plist', 'get_linkval', 'get_num_objs', 'get_objname_by_idx', 'get_objtype_by_idx', 'id', 'link', 'links', 'locked', 'move', 'set_comment', 'unlink', 'valid']\n" 213 | ] 214 | } 215 | ], 216 | "source": [ 217 | "import h5py\n", 218 | "\n", 219 | "dsetPth='/leaf01/leaf02/data02'\n", 220 | "grpPth = '/leaf01/leaf02'\n", 221 | "\n", 222 | "with h5py.File('nested.hdf5', 'r') as f:\n", 223 | " dset = f[dsetPth]\n", 224 | " grp = f[grpPth]\n", 225 | " \n", 226 | "# print(dir(grp))\n", 227 | " print(dir(grp.file))\n", 228 | " print(dir(grp.file.id))\n", 229 | " print(dir(grp.id))" 230 | ] 231 | }, 232 | { 233 | "cell_type": "code", 234 | "execution_count": null, 235 | "metadata": {}, 236 | "outputs": [], 237 | "source": [] 238 | } 239 | ], 240 | "metadata": { 241 | "kernelspec": { 242 | "display_name": "Python 3", 243 | "language": "python", 244 | "name": "python3" 245 | }, 246 | "language_info": { 247 | "codemirror_mode": { 248 | "name": "ipython", 249 | "version": 3 250 | }, 251 | "file_extension": ".py", 252 | "mimetype": "text/x-python", 253 | "name": "python", 254 | "nbconvert_exporter": "python", 255 | "pygments_lexer": "ipython3", 256 | "version": "3.7.6" 257 | } 258 | }, 259 | "nbformat": 4, 260 | "nbformat_minor": 4 261 | } 262 | -------------------------------------------------------------------------------- /scratch/nested-meta.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "%reload_ext autoreload\n", 10 | "%autoreload 2\n", 11 | "\n", 12 | "from collections import namedtuple\n", 13 | "import h5py\n", 14 | "import os\n", 15 | "import re\n", 16 | "\n", 17 | "from genNested import genNested" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": 4, 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "# genNested('nested', shape=(10,10))\n", 27 | "genNested('nested_int_one_d', N=2, fillRange=True, shape=(1000,)*1)\n", 28 | "# genNested('nested_int_high_d', N=2, fillRange=True, shape=(50,)*4)" 29 | ] 30 | }, 31 
| { 32 | "cell_type": "markdown", 33 | "metadata": {}, 34 | "source": [ 35 | "# get metadata as list" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": 19, 41 | "metadata": {}, 42 | "outputs": [], 43 | "source": [ 44 | "Meta = namedtuple('Meta', ('kind', 'name', 'uri'))\n", 45 | "\n", 46 | "def getMeta(group, prefix='/'): \n", 47 | " return [Meta(\n", 48 | " 'group' if isinstance(val, h5py.Group) else 'dataset', \n", 49 | " key,\n", 50 | " os.path.join(prefix, key)\n", 51 | " ) for key,val in group.items()]\n", 52 | "\n", 53 | "def getMetaAll(group, prefix='/', meta=None):\n", 54 | " if meta is None: meta = []\n", 55 | " \n", 56 | " for key,val in group.items():\n", 57 | " uri = os.path.join(prefix, key)\n", 58 | " if isinstance(val, h5py.Group):\n", 59 | " meta.append(Meta('group', key, uri))\n", 60 | " getMetaAll(val, uri, meta)\n", 61 | " else:\n", 62 | " meta.append(Meta('dataset', key, uri))\n", 63 | " \n", 64 | " return meta" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": 17, 70 | "metadata": {}, 71 | "outputs": [ 72 | { 73 | "name": "stdout", 74 | "output_type": "stream", 75 | "text": [ 76 | "[Meta(kind='dataset', name='data02', uri='/leaf01/leaf02/data02'), Meta(kind='group', name='leaf03', uri='/leaf01/leaf02/leaf03')]\n" 77 | ] 78 | } 79 | ], 80 | "source": [ 81 | "prefix='/leaf01/leaf02'\n", 82 | "\n", 83 | "with h5py.File('nested.hdf5', 'r') as f:\n", 84 | " meta = getMeta(f[prefix], prefix)\n", 85 | "\n", 86 | "print(meta)" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": 20, 92 | "metadata": {}, 93 | "outputs": [ 94 | { 95 | "name": "stdout", 96 | "output_type": "stream", 97 | "text": [ 98 | "[Meta(kind='dataset', name='data00', uri='/data00'), Meta(kind='group', name='leaf01', uri='/leaf01'), Meta(kind='dataset', name='data01', uri='/leaf01/data01'), Meta(kind='group', name='leaf02', uri='/leaf01/leaf02'), Meta(kind='dataset', name='data02', uri='/leaf01/leaf02/data02'), Meta(kind='group', name='leaf03', uri='/leaf01/leaf02/leaf03'), Meta(kind='dataset', name='data03', uri='/leaf01/leaf02/leaf03/data03'), Meta(kind='group', name='leaf04', uri='/leaf01/leaf02/leaf03/leaf04'), Meta(kind='dataset', name='data04', uri='/leaf01/leaf02/leaf03/leaf04/data04'), Meta(kind='group', name='leaf05', uri='/leaf01/leaf02/leaf03/leaf04/leaf05')]\n" 99 | ] 100 | } 101 | ], 102 | "source": [ 103 | "with h5py.File('nested.hdf5', 'r') as f:\n", 104 | " metaAll = getMetaAll(f)\n", 105 | "\n", 106 | "print(metaAll)" 107 | ] 108 | }, 109 | { 110 | "cell_type": "markdown", 111 | "metadata": {}, 112 | "source": [ 113 | "# metadata generator" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": 30, 119 | "metadata": {}, 120 | "outputs": [], 121 | "source": [ 122 | "MetaHdf = namedtuple('Meta', ('kind', 'name', 'uri'))\n", 123 | "\n", 124 | "_emptyUriRe = re.compile('//')\n", 125 | "def uriJoin(*parts):\n", 126 | " return _emptyUriRe.sub('/', '/'.join(parts))\n", 127 | "\n", 128 | "def genMetaHdf(group, prefix='/'):\n", 129 | " return (MetaHdf(\n", 130 | " 'group' if isinstance(val, h5py.Group) else 'dataset',\n", 131 | " key,\n", 132 | " uriJoin(prefix, key)\n", 133 | " ) for key,val in group.items())\n", 134 | "\n", 135 | "def genMetaAllHdf(group, prefix='/'):\n", 136 | " yield from genMetaHdf(group, prefix)\n", 137 | " \n", 138 | " for key,val in group.items():\n", 139 | " if isinstance(val, h5py.Group):\n", 140 | " yield from genMetaAllHdf(val, uriJoin(prefix, key))" 141 | ] 142 | }, 143 | { 144 | 
"cell_type": "code", 145 | "execution_count": 27, 146 | "metadata": {}, 147 | "outputs": [ 148 | { 149 | "name": "stdout", 150 | "output_type": "stream", 151 | "text": [ 152 | "Meta(kind='dataset', name='data02', uri='/leaf01/leaf02/data02')\n", 153 | "Meta(kind='group', name='leaf03', uri='/leaf01/leaf02/leaf03')\n" 154 | ] 155 | } 156 | ], 157 | "source": [ 158 | "prefix='/leaf01/leaf02'\n", 159 | "\n", 160 | "with h5py.File('nested.hdf5', 'r') as f:\n", 161 | " for m in genMetaHdf(f[prefix], prefix):\n", 162 | " print(m)" 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": 31, 168 | "metadata": {}, 169 | "outputs": [ 170 | { 171 | "name": "stdout", 172 | "output_type": "stream", 173 | "text": [ 174 | "Meta(kind='dataset', name='data00', uri='/data00')\n", 175 | "Meta(kind='group', name='leaf01', uri='/leaf01')\n", 176 | "Meta(kind='dataset', name='data01', uri='/leaf01/data01')\n", 177 | "Meta(kind='group', name='leaf02', uri='/leaf01/leaf02')\n", 178 | "Meta(kind='dataset', name='data02', uri='/leaf01/leaf02/data02')\n", 179 | "Meta(kind='group', name='leaf03', uri='/leaf01/leaf02/leaf03')\n", 180 | "Meta(kind='dataset', name='data03', uri='/leaf01/leaf02/leaf03/data03')\n", 181 | "Meta(kind='group', name='leaf04', uri='/leaf01/leaf02/leaf03/leaf04')\n", 182 | "Meta(kind='dataset', name='data04', uri='/leaf01/leaf02/leaf03/leaf04/data04')\n", 183 | "Meta(kind='group', name='leaf05', uri='/leaf01/leaf02/leaf03/leaf04/leaf05')\n" 184 | ] 185 | } 186 | ], 187 | "source": [ 188 | "with h5py.File('nested.hdf5', 'r') as f:\n", 189 | " for m in genMetaAllHdf(f):\n", 190 | " print(m)" 191 | ] 192 | }, 193 | { 194 | "cell_type": "markdown", 195 | "metadata": {}, 196 | "source": [ 197 | "# misc" 198 | ] 199 | }, 200 | { 201 | "cell_type": "code", 202 | "execution_count": 5, 203 | "metadata": {}, 204 | "outputs": [ 205 | { 206 | "name": "stdout", 207 | "output_type": "stream", 208 | "text": [ 209 | "/\n", 210 | "data01\n", 211 | "\n", 212 | "leaf02\n", 213 | "\n" 214 | ] 215 | } 216 | ], 217 | "source": [ 218 | "with h5py.File('nested.hdf5', 'r') as f:\n", 219 | " print(f.name)\n", 220 | " for key,val in f['/leaf01'].items():\n", 221 | " print(key)\n", 222 | " print(val)" 223 | ] 224 | }, 225 | { 226 | "cell_type": "code", 227 | "execution_count": 11, 228 | "metadata": {}, 229 | "outputs": [ 230 | { 231 | "ename": "AttributeError", 232 | "evalue": "can't set attribute", 233 | "output_type": "error", 234 | "traceback": [ 235 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", 236 | "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", 237 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mm\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mMeta\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mm\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mkind\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'group'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mm\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 238 | "\u001b[0;31mAttributeError\u001b[0m: can't set attribute" 239 | ] 240 | } 241 | ], 242 | "source": [ 243 | "m = Meta()\n", 244 | "m.kind = 'group'\n", 245 | "print(m)" 246 | ] 247 | }, 248 | { 249 | "cell_type": "code", 250 | "execution_count": null, 
251 | "metadata": {}, 252 | "outputs": [], 253 | "source": [] 254 | } 255 | ], 256 | "metadata": { 257 | "kernelspec": { 258 | "display_name": "Python 3.7", 259 | "language": "python", 260 | "name": "python3" 261 | }, 262 | "language_info": { 263 | "codemirror_mode": { 264 | "name": "ipython", 265 | "version": 3 266 | }, 267 | "file_extension": ".py", 268 | "mimetype": "text/x-python", 269 | "name": "python", 270 | "nbconvert_exporter": "python", 271 | "pygments_lexer": "ipython3", 272 | "version": "3.7.9" 273 | } 274 | }, 275 | "nbformat": 4, 276 | "nbformat_minor": 4 277 | } 278 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal=1 3 | 4 | # ref: https://flake8.pycqa.org/en/2.5.5/warnings.html#error-codes 5 | # E302: "expected 2 blank lines, found 0" 6 | [flake8] 7 | extend-ignore = E301, E302, E305, E306 8 | max-line-length=200 9 | 10 | [metadata] 11 | description_file = README.md 12 | license_file = LICENSE 13 | 14 | # config for autopep8 15 | # autopep8 doesn't seem to know about extend-ignore 16 | [pycodestyle] 17 | ignore = E301, E302, E305, E306 18 | max-line-length=200 19 | 20 | [tool:pytest] 21 | junit_family=xunit2 22 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """ jupyterLab_hdf : HDF5 api for Jupyter/Jupyterlab 2 | 3 | Copyright (c) Max Klein. 4 | Distributed under the terms of the Modified BSD License. 5 | """ 6 | 7 | import setuptools 8 | from setupbase import create_cmdclass, find_packages, get_version 9 | 10 | with open("README.md", "r") as fh: 11 | long_description = fh.read() 12 | 13 | data_files_spec = [ 14 | ("etc/jupyter/jupyter_notebook_config.d", "jupyter-config/jupyter_notebook_config.d", "jupyterlab_hdf.json"), 15 | ] 16 | 17 | cmdclass = create_cmdclass(data_files_spec=data_files_spec) 18 | 19 | setup_dict = dict( 20 | name="jupyterlab_hdf", 21 | description="A Jupyter Notebook server extension that provides APIs for fetching hdf5 contents and data. Built on h5py.", 22 | long_description=long_description, 23 | long_description_content_type="text/markdown", 24 | packages=find_packages(), 25 | cmdclass=cmdclass, 26 | author="Max Klein", 27 | url="https://github.com/jupyterlab/jupyterlab-hdf5", 28 | license="BSD", 29 | platforms="Linux, Mac OS X, Windows", 30 | keywords=["Jupyter", "JupyterLab", "hdf5"], 31 | python_requires=">=3.6", 32 | classifiers=[ 33 | "Intended Audience :: Developers", 34 | "Intended Audience :: System Administrators", 35 | "Intended Audience :: Science/Research", 36 | "License :: OSI Approved :: BSD License", 37 | "Programming Language :: Python", 38 | "Programming Language :: Python :: 3", 39 | ], 40 | install_requires=[ 41 | "h5grove==0.0.14", 42 | "h5py", 43 | "notebook<7", 44 | "numpy", 45 | "tornado", 46 | ], 47 | extras_require={ 48 | "dev": [ 49 | "black", 50 | "bump2version", 51 | "pytest", 52 | "requests", 53 | ] 54 | }, 55 | ) 56 | 57 | 58 | setuptools.setup(version=get_version("jupyterlab_hdf/_version.py"), **setup_dict) 59 | -------------------------------------------------------------------------------- /src/AttributeViewer.tsx: -------------------------------------------------------------------------------- 1 | // Copyright (c) Jupyter Development Team. 2 | // Distributed under the terms of the Modified BSD License. 
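//
// Usage sketch (illustrative only; the attribute records below are
// hypothetical, not taken from this repo). The widget is constructed from a
// plain array of attribute entries and attached like any Lumino widget:
//
//   import { Widget } from '@lumino/widgets';
//
//   const viewer = new AttributeViewer([
//     { name: 'units', dtype: '|S3', value: 'm/s' },
//     { name: 'scale', dtype: '<c16', value: [1.0, -2.0] }, // shown as '1-2i'
//   ]);
//   Widget.attach(viewer, document.body);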
3 | 
4 | import React from 'react';
5 | import { ReactWidget } from '@jupyterlab/apputils';
6 | import { convertValuesToString, isComplexDtype } from './complex';
7 | import { AttributeValue } from './hdf';
8 | 
9 | interface IAttribute {
10 |   dtype: string;
11 |   name: string;
12 |   value: AttributeValue;
13 | }
14 | 
15 | class AttributeViewer extends ReactWidget {
16 |   readonly attributes: IAttribute[];
17 | 
18 |   constructor(attributes: IAttribute[]) {
19 |     super();
20 |     this.attributes = attributes;
21 |     this.addClass('jhdf-attribute-table-container');
22 |   }
23 | 
24 |   render(): JSX.Element {
25 |     return (
26 |       <table>
27 |         <thead>
28 |           <tr>
29 |             <th colSpan={2}>
30 |               Attributes
31 |             </th>
32 |           </tr>
33 |         </thead>
34 |         <tbody>
35 |           {this.attributes.length === 0 ? (
36 |             <tr>
37 |               <td colSpan={2}>No attributes.</td>
38 |             </tr>
39 |           ) : (
40 |             this.attributes.map(({ name, value, dtype }): JSX.Element => {
41 |               const valueToDisplay = isComplexDtype(dtype)
42 |                 ? convertValuesToString(value)
43 |                 : value;
44 |               return (
45 |                 <tr key={name}>
46 |                   <td>{name}</td>
47 |                   <td>{JSON.stringify(valueToDisplay, null, ' ')}</td>
48 |                 </tr>
49 |               );
50 |             })
51 |           )}
52 |         </tbody>
53 |       </table>
54 | ); 55 | } 56 | } 57 | 58 | export default AttributeViewer; 59 | -------------------------------------------------------------------------------- /src/browser.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) Jupyter Development Team. 2 | // Distributed under the terms of the Modified BSD License. 3 | 4 | import { Message } from "@lumino/messaging"; 5 | 6 | import { ISignal, Signal } from "@lumino/signaling"; 7 | 8 | import { PanelLayout, Widget } from "@lumino/widgets"; 9 | 10 | // import { ToolbarButton } from '@jupyterlab/apputils'; 11 | 12 | import { FileBrowser } from "@jupyterlab/filebrowser"; 13 | 14 | import { HdfDrive } from "./contents"; 15 | import { localAbsPath, parseHdfQuery } from "./hdf"; 16 | 17 | const FACTORY = "HDF Dataset"; 18 | const DATA_MIME = "application/x-hdf5.dataset"; 19 | 20 | /** 21 | * Widget for hosting the Hdf filebrowser. 22 | */ 23 | export class HdfSidepanel extends Widget { 24 | constructor(browser: FileBrowser, drive: HdfDrive) { 25 | super(); 26 | this.addClass("jhdf-sidepanel"); 27 | this.layout = new PanelLayout(); 28 | (this.layout as PanelLayout).addWidget(browser); 29 | this._browser = browser; 30 | this._drive = drive; 31 | 32 | this._monkeyPatch(); 33 | 34 | // Create an editable name for the Hdf file path. 35 | this.fpathInput = new hdfFpathInput(browser); 36 | this.fpathInput.node.title = "Click to edit file path"; 37 | this._browser.toolbar.addItem("fpathInput", this.fpathInput); 38 | this.fpathInput.pathChanged.connect(this._onFpathChanged, this); 39 | 40 | // // Add our own refresh button, since the other one is hidden 41 | // // via CSS. 42 | // let refresher = new ToolbarButton({ 43 | // iconClassName: 'jp-RefreshIcon jp-Icon jp-Icon-16', 44 | // onClick: () => { 45 | // this._browser.model.refresh(); 46 | // }, 47 | // tooltip: 'Refresh File List' 48 | // }); 49 | // refresher.addClass('jhdf-toolbar-item'); 50 | // this._browser.toolbar.addItem('gh-refresher', refresher); 51 | } 52 | 53 | /** 54 | * An editable widget hosting the current file path. 55 | */ 56 | readonly fpathInput: hdfFpathInput; 57 | 58 | /** 59 | * The inner filebrowser widget that HdfSidepanel wraps 60 | */ 61 | get browser(): FileBrowser { 62 | return this._browser; 63 | } 64 | 65 | private _monkeyPatch() { 66 | const handleDblClick = async (evt: Event): Promise => { 67 | const event = evt as MouseEvent; 68 | // Do nothing if it's not a left mouse press. 69 | if (event.button !== 0) { 70 | return; 71 | } 72 | 73 | // Do nothing if any modifier keys are pressed. 74 | if (event.ctrlKey || event.shiftKey || event.altKey || event.metaKey) { 75 | return; 76 | } 77 | 78 | // Stop the event propagation. 79 | event.preventDefault(); 80 | event.stopPropagation(); 81 | 82 | const item = this._browser.modelForClick(event); 83 | if (!item) { 84 | return; 85 | } 86 | if (item.type === "directory") { 87 | this._browser.model 88 | .cd(localAbsPath(item.path)) 89 | .catch(error => console.error(error)); 90 | } else { 91 | const factory = item.mimetype === DATA_MIME ? FACTORY : "default"; 92 | this._browser.model.manager.openOrReveal(item.path, factory); 93 | } 94 | }; 95 | 96 | const listing = this._browser.layout.widgets[3]; 97 | listing.node.addEventListener("dblclick", handleDblClick, true); 98 | } 99 | 100 | /** 101 | * React to a change in fpath. 
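 *
 * For example, typing `data/sim.h5` (a hypothetical path) into the input
 * and pressing Enter results in a `cd('/data/sim.h5')` on the browser
 * model; if the cd fails, the error panel is shown below the listing
 * instead.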
102 | */ 103 | private _onFpathChanged() { 104 | if (this._changeGuard) { 105 | return; 106 | } 107 | this._changeGuard = true; 108 | 109 | this._browser.model 110 | .cd(`/${this.fpathInput.path}`) 111 | .then(() => { 112 | this._changeGuard = false; 113 | this._updateErrorPanel(); 114 | // Once we have the new listing, maybe give the file listing 115 | // focus. Once the input element is removed, the active element 116 | // appears to revert to document.body. If the user has subsequently 117 | // focused another element, don't focus the browser listing. 118 | if (document.activeElement === document.body) { 119 | const listing = this._browser.layout.widgets[3]; 120 | listing.node.focus(); 121 | } 122 | }) 123 | .catch((err: Error) => { 124 | const msg = 125 | `Failed to open HDF5 file at ${this.fpathInput.path}` + err.message; 126 | console.error(msg); 127 | this._updateErrorPanel(err); 128 | }); 129 | } 130 | 131 | /** 132 | * React to a change in the validity of the hdf file. 133 | */ 134 | private _updateErrorPanel(err?: Error): void { 135 | const localPath = this._browser.model.manager.services.contents.localPath( 136 | this._browser.model.path 137 | ); 138 | const params = parseHdfQuery(localPath); 139 | 140 | // If we currently have an error panel, remove it. 141 | if (this._errorPanel) { 142 | const listing = this._browser.layout.widgets[3]; 143 | listing.node.removeChild(this._errorPanel.node); 144 | this._errorPanel.dispose(); 145 | this._errorPanel = null; 146 | } 147 | 148 | if (err) { 149 | const msg = 150 | `Failed to open HDF5 file at ${this.fpathInput.path}` + err.message; 151 | this._initErrorPanel(msg); 152 | return; 153 | } 154 | 155 | if (!this._drive.validFile) { 156 | // If we have an invalid file path, make an error msg. 157 | const msg = `No file found at path: ${params.fpath}`; 158 | this._initErrorPanel(msg); 159 | return; 160 | } 161 | } 162 | 163 | private _initErrorPanel(msg: string) { 164 | this._errorPanel = new HdfErrorPanel(msg); 165 | const listing = this._browser.layout.widgets[3]; 166 | listing.node.appendChild(this._errorPanel.node); 167 | } 168 | 169 | private _browser: FileBrowser; 170 | private _drive: HdfDrive; 171 | private _errorPanel: HdfErrorPanel | null; 172 | private _changeGuard = false; 173 | } 174 | 175 | /** 176 | * A widget that hosts an editable field, 177 | * used to host the currently active Hdf 178 | * file path. 179 | */ 180 | export class hdfFpathInput extends Widget { 181 | constructor(browser: FileBrowser) { 182 | super(); 183 | this._browser = browser; 184 | 185 | this.addClass("jhdf-userInput"); 186 | const layout = (this.layout = new PanelLayout()); 187 | const wrapper = new Widget(); 188 | wrapper.addClass("jhdf-userInput-wrapper"); 189 | this._input = document.createElement("input"); 190 | this._input.placeholder = "HDF5 Path"; 191 | this._input.className = "jhdf-userInput-input"; 192 | wrapper.node.appendChild(this._input); 193 | layout.addWidget(wrapper); 194 | 195 | // restore the input from browser path 196 | this._syncInputToBrowser(); 197 | 198 | // sync to future changes to browser path 199 | this._browser.model.pathChanged.connect(this._onBrowserPathChanged, this); 200 | } 201 | 202 | /** 203 | * The current name of the field. 
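 *
 * Setting this property emits `pathChanged` with the old and new values
 * (unless the new value already matches the browser path), e.g.
 * `input.path = '/data/sim.h5'` (hypothetical path) both updates the text
 * field and notifies anything connected to `pathChanged`.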
204 | */ 205 | get path(): string { 206 | return this._input.value; 207 | } 208 | set path(val: string) { 209 | if (val === this._browser.model.path) { 210 | return; 211 | } 212 | 213 | const old = this._path; 214 | this._input.value = val; 215 | 216 | this._pathChanged.emit({ 217 | oldValue: old, 218 | newValue: val 219 | }); 220 | } 221 | 222 | /** 223 | * A signal for when the name changes. 224 | */ 225 | get pathChanged(): ISignal { 226 | return this._pathChanged; 227 | } 228 | 229 | /** 230 | * Handle the DOM events for the widget. 231 | * 232 | * @param event - The DOM event sent to the widget. 233 | * 234 | * #### Notes 235 | * This method implements the DOM `EventListener` interface and is 236 | * called in response to events on the main area widget's node. It should 237 | * not be called directly by user code. 238 | */ 239 | handleEvent(event: KeyboardEvent): void { 240 | switch (event.type) { 241 | case "keydown": 242 | if (event.key === "Enter") { 243 | event.stopPropagation(); 244 | event.preventDefault(); 245 | this.path = this._input.value; 246 | this._input.blur(); 247 | } 248 | break; 249 | case "blur": 250 | event.stopPropagation(); 251 | event.preventDefault(); 252 | this.path = this._input.value; 253 | break; 254 | case "focus": 255 | event.stopPropagation(); 256 | event.preventDefault(); 257 | this._input.select(); 258 | break; 259 | default: 260 | break; 261 | } 262 | } 263 | 264 | /** 265 | * Handle `after-attach` messages for the widget. 266 | */ 267 | protected onAfterAttach(msg: Message): void { 268 | this._input.addEventListener("keydown", this); 269 | this._input.addEventListener("blur", this); 270 | this._input.addEventListener("focus", this); 271 | } 272 | 273 | /** 274 | * Handle `before-detach` messages for the widget. 
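 *
 * Mirrors `onAfterAttach`: the same three listeners ('keydown', 'blur',
 * 'focus') are removed, so the widget can be detached and re-attached
 * cleanly.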
275 | */ 276 | protected onBeforeDetach(msg: Message): void { 277 | this._input.removeEventListener("keydown", this); 278 | this._input.removeEventListener("blur", this); 279 | this._input.removeEventListener("focus", this); 280 | } 281 | 282 | private _syncInputToBrowser() { 283 | const { fpath } = parseHdfQuery(this._browser.model.path); 284 | this._path = fpath; 285 | this._input.value = fpath; 286 | } 287 | 288 | private _onBrowserPathChanged() { 289 | if (this._path === this._browser.model.path) { 290 | return; 291 | } 292 | 293 | this._syncInputToBrowser(); 294 | } 295 | 296 | private _browser: FileBrowser; 297 | private _path = ""; 298 | private _pathChanged = new Signal< 299 | this, 300 | { newValue: string; oldValue: string } 301 | >(this); 302 | private _input: HTMLInputElement; 303 | } 304 | 305 | /** 306 | * A widget hosting an error panel for the browser, 307 | * used if there is an invalid file path 308 | */ 309 | export class HdfErrorPanel extends Widget { 310 | constructor(message: string) { 311 | super(); 312 | this.addClass("jhdf-errorPanel"); 313 | const image = document.createElement("div"); 314 | const text = document.createElement("div"); 315 | image.className = "jhdf-errorPanel-image"; 316 | text.className = "jhdf-errorPanel-text"; 317 | text.textContent = message; 318 | this.node.appendChild(image); 319 | this.node.appendChild(text); 320 | } 321 | } 322 | -------------------------------------------------------------------------------- /src/complex.ts: -------------------------------------------------------------------------------- 1 | export type Complex = [number, number]; 2 | 3 | type ComplexArrayOrVal = ComplexArrayOrVal[] | Complex; 4 | type StringArrayOrVal = StringArrayOrVal[] | string; 5 | 6 | export function convertValuesToString(c: ComplexArrayOrVal): StringArrayOrVal { 7 | if (isComplexValue(c)) { 8 | return `${c[0]}${c[1] >= 0 ? '+' : ''}${c[1]}i`; 9 | } 10 | 11 | return c.map(inner => convertValuesToString(inner)); 12 | } 13 | 14 | export function isComplexDtype(dtype: string): boolean { 15 | return dtype.includes('c'); 16 | } 17 | 18 | export function isComplexArray( 19 | data: (number | Complex)[][], 20 | dtype: string 21 | ): data is Complex[][] { 22 | return isComplexDtype(dtype); 23 | } 24 | 25 | function isComplexValue(c: ComplexArrayOrVal): c is Complex { 26 | return typeof c[0] === 'number'; 27 | } 28 | -------------------------------------------------------------------------------- /src/contents.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) Jupyter Development Team. 2 | // Distributed under the terms of the Modified BSD License. 3 | 4 | import { Signal, ISignal } from '@lumino/signaling'; 5 | 6 | import { PathExt, URLExt } from '@jupyterlab/coreutils'; 7 | 8 | import { DocumentRegistry } from '@jupyterlab/docregistry'; 9 | 10 | import { Contents, ServerConnection } from '@jupyterlab/services'; 11 | 12 | import { 13 | hdfContentsRequest, 14 | HdfDirectoryListing, 15 | parseHdfQuery, 16 | HdfContents, 17 | } from './hdf'; 18 | 19 | /** 20 | * A Contents.IDrive implementation that serves as a read-only 21 | * view onto HDF5 files. 22 | */ 23 | export class HdfDrive implements Contents.IDrive { 24 | /** 25 | * Construct a new drive object. 26 | * 27 | * @param options - The options used to initialize the object. 28 | */ 29 | constructor(registry: DocumentRegistry) { 30 | this._serverSettings = ServerConnection.makeSettings(); 31 | } 32 | 33 | /** 34 | * The name of the drive. 
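 *
 * JupyterLab prefixes paths on this drive with the drive name, e.g.
 * `Hdf:data/sim.h5?uri=/group` (hypothetical file); `localAbsPath` in
 * ./hdf strips the `Hdf:` specifier back off when building requests.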
35 | */ 36 | get name(): 'Hdf' { 37 | return 'Hdf'; 38 | } 39 | 40 | /** 41 | * State for whether the file is valid. 42 | */ 43 | get validFile(): boolean { 44 | return this._validFile; 45 | } 46 | 47 | /** 48 | * Settings for the notebook server. 49 | */ 50 | readonly serverSettings: ServerConnection.ISettings; 51 | 52 | /** 53 | * A signal emitted when a file operation takes place. 54 | */ 55 | get fileChanged(): ISignal { 56 | return this._fileChanged; 57 | } 58 | 59 | /** 60 | * Test whether the manager has been disposed. 61 | */ 62 | get isDisposed(): boolean { 63 | return this._isDisposed; 64 | } 65 | 66 | /** 67 | * Dispose of the resources held by the manager. 68 | */ 69 | dispose(): void { 70 | if (this.isDisposed) { 71 | return; 72 | } 73 | this._isDisposed = true; 74 | Signal.clearData(this); 75 | } 76 | 77 | /** 78 | * Get a file or directory. 79 | * 80 | * @param path: The path to the file. 81 | * 82 | * @param options: The options used to fetch the file. 83 | * 84 | * @returns A promise which resolves with the file content. 85 | */ 86 | get( 87 | path: string, 88 | options?: HdfDrive.IFetchOptions 89 | ): Promise { 90 | const params = parseHdfQuery(path); 91 | 92 | if (!params.fpath || !PathExt.normalize(params.fpath)) { 93 | return Promise.resolve(Private.dummyDirectory); 94 | } 95 | 96 | return hdfContentsRequest(params, this._serverSettings) 97 | .then(contents => { 98 | this._validFile = true; 99 | return Private.hdfContentsToJupyterContents(path, contents); 100 | }) 101 | .catch((err: ServerConnection.ResponseError) => { 102 | this._validFile = false; 103 | if (err.response.status === 403) { 104 | console.warn(err.message); 105 | return Private.dummyDirectory; 106 | } else { 107 | console.error(err.message); 108 | return Promise.reject(err); 109 | } 110 | }); 111 | } 112 | 113 | /** 114 | * Get an encoded download url given a file path. 115 | * 116 | * @param path - An absolute POSIX file path on the server. 117 | * 118 | * #### Notes 119 | * It is expected that the path contains no relative paths, 120 | * use [[ContentsManager.getAbsolutePath]] to get an absolute 121 | * path if necessary. 122 | */ 123 | getDownloadUrl(path: string): Promise { 124 | // Parse the path into user/repo/path 125 | // const resource = parsePath(path); 126 | 127 | return Promise.resolve( 128 | URLExt.join(this._serverSettings.baseUrl, 'hdf', 'contents', path) 129 | ); 130 | } 131 | 132 | /** 133 | * Create a new untitled file or directory in the specified directory path. 134 | * 135 | * @param options: The options used to create the file. 136 | * 137 | * @returns A promise which resolves with the created file content when the 138 | * file is created. 139 | */ 140 | newUntitled(options: Contents.ICreateOptions = {}): Promise { 141 | return Promise.reject('Hdf file is read only'); 142 | } 143 | 144 | /** 145 | * Delete a file. 146 | * 147 | * @param path - The path to the file. 148 | * 149 | * @returns A promise which resolves when the file is deleted. 150 | */ 151 | delete(path: string): Promise { 152 | return Promise.reject('Hdf file is read only'); 153 | } 154 | 155 | /** 156 | * Rename a file or directory. 157 | * 158 | * @param path - The original file path. 159 | * 160 | * @param newPath - The new file path. 161 | * 162 | * @returns A promise which resolves with the new file contents model when 163 | * the file is renamed. 164 | */ 165 | rename(path: string, newPath: string): Promise { 166 | return Promise.reject('Hdf file is read only'); 167 | } 168 | 169 | /** 170 | * Save a file. 
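 *
 * Note: like every mutating operation on this drive, this always rejects,
 * since the drive is a read-only view onto HDF5 files.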
171 | * 172 | * @param path - The desired file path. 173 | * 174 | * @param options - Optional overrides to the model. 175 | * 176 | * @returns A promise which resolves with the file content model when the 177 | * file is saved. 178 | */ 179 | save( 180 | path: string, 181 | options: Partial 182 | ): Promise { 183 | return Promise.reject('Hdf file is read only'); 184 | } 185 | 186 | /** 187 | * Copy a file into a given directory. 188 | * 189 | * @param path - The original file path. 190 | * 191 | * @param toDir - The destination directory path. 192 | * 193 | * @returns A promise which resolves with the new contents model when the 194 | * file is copied. 195 | */ 196 | copy(fromFile: string, toDir: string): Promise { 197 | return Promise.reject('Hdf file is read only'); 198 | } 199 | 200 | /** 201 | * Create a checkpoint for a file. 202 | * 203 | * @param path - The path of the file. 204 | * 205 | * @returns A promise which resolves with the new checkpoint model when the 206 | * checkpoint is created. 207 | */ 208 | createCheckpoint(path: string): Promise { 209 | return Promise.reject('Hdf file is read only'); 210 | } 211 | 212 | /** 213 | * List available checkpoints for a file. 214 | * 215 | * @param path - The path of the file. 216 | * 217 | * @returns A promise which resolves with a list of checkpoint models for 218 | * the file. 219 | */ 220 | listCheckpoints(path: string): Promise { 221 | return Promise.resolve([]); 222 | } 223 | 224 | /** 225 | * Restore a file to a known checkpoint state. 226 | * 227 | * @param path - The path of the file. 228 | * 229 | * @param checkpointID - The id of the checkpoint to restore. 230 | * 231 | * @returns A promise which resolves when the checkpoint is restored. 232 | */ 233 | restoreCheckpoint(path: string, checkpointID: string): Promise { 234 | return Promise.reject('Hdf file is read only'); 235 | } 236 | 237 | /** 238 | * Delete a checkpoint for a file. 239 | * 240 | * @param path - The path of the file. 241 | * 242 | * @param checkpointID - The id of the checkpoint to delete. 243 | * 244 | * @returns A promise which resolves when the checkpoint is deleted. 245 | */ 246 | deleteCheckpoint(path: string, checkpointID: string): Promise { 247 | return Promise.reject('Read only'); 248 | } 249 | 250 | private _validFile = false; 251 | private _serverSettings: ServerConnection.ISettings; 252 | private _isDisposed = false; 253 | private _fileChanged = new Signal(this); 254 | } 255 | 256 | export namespace HdfDrive { 257 | export interface IFetchOptions extends Contents.IFetchOptions { 258 | uri?: string; 259 | } 260 | } 261 | 262 | /** 263 | * Private namespace for utility functions. 264 | */ 265 | namespace Private { 266 | /** 267 | * A dummy contents model indicating an invalid or 268 | * nonexistent repository. 269 | */ 270 | export const dummyDirectory: Contents.IModel = { 271 | type: 'directory', 272 | path: '', 273 | name: '', 274 | format: 'json', 275 | content: [], 276 | created: '', 277 | writable: false, 278 | last_modified: '', 279 | mimetype: '', 280 | }; 281 | 282 | /** 283 | * Given a JSON HdfContents object returned by our Hdf api, 284 | * convert it to the Jupyter Contents.IModel. 285 | * 286 | * @param path - the path to the contents model in the repository. 287 | * 288 | * @param contents - the HdfContents object. 289 | * 290 | * @returns a Contents.IModel object. 
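 *
 * For example (hypothetical values), a dataset entry like
 * `{ type: 'dataset', name: 'data01', uri: '/leaf01/data01', content: ... }`
 * becomes a file model with mimetype `application/x-hdf5.dataset`, while a
 * `group` entry becomes a directory model with `content: null`.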
291 | */ 292 | export function hdfContentsToJupyterContents( 293 | path: string, 294 | contents: HdfContents | HdfDirectoryListing 295 | ): Contents.IModel { 296 | if (Array.isArray(contents)) { 297 | // If we have an array, it is a directory of HdfContents. 298 | // Iterate over that and convert all of the items in the array 299 | const { fpath } = parseHdfQuery(path); 300 | return { 301 | name: PathExt.basename(fpath), 302 | path: path, 303 | format: 'json', 304 | type: 'directory', 305 | writable: false, 306 | created: '', 307 | last_modified: '', 308 | mimetype: '', 309 | content: contents.map(c => { 310 | return hdfContentsToJupyterContents(fpath + `?uri=${c.uri}`, c); 311 | }), 312 | }; 313 | } else if (contents.type === 'dataset') { 314 | return { 315 | name: contents.name, 316 | path: path, 317 | format: 'json', 318 | type: 'file', 319 | created: '', 320 | writable: false, 321 | last_modified: '', 322 | mimetype: 'application/x-hdf5.dataset', 323 | content: contents.content, 324 | }; 325 | } else if (contents.type === 'external_link') { 326 | return { 327 | name: contents.name, 328 | path: path, 329 | format: 'json', 330 | type: 'file', 331 | created: '', 332 | writable: false, 333 | last_modified: '', 334 | mimetype: 'application/json', 335 | content: contents.content, 336 | }; 337 | } else if (contents.type === 'soft_link') { 338 | return { 339 | name: contents.name, 340 | path: path, 341 | format: 'json', 342 | type: 'file', 343 | created: '', 344 | writable: false, 345 | last_modified: '', 346 | mimetype: 'application/json', 347 | content: contents.content, 348 | }; 349 | } else if (contents.type === 'group') { 350 | // If it is a directory, convert to that. 351 | return { 352 | name: contents.name, 353 | path: path, 354 | format: 'json', 355 | type: 'directory', 356 | created: '', 357 | writable: false, 358 | last_modified: '', 359 | mimetype: '', 360 | content: null, 361 | }; 362 | } else { 363 | throw makeError( 364 | 500, 365 | `"${(contents as any).name}" has an unexpected type: ${ 366 | (contents as any).type 367 | }` 368 | ); 369 | } 370 | } 371 | 372 | /** 373 | * Wrap an API error in a hacked-together error object 374 | * masquerading as an `ServerConnection.ResponseError`. 375 | */ 376 | export function makeError( 377 | code: number, 378 | message: string 379 | ): ServerConnection.ResponseError { 380 | const response = new Response(message, { 381 | status: code, 382 | statusText: message, 383 | }); 384 | return new ServerConnection.ResponseError(response, message); 385 | } 386 | } 387 | -------------------------------------------------------------------------------- /src/dataregistry.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) Jupyter Development Team. 2 | // Distributed under the terms of the Modified BSD License. 3 | 4 | // import { from, of } from "rxjs"; 5 | 6 | // import { map } from "rxjs/operators"; 7 | 8 | // import { ServerConnection } from "@jupyterlab/services"; 9 | 10 | // import { 11 | // createConverter, 12 | // relativeNestedDataType, 13 | // resolveDataType 14 | // } from "@jupyterlab/dataregistry"; 15 | 16 | // import { 17 | // widgetDataType, 18 | // IRegistry, 19 | // labelDataType 20 | // } from "@jupyterlab/dataregistry-extension"; 21 | 22 | // import { HdfContents, hdfContentsRequest, HdfDirectoryListing } from "./hdf"; 23 | 24 | // import { createHdfGrid } from "./dataset"; 25 | 26 | // /** 27 | // * Settings for the notebook server. 
28 | // */ 29 | // const serverSettings = ServerConnection.makeSettings(); 30 | 31 | // export function parseHdfRegistryUrl(url: URL): { fpath: string } & HdfContents { 32 | // if ( 33 | // url.protocol === "file:" && 34 | // (url.pathname.endsWith(".hdf5") || url.pathname.endsWith(".h5")) 35 | // ) { 36 | // return { 37 | // fpath: url.pathname, 38 | // type: url.searchParams.get("type") === "dataset" ? "dataset" : "group", 39 | // name: url.searchParams.get("name") || "", 40 | // uri: url.searchParams.get("uri") || "/", 41 | // content: JSON.parse(url.searchParams.get("content") || null) 42 | // }; 43 | // } 44 | // } 45 | 46 | // const groupConverter = createConverter( 47 | // { from: resolveDataType, to: relativeNestedDataType }, 48 | // ({ url }) => { 49 | // const params = parseHdfRegistryUrl(url); 50 | // if (!params) { 51 | // return null; 52 | // } 53 | 54 | // const { fpath, uri, type } = params; 55 | // if (type === "group") { 56 | // return from(hdfContentsRequest({ fpath, uri }, serverSettings)).pipe( 57 | // map((hdfContents: HdfDirectoryListing) => 58 | // hdfContents.map( 59 | // hdfContent => 60 | // `?uri=${hdfContent.uri}&type=${hdfContent.type}&content=${hdfContent.content}` 61 | // ) 62 | // ) 63 | // ); 64 | // } 65 | 66 | // return null; 67 | // } 68 | // ); 69 | 70 | // const labelConverter = createConverter( 71 | // { from: resolveDataType, to: labelDataType }, 72 | // ({ url }) => { 73 | // const params = parseHdfRegistryUrl(url); 74 | // if (!params) { 75 | // return null; 76 | // } 77 | // // Return the last part of the path as the label 78 | // // or the last part of the file path, if that is empty 79 | // const lastPath = params.uri.split("/").pop(); 80 | // const lastFilePath = params.fpath.split("/").pop(); 81 | // return of(lastPath || lastFilePath); 82 | // } 83 | // ); 84 | 85 | // const datasetConverter = createConverter( 86 | // { from: resolveDataType, to: widgetDataType }, 87 | // ({ url }) => { 88 | // const params = parseHdfRegistryUrl(url); 89 | // if (!params) { 90 | // return null; 91 | // } 92 | 93 | // const { fpath, uri, type } = params; 94 | // if (type === "dataset") { 95 | // return { 96 | // data: () => createHdfGrid({ fpath, uri }), 97 | // type: "Grid" 98 | // }; 99 | // } 100 | 101 | // return null; 102 | // } 103 | // ); 104 | 105 | // export function addHdfConverters(dataRegistry: IRegistry): void { 106 | // dataRegistry.addConverter(groupConverter, datasetConverter, labelConverter); 107 | // } 108 | -------------------------------------------------------------------------------- /src/dataset.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) Jupyter Development Team. 2 | // Distributed under the terms of the Modified BSD License. 
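//
// Orientation note (added): the dataset model below addresses data with
// numpy-style index strings. For a 3-d dataset the default view is
// '0, :, :' (the last two axes shown as a grid), and visible cells are
// fetched lazily in square blocks via sub-index strings like '0:100, 0:100'.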
3 | 4 | import { PromiseDelegate, Token } from '@lumino/coreutils'; 5 | 6 | import { 7 | BasicKeyHandler, 8 | BasicMouseHandler, 9 | BasicSelectionModel, 10 | DataGrid, 11 | DataModel, 12 | } from '@lumino/datagrid'; 13 | 14 | import { Signal } from '@lumino/signaling'; 15 | 16 | import { 17 | IWidgetTracker, 18 | MainAreaWidget, 19 | Toolbar, 20 | ToolbarButton, 21 | } from '@jupyterlab/apputils'; 22 | 23 | import { 24 | ABCWidgetFactory, 25 | DocumentRegistry, 26 | DocumentWidget, 27 | IDocumentWidget, 28 | } from '@jupyterlab/docregistry'; 29 | 30 | import { ServerConnection } from '@jupyterlab/services'; 31 | 32 | import { 33 | HdfResponseError, 34 | modalHdfError, 35 | modalResponseError, 36 | modalValidationFail, 37 | ModalResult, 38 | } from './exception'; 39 | 40 | import { 41 | datasetMetaEmpty, 42 | hdfDataRequest, 43 | hdfMetaRequest, 44 | IDataParameters, 45 | IMetaParameters, 46 | IDatasetMeta, 47 | parseHdfQuery, 48 | } from './hdf'; 49 | 50 | import { noneSlice, slice } from './slice'; 51 | 52 | import { IxInput } from './toolbar'; 53 | import { convertValuesToString, isComplexArray } from './complex'; 54 | import { isDatasetMeta } from './utils'; 55 | 56 | /** 57 | * The CSS class for the data grid widget. 58 | */ 59 | export const HDF_CLASS = 'jhdf-dataGrid'; 60 | 61 | /** 62 | * The CSS class for our HDF5 container. 63 | */ 64 | export const HDF_CONTAINER_CLASS = 'jhdf-container'; 65 | 66 | /** 67 | * Base implementation of the hdf dataset model. 68 | */ 69 | export abstract class HdfDatasetModel extends DataModel { 70 | /** 71 | * Handle actions that should be taken when the context is ready. 72 | */ 73 | init({ 74 | fpath, 75 | uri, 76 | meta, 77 | }: { 78 | fpath: string; 79 | uri: string; 80 | meta: IDatasetMeta; 81 | }): void { 82 | this._fpath = fpath; 83 | this._uri = uri; 84 | this._meta = meta; 85 | 86 | // create a default index string 87 | if (this._meta.ndim < 1) { 88 | this._ixstr = ''; 89 | } else if (this._meta.ndim < 2) { 90 | this._ixstr = ':'; 91 | } else { 92 | this._ixstr = [...Array(this._meta.ndim - 2).fill('0'), ':', ':'].join( 93 | ', ' 94 | ); 95 | } 96 | 97 | // derive metadata for the default ixstr (eg ':, :, ...') from the metadata for no ixstr (eg '...') 98 | const metaIx: IDatasetMeta = { 99 | ...meta, 100 | labels: meta.labels.slice(-2), 101 | ndim: Math.max(meta.ndim, 2), 102 | shape: meta.shape.slice(-2), 103 | size: meta.shape.length 104 | ? meta.shape.slice(-2).reduce((x, y) => x * y) 105 | : meta.size, 106 | }; 107 | 108 | // Refresh wrt the newly set ix and then resolve the ready promise. 109 | this._refresh(metaIx); 110 | this._ready.resolve(undefined); 111 | } 112 | 113 | data(region: DataModel.CellRegion, row: number, col: number): any { 114 | if (region === 'row-header') { 115 | return `${this._labels[0].start + row * this._labels[0].step}`; 116 | } 117 | if (region === 'column-header') { 118 | return `${this._labels[1].start + col * this._labels[1].step}`; 119 | } 120 | if (region === 'corner-header') { 121 | return null; 122 | } 123 | 124 | const relRow = row % this._blockSize; 125 | const relCol = col % this._blockSize; 126 | const rowBlock = (row - relRow) / this._blockSize; 127 | const colBlock = (col - relCol) / this._blockSize; 128 | 129 | if (this._blocks[rowBlock]) { 130 | const block = this._blocks[rowBlock][colBlock]; 131 | if (block !== 'busy') { 132 | if (block) { 133 | // This data has already been loaded. 
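// (Blocks are cached per (rowBlock, colBlock): with the default block
// size of 100, cell (250, 30) lives in block (2, 0) at relative offset
// (50, 30).)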
134 | return this._blocks[rowBlock][colBlock][relRow][relCol]; 135 | } else { 136 | // This data has not yet been loaded, load it. 137 | this._blocks[rowBlock][colBlock] = 'busy'; 138 | this._fetchBlock(rowBlock, colBlock); 139 | } 140 | } 141 | } else { 142 | // This data has not yet been loaded, load it. 143 | this._blocks[rowBlock] = Object(); 144 | this._blocks[rowBlock][colBlock] = 'busy'; 145 | this._fetchBlock(rowBlock, colBlock); 146 | } 147 | 148 | return null; 149 | } 150 | 151 | get ixstr(): string { 152 | return this._ixstr; 153 | } 154 | set ixstr(ixstr: string) { 155 | this.refresh(ixstr); 156 | } 157 | 158 | get meta(): IDatasetMeta { 159 | return this._meta; 160 | } 161 | get metaIx(): IDatasetMeta { 162 | return this._metaIx; 163 | } 164 | 165 | /** 166 | * A promise that resolves when the file editor is ready. 167 | */ 168 | get ready(): Promise { 169 | return this._ready.promise; 170 | } 171 | 172 | async refresh(ixstr: string): Promise { 173 | const meta = await this.getMeta({ 174 | fpath: this._fpath, 175 | uri: this._uri, 176 | ixstr, 177 | }); 178 | if (!isDatasetMeta(meta) || !this.validateMeta(ixstr, meta)) { 179 | this._refreshed.emit(this._ixstr); 180 | return; 181 | } 182 | 183 | this._ixstr = ixstr; 184 | this._refresh(meta); 185 | } 186 | 187 | get refreshed(): Signal { 188 | return this._refreshed; 189 | } 190 | 191 | rowCount(region: DataModel.RowRegion): number { 192 | if (region === 'body') { 193 | return this._n[0]; 194 | } 195 | 196 | return this._nheader[0]; 197 | } 198 | columnCount(region: DataModel.ColumnRegion): number { 199 | if (region === 'body') { 200 | return this._n[1]; 201 | } 202 | 203 | return this._nheader[1]; 204 | } 205 | 206 | protected async getData( 207 | params: IDataParameters 208 | ): Promise { 209 | try { 210 | const data = await hdfDataRequest(params, this._serverSettings); 211 | const { dtype } = this.meta; 212 | if (isComplexArray(data, dtype)) { 213 | return convertValuesToString(data) as string[][]; 214 | } 215 | return data; 216 | } catch (err) { 217 | // on any error, reduce displayed shape to [] in order to prevent unending failed data requests 218 | this._refresh(datasetMetaEmpty()); 219 | 220 | if (err instanceof HdfResponseError) { 221 | return modalHdfError(err); 222 | } else if (err instanceof ServerConnection.ResponseError) { 223 | return modalResponseError(err); 224 | } else { 225 | throw err; 226 | } 227 | } 228 | } 229 | 230 | protected async getMeta( 231 | params: IMetaParameters 232 | ): Promise { 233 | try { 234 | return (await hdfMetaRequest( 235 | params, 236 | this._serverSettings 237 | )) as IDatasetMeta; 238 | } catch (err) { 239 | // on any error, reduce displayed shape to [] in order to prevent unending failed data requests 240 | this._refresh(datasetMetaEmpty()); 241 | 242 | if (err instanceof HdfResponseError) { 243 | return modalHdfError(err); 244 | } else if (err instanceof ServerConnection.ResponseError) { 245 | return modalResponseError(err); 246 | } else { 247 | throw err; 248 | } 249 | } 250 | } 251 | 252 | protected validateMeta(ixstr: string, meta: IDatasetMeta): boolean { 253 | if (meta.ndim > 2) { 254 | modalValidationFail( 255 | `index has too many dimensions. Please specify an index with 2 or fewer slices. ixstr: ${ixstr}, ndim: ${meta.ndim}` 256 | ); 257 | return false; 258 | } 259 | 260 | return true; 261 | } 262 | 263 | /** 264 | * fetch a data block. When data is received, 265 | * the grid will be updated by emitChanged. 
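 *
 * E.g. with rowBlock=2, colBlock=0 and the default block size of 100,
 * this requests the sub-index '200:300, 0:100' (clipped to the body
 * row/column counts) against the current ixstr.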
266 | */ 267 | private _fetchBlock = async (rowBlock: number, colBlock: number) => { 268 | const row = rowBlock * this._blockSize; 269 | const rowStop: number = Math.min( 270 | row + this._blockSize, 271 | this.rowCount('body') 272 | ); 273 | 274 | const column = colBlock * this._blockSize; 275 | const colStop: number = Math.min( 276 | column + this._blockSize, 277 | this.columnCount('body') 278 | ); 279 | 280 | const subixstr = [ 281 | this._hassubix[0] ? `${row}:${rowStop}` : '', 282 | this._hassubix[1] ? `${column}:${colStop}` : '', 283 | ] 284 | .filter(x => x) 285 | .join(', '); 286 | 287 | const params = { 288 | fpath: this._fpath, 289 | uri: this._uri, 290 | ixstr: this._ixstr, 291 | min_ndim: 2, 292 | subixstr, 293 | }; 294 | 295 | const data = await this.getData(params); 296 | this._blocks[rowBlock][colBlock] = data; 297 | 298 | const msg = { 299 | type: 'cells-changed', 300 | region: 'body', 301 | row, 302 | column, 303 | rowSpan: rowStop - row, 304 | columnSpan: colStop - column, 305 | }; 306 | this.emitChanged(msg as DataModel.ChangedArgs); 307 | }; 308 | 309 | private _refresh(meta: IDatasetMeta) { 310 | const oldRowCount = this.rowCount('body'); 311 | const oldColCount = this.columnCount('body'); 312 | 313 | // changing the index meta will also change the result of the row/colCount methods 314 | this._setMetaIx(meta); 315 | 316 | this._blocks = Object(); 317 | 318 | this.emitChanged({ 319 | type: 'rows-removed', 320 | region: 'body', 321 | index: 0, 322 | span: oldRowCount, 323 | }); 324 | this.emitChanged({ 325 | type: 'columns-removed', 326 | region: 'body', 327 | index: 0, 328 | span: oldColCount, 329 | }); 330 | 331 | this.emitChanged({ 332 | type: 'rows-inserted', 333 | region: 'body', 334 | index: 0, 335 | span: this.rowCount('body'), 336 | }); 337 | this.emitChanged({ 338 | type: 'columns-inserted', 339 | region: 'body', 340 | index: 0, 341 | span: this.columnCount('body'), 342 | }); 343 | 344 | this.emitChanged({ 345 | type: 'model-reset', 346 | }); 347 | 348 | this._refreshed.emit(this.ixstr); 349 | } 350 | 351 | private _setMetaIx(meta: IDatasetMeta) { 352 | this._metaIx = meta; 353 | 354 | // all reasoning about 0d vs 1d vs nd goes here 355 | if (this._metaIx.size <= 0) { 356 | // for 0d (empty), use (0, 0) 357 | this._hassubix = [false, false]; 358 | this._n = [0, 0]; 359 | this._nheader = [0, 0]; 360 | this._labels = [noneSlice(), noneSlice()]; 361 | } else if (this._metaIx.shape.length < 1) { 362 | // for 0d (scalar), use (1, 1) 363 | this._hassubix = [false, false]; 364 | this._n = [1, 1]; 365 | this._nheader = [0, 0]; 366 | this._labels = [slice(0, 1), slice(0, 1)]; 367 | } else if (this._metaIx.shape.length < 2) { 368 | // for 1d, use (size, 1) 369 | this._hassubix = [true, false]; 370 | this._n = [this._metaIx.size, 1]; 371 | this._nheader = [0, 1]; 372 | this._labels = [this._metaIx.labels[0], slice(0, 1)]; 373 | } else { 374 | // for 2d up, use standard shape 375 | this._hassubix = [true, true]; 376 | this._n = this._metaIx.shape; 377 | this._nheader = [1, 1]; 378 | this._labels = this._metaIx.labels; 379 | } 380 | } 381 | 382 | protected _fpath: string = ''; 383 | protected _uri: string = ''; 384 | 385 | protected _serverSettings: ServerConnection.ISettings = 386 | ServerConnection.makeSettings(); 387 | 388 | protected _hassubix = [false, false]; 389 | protected _n = [0, 0]; 390 | protected _nheader = [0, 0]; 391 | protected _labels = [noneSlice(), noneSlice()]; 392 | 393 | private _meta: IDatasetMeta; 394 | private _metaIx: IDatasetMeta; 395 | private 
_ixstr: string = ''; 396 | 397 | private _blocks: any = Object(); 398 | private _blockSize: number = 100; 399 | 400 | private _ready = new PromiseDelegate(); 401 | private _refreshed = new Signal(this); 402 | } 403 | 404 | /** 405 | * Subclass that constructs a dataset model from a document context 406 | */ 407 | class HdfDatasetModelFromContext extends HdfDatasetModel { 408 | constructor(context: DocumentRegistry.Context) { 409 | super(); 410 | 411 | this._context = context; 412 | 413 | void context.ready.then(() => { 414 | this._onContextReady(); 415 | }); 416 | } 417 | 418 | /** 419 | * Get the context for the editor widget. 420 | */ 421 | get context(): DocumentRegistry.Context { 422 | return this._context; 423 | } 424 | 425 | /** 426 | * Handle actions that should be taken when the context is ready. 427 | */ 428 | private _onContextReady(): void { 429 | // get the fpath and the uri for this dataset 430 | const { contentsModel } = this.context; 431 | if (!contentsModel) { 432 | return; 433 | } 434 | 435 | const { fpath, uri } = parseHdfQuery(contentsModel.path); 436 | 437 | // unpack the content 438 | const content: IDatasetMeta = this._context.model.toJSON() as any; 439 | 440 | // // Wire signal connections. 441 | // contextModel.contentChanged.connect(this._onContentChanged, this); 442 | 443 | this.init({ fpath, uri, meta: content }); 444 | } 445 | 446 | protected _context: DocumentRegistry.Context; 447 | } 448 | 449 | /** 450 | * Subclass that constructs a dataset model from simple parameters 451 | */ 452 | export class HdfDatasetModelFromPath extends HdfDatasetModel { 453 | constructor(params: IMetaParameters) { 454 | super(); 455 | 456 | this.getMeta(params).then(meta => { 457 | if (isDatasetMeta(meta)) { 458 | this._onMetaReady(params, meta); 459 | } 460 | }); 461 | } 462 | 463 | /** 464 | * Handle actions that should be taken when the model is ready. 465 | */ 466 | private _onMetaReady( 467 | { fpath, uri }: IMetaParameters, 468 | meta: IDatasetMeta 469 | ): void { 470 | this.init({ fpath, uri, meta }); 471 | } 472 | } 473 | 474 | function createHdfGrid(dataModel: HdfDatasetModel): { 475 | grid: DataGrid; 476 | toolbar: Toolbar; 477 | } { 478 | const grid = new DataGrid(); 479 | grid.dataModel = dataModel; 480 | grid.keyHandler = new BasicKeyHandler(); 481 | grid.mouseHandler = new BasicMouseHandler(); 482 | grid.selectionModel = new BasicSelectionModel({ dataModel }); 483 | 484 | const repainter = grid as any; 485 | const boundRepaint = repainter.repaintContent.bind(repainter); 486 | dataModel.refreshed.connect(boundRepaint); 487 | 488 | const toolbar = Private.createToolbar(grid); 489 | 490 | return { grid, toolbar }; 491 | } 492 | 493 | export function createHdfGridFromContext(context: DocumentRegistry.Context): { 494 | grid: DataGrid; 495 | reveal: Promise; 496 | toolbar: Toolbar; 497 | } { 498 | const model = new HdfDatasetModelFromContext(context); 499 | const reveal = context.ready; 500 | 501 | const { grid, toolbar } = createHdfGrid(model); 502 | 503 | return { grid, reveal, toolbar }; 504 | } 505 | 506 | export function createHdfGridFromPath(params: { fpath: string; uri: string }): { 507 | grid: DataGrid; 508 | reveal: Promise; 509 | toolbar: Toolbar; 510 | } { 511 | const model = new HdfDatasetModelFromPath(params); 512 | const reveal = model.ready; 513 | 514 | const { grid, toolbar } = createHdfGrid(model); 515 | 516 | return { grid, reveal, toolbar }; 517 | } 518 | 519 | /** 520 | * A mainarea widget for HDF content widgets. 
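 *
 * A minimal usage sketch (hypothetical file path and uri):
 *
 *   const w = new HdfDatasetMain({ fpath: '/data/sim.h5', uri: '/group/dset' });
 *   app.shell.add(w, 'main');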
521 | */ 522 | export class HdfDatasetMain extends MainAreaWidget { 523 | constructor(params: { fpath: string; uri: string }) { 524 | const { grid: content, reveal, toolbar } = createHdfGridFromPath(params); 525 | 526 | super({ content, reveal, toolbar }); 527 | } 528 | } 529 | 530 | /** 531 | * A document widget for HDF content widgets. 532 | */ 533 | export class HdfDatasetDoc 534 | extends DocumentWidget 535 | implements IDocumentWidget 536 | { 537 | constructor(context: DocumentRegistry.Context) { 538 | const { 539 | grid: content, 540 | reveal, 541 | toolbar, 542 | } = createHdfGridFromContext(context); 543 | 544 | super({ content, context, reveal, toolbar }); 545 | } 546 | } 547 | 548 | /** 549 | * A widget factory for HDF5 data grids. 550 | */ 551 | export class HdfDatasetDocFactory extends ABCWidgetFactory { 552 | /** 553 | * Create a new widget given a context. 554 | */ 555 | protected createNewWidget(context: DocumentRegistry.Context): HdfDatasetDoc { 556 | return new HdfDatasetDoc(context); 557 | } 558 | } 559 | 560 | /** 561 | * A class that tracks hdf5 dataset document widgets. 562 | */ 563 | export interface IHdfDatasetDocTracker extends IWidgetTracker {} 564 | 565 | export const IHdfDatasetDocTracker = new Token( 566 | 'jupyterlab-hdf:IHdfDatasetTracker' 567 | ); 568 | 569 | /** 570 | * A namespace for HDFViewer statics. 571 | */ 572 | export namespace HDFViewer { 573 | /** 574 | * The options for a SyncTeX edit command, 575 | * mapping the hdf position to an editor position. 576 | */ 577 | export interface IPosition { 578 | /** 579 | * The page of the hdf. 580 | */ 581 | page: number; 582 | 583 | /** 584 | * The x-position on the page, in pts, where 585 | * the HDF is assumed to be 72dpi. 586 | */ 587 | x: number; 588 | 589 | /** 590 | * The y-position on the page, in pts, where 591 | * the HDF is assumed to be 72dpi. 592 | */ 593 | y: number; 594 | } 595 | } 596 | 597 | /** 598 | * A namespace for HDF widget private data. 599 | */ 600 | namespace Private { 601 | /** 602 | * Create the node for the HDF widget. 603 | */ 604 | export function createNode(): HTMLElement { 605 | let node = document.createElement('div'); 606 | node.className = HDF_CONTAINER_CLASS; 607 | let hdf = document.createElement('div'); 608 | hdf.className = HDF_CLASS; 609 | node.appendChild(hdf); 610 | node.tabIndex = -1; 611 | return node; 612 | } 613 | 614 | /** 615 | * Create the toolbar for the HDF viewer. 
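 *
 * Currently this just wires an IxInput (the numpy-style slice box) to the
 * grid; the commented-out previous/next buttons below appear to be left
 * over from the PDF viewer this widget was adapted from.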
616 | */ 617 | export function createToolbar(grid: DataGrid): Toolbar { 618 | const toolbar = new Toolbar(); 619 | 620 | toolbar.addClass('jp-Toolbar'); 621 | toolbar.addClass('jhdf-toolbar'); 622 | 623 | toolbar.addItem('slice input', new IxInput(grid)); 624 | 625 | // toolbar.addItem( 626 | // 'previous', 627 | // new ToolbarButton({ 628 | // iconClassName: 'jp-PreviousIcon jp-Icon jp-Icon-16', 629 | // onClick: () => { 630 | // hdfViewer.currentPageNumber = Math.max( 631 | // hdfViewer.currentPageNumber - 1, 632 | // 1 633 | // ); 634 | // }, 635 | // tooltip: 'Previous Page' 636 | // }) 637 | // ); 638 | // toolbar.addItem( 639 | // 'next', 640 | // new ToolbarButton({ 641 | // iconClassName: 'jp-NextIcon jp-Icon jp-Icon-16', 642 | // onClick: () => { 643 | // hdfViewer.currentPageNumber = Math.min( 644 | // hdfViewer.currentPageNumber + 1, 645 | // hdfViewer.pagesCount 646 | // ); 647 | // }, 648 | // tooltip: 'Next Page' 649 | // }) 650 | // ); 651 | // 652 | // toolbar.addItem('spacer', Toolbar.createSpacerItem()); 653 | 654 | return toolbar; 655 | } 656 | } 657 | -------------------------------------------------------------------------------- /src/exception.tsx: -------------------------------------------------------------------------------- 1 | // Copyright (c) Jupyter Development Team. 2 | // Distributed under the terms of the Modified BSD License. 3 | 4 | import { Dialog, showDialog } from '@jupyterlab/apputils'; 5 | import { ServerConnection } from '@jupyterlab/services'; 6 | import { ReadonlyJSONObject } from '@lumino/coreutils'; 7 | 8 | import * as React from 'react'; 9 | 10 | const HDF_MODAL_TEXT_CLASS = 'jhdf-errorModal-text'; 11 | 12 | export type ModalResult = Dialog.IResult<{ 13 | title: string; 14 | body: Dialog.Body; 15 | buttons: [Dialog.IButton]; 16 | }>; 17 | 18 | export class HdfResponseError extends ServerConnection.ResponseError { 19 | /** 20 | * Create a new response error. 21 | */ 22 | constructor({ 23 | response, 24 | message = `Invalid response: ${response.status} ${response.statusText}`, 25 | debugVars = {}, 26 | traceback = '', 27 | }: { 28 | response: Response; 29 | message: string; 30 | debugVars: ReadonlyJSONObject; 31 | traceback: string; 32 | }) { 33 | super(response, message); 34 | this.debugVars = debugVars; 35 | this.traceback = traceback; 36 | } 37 | 38 | debugVars: ReadonlyJSONObject; 39 | traceback: string; 40 | } 41 | 42 | export function modalHdfError( 43 | error: HdfResponseError, 44 | buttons: ReadonlyArray = [ 45 | Dialog.okButton({ label: 'Dismiss' }), 46 | ] 47 | ): Promise { 48 | const { message, debugVars, traceback } = error; 49 | console.warn({ message, debugVars, traceback }); 50 | 51 | return showDialog({ 52 | title: 'jupyterlab-hdf error', 53 | body: ( 54 |
<div className={HDF_MODAL_TEXT_CLASS}>
55 |         <div>{message}</div>
56 |       </div>
57 |     ),
58 |     buttons: buttons,
59 |   });
60 | }
61 | 
62 | export function modalResponseError(
63 |   error: ServerConnection.ResponseError,
64 |   buttons: ReadonlyArray<Dialog.IButton> = [
65 |     Dialog.okButton({ label: 'Dismiss' }),
66 |   ]
67 | ): Promise<ModalResult> {
68 |   const { message, traceback } = error;
69 |   console.warn({ message, traceback });
70 | 
71 |   return showDialog({
72 |     title: 'jupyterlab-hdf error',
73 |     body: (
74 |       <div className={HDF_MODAL_TEXT_CLASS}>
75 |         <div>message</div>
76 |         <div>{message}</div>
77 |         <div>traceback</div>
78 |         <div>{traceback}</div>
79 |       </div>
80 |     ),
81 |     buttons: buttons,
82 |   });
83 | }
84 | 
85 | export function modalValidationFail(
86 |   message: string,
87 |   buttons: ReadonlyArray<Dialog.IButton> = [
88 |     Dialog.okButton({ label: 'Dismiss' }),
89 |   ]
90 | ): Promise<ModalResult> {
91 |   return showDialog({
92 |     title: 'jupyterlab-hdf error',
93 |     body: (
94 |       <div className={HDF_MODAL_TEXT_CLASS}>
95 |         <div>{message}</div>
96 |       </div>
97 | ), 98 | buttons: buttons, 99 | }); 100 | } 101 | -------------------------------------------------------------------------------- /src/hdf.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) Jupyter Development Team. 2 | // Distributed under the terms of the Modified BSD License. 3 | 4 | import { PathExt, URLExt } from '@jupyterlab/coreutils'; 5 | import { ServerConnection } from '@jupyterlab/services'; 6 | import { JSONObject, PartialJSONObject } from '@lumino/coreutils'; 7 | import { Complex } from './complex'; 8 | 9 | import { HdfResponseError } from './exception'; 10 | import { ISlice } from './slice'; 11 | 12 | /** 13 | * Hdf mime types 14 | */ 15 | export const HDF_MIME_TYPE = 'application/x-hdf5'; 16 | export const HDF_DATASET_MIME_TYPE = `${HDF_MIME_TYPE}.dataset`; 17 | // export const HDF_GROUP_MIME_TYPE = `${HDF_MIME_TYPE}.group`; 18 | 19 | /** 20 | * A helper function that copies an object without any null or undefined props 21 | */ 22 | function filterNull(obj: T): Partial { 23 | return (Object.entries(obj) as [keyof T, any]).reduce( 24 | (a, [k, v]) => (v != null ? ((a[k] = v), a) : a), 25 | {} 26 | ); 27 | } 28 | 29 | /** 30 | * objectToQueryString that excludes parameters with null/undefined values 31 | */ 32 | function objectToQueryString(value: PartialJSONObject) { 33 | return URLExt.objectToQueryString(filterNull(value)); 34 | } 35 | 36 | /** 37 | * A static version of the localPath method from ContentsManager 38 | */ 39 | export function localAbsPath(path: string): string { 40 | const parts = path.split('/'); 41 | const firstParts = parts[0].split(':'); 42 | if (firstParts.length === 1) { 43 | return '/' + path; 44 | } 45 | return '/' + PathExt.join(firstParts.slice(1).join(':'), ...parts.slice(1)); 46 | } 47 | 48 | /** 49 | * Parse a path into hdf contents request parameters. 50 | */ 51 | export function parseHdfQuery(path: string): IContentsParameters { 52 | // deal with the possibility of leading "Hdf:" drive specifier via localPath 53 | const parts = localAbsPath(path).split('?'); 54 | 55 | // list some defaults in return value, which may be overridden 56 | // by params in input query string 57 | return { 58 | fpath: parts[0], 59 | uri: '/', 60 | ...(parts[1] ? 
URLExt.queryStringToObject(parts[1]) : {}), 61 | }; 62 | } 63 | 64 | export function nameFromPath(path: string): string { 65 | if (path === '' || path === '/') { 66 | return '/'; 67 | } 68 | 69 | const parts = path.split('/'); 70 | return parts[parts.length - 1]; 71 | } 72 | 73 | /** 74 | * make a parameterized request to the `hdf/attrs` api 75 | */ 76 | export function hdfAttrsRequest( 77 | parameters: IAttrsParameters, 78 | settings: ServerConnection.ISettings 79 | ): Promise> { 80 | // allow the query parameters to be optional 81 | const { fpath, uri } = parameters; 82 | 83 | const fullUrl = 84 | URLExt.join(settings.baseUrl, 'hdf', 'attrs', fpath).split('?')[0] + 85 | objectToQueryString({ uri }); 86 | 87 | return hdfApiRequest(fullUrl, {}, settings); 88 | } 89 | 90 | /** 91 | * make a parameterized request to the `hdf/contents` api 92 | */ 93 | export function hdfContentsRequest( 94 | parameters: IContentsParameters, 95 | settings: ServerConnection.ISettings 96 | ): Promise { 97 | // allow the query parameters to be optional 98 | const { fpath, uri } = parameters; 99 | 100 | const fullUrl = 101 | URLExt.join(settings.baseUrl, 'hdf', 'contents', fpath).split('?')[0] + 102 | objectToQueryString({ uri }); 103 | 104 | return hdfApiRequest(fullUrl, {}, settings); 105 | } 106 | 107 | /** 108 | * make a parameterized request to the `hdf/data` api 109 | */ 110 | export function hdfDataRequest( 111 | parameters: IDataParameters, 112 | settings: ServerConnection.ISettings 113 | ): Promise { 114 | // require the uri, row, and col query parameters 115 | const { fpath, uri, ixstr, min_ndim, subixstr } = parameters; 116 | 117 | const fullUrl = 118 | URLExt.join(settings.baseUrl, 'hdf', 'data', fpath).split('?')[0] + 119 | objectToQueryString({ uri, ixstr, min_ndim, subixstr }); 120 | 121 | return hdfApiRequest(fullUrl, {}, settings); 122 | } 123 | 124 | /** 125 | * make a parameterized request to the `hdf/meta` api 126 | */ 127 | export function hdfMetaRequest( 128 | parameters: IMetaParameters, 129 | settings: ServerConnection.ISettings 130 | ): Promise { 131 | // allow the query parameters to be optional 132 | const { fpath, uri, ixstr, min_ndim } = parameters; 133 | 134 | const fullUrl = 135 | URLExt.join(settings.baseUrl, 'hdf', 'meta', fpath).split('?')[0] + 136 | objectToQueryString({ uri, ixstr, min_ndim }); 137 | 138 | return hdfApiRequest(fullUrl, {}, settings); 139 | } 140 | 141 | /** 142 | * Send a parameterized request to the `hdf/snippet` api, and 143 | * return the result. 144 | */ 145 | export function hdfSnippetRequest( 146 | parameters: IDataParameters, 147 | settings: ServerConnection.ISettings 148 | ): Promise { 149 | // require the uri, row, and col query parameters 150 | const { fpath, uri, ixstr, subixstr } = parameters; 151 | 152 | const fullUrl = 153 | URLExt.join(settings.baseUrl, 'hdf', 'snippet', fpath).split('?')[0] + 154 | objectToQueryString({ uri, ixstr, subixstr }); 155 | 156 | return hdfApiRequest(fullUrl, {}, settings); 157 | } 158 | 159 | /** 160 | * Send a parameterized request to one of the hdf api endpoints, 161 | * and return the result. 
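 *
 * For example (hypothetical arguments), fetching dataset metadata directly:
 *
 *   const meta = await hdfApiRequest(
 *     URLExt.join(settings.baseUrl, 'hdf', 'meta', '/data/sim.h5') +
 *       '?uri=/group/dset',
 *     {},
 *     settings
 *   );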
162 | */ 163 | export async function hdfApiRequest( 164 | url: string, 165 | body: JSONObject, 166 | settings: ServerConnection.ISettings 167 | ): Promise { 168 | const response = await ServerConnection.makeRequest(url, body, settings); 169 | if (response.status !== 200) { 170 | const data = await response.text(); 171 | let json; 172 | if (data.length > 0) { 173 | try { 174 | // HTTPError on the python side adds some leading cruft, strip it 175 | json = JSON.parse(data.substring(data.indexOf('{'))); 176 | } catch (error) {} // eslint-disable-line no-empty 177 | } 178 | 179 | if (json?.type === 'JhdfError') { 180 | const { message, debugVars, traceback } = json; 181 | throw new HdfResponseError({ response, message, debugVars, traceback }); 182 | } else { 183 | throw new ServerConnection.ResponseError(response, data); 184 | } 185 | } 186 | return response.json(); 187 | } 188 | 189 | /** 190 | * common parameters for all hdf api requests 191 | */ 192 | interface IParameters { 193 | /** 194 | * path on disk to an hdf5 file 195 | */ 196 | fpath: string; 197 | 198 | /** 199 | * path within an hdf5 file to a specific group or dataset 200 | */ 201 | uri: string; 202 | } 203 | 204 | /** 205 | * parameters for an hdf attributes request 206 | */ 207 | interface IAttrsParameters extends IParameters {} 208 | 209 | /** 210 | * parameters for an hdf contents request 211 | */ 212 | export interface IContentsParameters extends IParameters {} 213 | 214 | /** 215 | * parameters for an hdf array data (ie from a dataset) request 216 | */ 217 | export interface IDataParameters extends IMetaParameters { 218 | /** 219 | * string specifying slice of dataset slab to be fetched as array data, using numpy-style index syntax 220 | */ 221 | subixstr?: string; 222 | } 223 | 224 | /** 225 | * parameters for an hdf metadata request 226 | */ 227 | export interface IMetaParameters extends IParameters { 228 | /** 229 | * string specifying dataset slab, using numpy-style index (or "slice") syntax 230 | */ 231 | ixstr?: string; 232 | 233 | /** 234 | * promote any shape metadata or array data that is fetched to have at least this many dimensions 235 | */ 236 | min_ndim?: number; 237 | } 238 | 239 | type HdfType = 'dataset' | 'group' | 'soft_link' | 'external_link'; 240 | 241 | /** 242 | * typings representing contents from an object in an hdf5 file 243 | */ 244 | interface IHdfBaseContents { 245 | /** 246 | * The name of the object. 247 | */ 248 | name: string; 249 | 250 | /** 251 | * The type of the object. 252 | */ 253 | type: HdfType; 254 | 255 | /** 256 | * The path to the object in the hdf5 file. 
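 *
 * E.g. `/leaf01/data01` for a dataset nested one group deep (compare the
 * uris printed in the nested-meta scratch notebook above).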
257 |    */
258 |   uri: string;
259 | }
260 | 
261 | interface IHdfDatasetContents extends IHdfBaseContents {
262 |   content: IDatasetMeta;
263 |   type: 'dataset';
264 | }
265 | 
266 | interface IHdfGroupContents extends IHdfBaseContents {
267 |   content: IGroupMeta;
268 |   type: 'group';
269 | }
270 | 
271 | interface IHdfExternalLinkContents extends IHdfBaseContents {
272 |   content: IExternalLinkMeta;
273 |   type: 'external_link';
274 | }
275 | 
276 | interface IHdfSoftLinkContents extends IHdfBaseContents {
277 |   content: ISoftLinkMeta;
278 |   type: 'soft_link';
279 | }
280 | 
281 | export type HdfContents =
282 |   | IHdfDatasetContents
283 |   | IHdfGroupContents
284 |   | IHdfExternalLinkContents
285 |   | IHdfSoftLinkContents;
286 | 
287 | /**
288 |  * Typings representing directory contents
289 |  */
290 | export type HdfDirectoryListing = HdfContents[];
291 | 
292 | export type AttributeValue = any;
293 | 
294 | interface IAttrMeta {
295 |   name: string;
296 |   dtype: string;
297 |   type: HdfType;
298 | }
299 | 
300 | interface IBaseMeta {
301 |   attributes: IAttrMeta[];
302 | 
303 |   name: string;
304 | 
305 |   type: HdfType;
306 | }
307 | 
308 | export interface IDatasetMeta extends IBaseMeta {
309 |   dtype: string;
310 | 
311 |   labels: ISlice[];
312 | 
313 |   ndim: number;
314 | 
315 |   shape: number[];
316 | 
317 |   size: number;
318 | 
319 |   type: 'dataset';
320 | }
321 | 
322 | interface IGroupMeta extends IBaseMeta {
323 |   type: 'group';
324 | }
325 | 
326 | interface ISoftLinkMeta extends IBaseMeta {
327 |   targetUri: string;
328 |   type: 'soft_link';
329 | }
330 | 
331 | interface IExternalLinkMeta extends IBaseMeta {
332 |   targetUri: string;
333 |   targetFile: string;
334 | 
335 |   type: 'external_link';
336 | }
337 | 
338 | export type IMeta =
339 |   | IDatasetMeta
340 |   | IGroupMeta
341 |   | ISoftLinkMeta
342 |   | IExternalLinkMeta;
343 | 
344 | export function datasetMetaEmpty(): IDatasetMeta {
345 |   return {
346 |     attributes: [],
347 |     dtype: 'int64',
348 |     labels: [],
349 |     name: '',
350 |     ndim: 0,
351 |     shape: [],
352 |     size: 0,
353 |     type: 'dataset',
354 |   };
355 | }
356 | 
--------------------------------------------------------------------------------
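A minimal sketch of how the request functions and typings above fit together when called from elsewhere in the frontend; the file path and uri are illustrative, and the narrowing on `meta.type` relies on the discriminated `IMeta` union:

    import { ServerConnection } from '@jupyterlab/services';
    import { hdfDataRequest, hdfMetaRequest } from './hdf';

    async function previewDataset(): Promise<void> {
      const settings = ServerConnection.makeSettings();
      const params = { fpath: 'data/demo.h5', uri: '/group/dset' };

      // fetch shape/dtype metadata first, then a small 2d slab of array data
      const meta = await hdfMetaRequest(params, settings);
      if (meta.type === 'dataset') {
        const slab = await hdfDataRequest(
          { ...params, ixstr: '0:10, 0:10', min_ndim: 2 },
          settings
        );
        console.log(meta.shape, slab);
      }
    }

--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
1 | // Copyright (c) Jupyter Development Team.
2 | // Distributed under the terms of the Modified BSD License.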
3 | 
4 | import {
5 |   JupyterFrontEnd,
6 |   JupyterFrontEndPlugin,
7 |   ILabShell,
8 |   ILayoutRestorer,
9 | } from '@jupyterlab/application';
10 | import { MainAreaWidget, WidgetTracker } from '@jupyterlab/apputils';
11 | import { PathExt } from '@jupyterlab/coreutils';
12 | import { IDocumentManager } from '@jupyterlab/docmanager';
13 | import { DocumentRegistry } from '@jupyterlab/docregistry';
14 | import { FileBrowser, IFileBrowserFactory } from '@jupyterlab/filebrowser';
15 | import { INotebookTracker } from '@jupyterlab/notebook';
16 | import { ServerConnection } from '@jupyterlab/services';
17 | import { map, toArray } from '@lumino/algorithm';
18 | import { searchIcon } from '@jupyterlab/ui-components';
19 | 
20 | // import { IRegistry } from "@jupyterlab/dataregistry-extension";
21 | 
22 | import AttributeViewer from './AttributeViewer';
23 | import { HdfSidepanel } from './browser';
24 | import { HdfDrive } from './contents';
25 | // import { addHdfConverters } from "./dataregistry";
26 | import {
27 |   HdfDatasetDoc,
28 |   HdfDatasetDocFactory,
29 |   IHdfDatasetDocTracker,
30 | } from './dataset';
31 | import {
32 |   IContentsParameters,
33 |   HDF_DATASET_MIME_TYPE,
34 |   HDF_MIME_TYPE,
35 |   hdfContentsRequest,
36 |   hdfSnippetRequest,
37 |   parseHdfQuery,
38 |   hdfAttrsRequest,
39 |   nameFromPath,
40 |   hdfMetaRequest,
41 | } from './hdf';
42 | 
43 | /**
44 |  * hdf plugins state namespace
45 |  */
46 | const HDF_BROWSER_NAMESPACE = 'hdf-file-browser';
47 | const HDF_FILE_BROWSER_NAMESPACE = 'hdf-filebrowser';
48 | const HDF_DATASET_NAMESPACE = 'hdf-dataset';
49 | 
50 | /**
51 |  * the IDs for the plugins
52 |  */
53 | const hdf5BrowserPluginId = 'jupyterlab-hdf:browser';
54 | const hdf5DatasetPluginId = 'jupyterlab-hdf:dataset';
55 | // const hdf5DataRegistryPluginId = "jupyterlab-hdf:dataregistry";
56 | 
57 | /**
58 |  * hdf icon classnames
59 |  */
60 | const HDF_ICON = 'jhdf-icon';
61 | const HDF_FILE_ICON = `jp-MaterialIcon ${HDF_ICON}`;
62 | const HDF_DATASET_ICON = 'jp-MaterialIcon jp-SpreadsheetIcon'; // jhdf-datasetIcon;
63 | 
64 | namespace CommandIDs {
65 |   /**
66 |    * fetch the contents at a uri within an hdf5 file
67 |    */
68 |   export const fetchContents = 'hdf:fetch-contents';
69 | 
70 |   export const openInBrowser = 'hdf:open-in-browser';
71 | 
72 |   export const openSnippet = 'hdf:open-snippet';
73 | 
74 |   export const viewAttributes = 'hdf:view-attributes';
75 | }
76 | 
77 | /**
78 |  * initialization data for the jupyterlab-hdf5 extension
79 |  */
80 | const hdfBrowserPlugin: JupyterFrontEndPlugin<void> = {
81 |   id: hdf5BrowserPluginId,
82 |   requires: [
83 |     IDocumentManager,
84 |     IFileBrowserFactory,
85 |     ILabShell,
86 |     ILayoutRestorer,
87 |     INotebookTracker,
88 |   ],
89 | 
90 |   activate: activateHdfBrowserPlugin,
91 |   autoStart: true,
92 | };
93 | 
94 | /**
95 |  * the hdf dataset file handler extension
96 |  */
97 | const hdfDatasetPlugin: JupyterFrontEndPlugin<IHdfDatasetDocTracker> = {
98 |   id: hdf5DatasetPluginId,
99 |   provides: IHdfDatasetDocTracker,
100 |   optional: [ILayoutRestorer],
101 | 
102 |   activate: activateHdfDatasetPlugin,
103 |   autoStart: true,
104 | };
105 | 
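// As a sketch, another extension could require the IHdfDatasetDocTracker
// token provided above and follow dataset documents as they are opened
// (the plugin id and the use of the tracker's currentChanged signal here
// are illustrative, not part of this codebase):
//
//   const consumerPlugin: JupyterFrontEndPlugin<void> = {
//     id: 'my-extension:uses-hdf',
//     requires: [IHdfDatasetDocTracker],
//     autoStart: true,
//     activate: (app: JupyterFrontEnd, tracker: IHdfDatasetDocTracker) => {
//       tracker.currentChanged.connect((_, doc) => {
//         console.log('current hdf dataset doc:', doc?.context.path);
//       });
//     },
//   };

106 | // /**
107 | //  * Provides hdf5 support for the @jupyterlab/dataregistry
108 | //  * extension, if it is installed.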
109 | //  */
110 | // const hdfDataRegistryPlugin: JupyterFrontEndPlugin<void> = {
111 | //   id: hdf5DataRegistryPluginId,
112 | //   optional: [IRegistry],
113 | 
114 | //   activate: activateHdfDataRegistryPlugin,
115 | //   autoStart: true
116 | // };
117 | 
118 | /**
119 |  * activate the hdf file browser extension
120 |  */
121 | function activateHdfBrowserPlugin(
122 |   app: JupyterFrontEnd,
123 |   manager: IDocumentManager,
124 |   browserFactory: IFileBrowserFactory,
125 |   labShell: ILabShell,
126 |   restorer: ILayoutRestorer,
127 |   notebookTracker: INotebookTracker
128 |   // settingRegistry: ISettingRegistry
129 | ): void {
130 |   const { createFileBrowser, defaultBrowser } = browserFactory;
131 | 
132 |   // Add an hdf5 file type to the docregistry.
133 |   const ft: DocumentRegistry.IFileType = {
134 |     // driveName: 'Hdf',
135 |     contentType: 'directory',
136 |     displayName: 'HDF File',
137 |     extensions: ['.hdf5', '.h5'],
138 |     fileFormat: 'json',
139 |     iconClass: HDF_FILE_ICON,
140 |     mimeTypes: [HDF_MIME_TYPE],
141 |     name: 'hdf:file',
142 |   };
143 |   app.docRegistry.addFileType(ft);
144 | 
145 |   // Add the Hdf backend to the contents manager.
146 |   const hdfDrive = new HdfDrive(app.docRegistry);
147 |   manager.services.contents.addDrive(hdfDrive);
148 | 
149 |   // Create the embedded filebrowser. Hdf files likely don't need
150 |   // as frequent a refresh interval as standard filesystem dirs,
151 |   // so we use a 5 second refresh interval.
152 |   const _hdfBrowser = createFileBrowser(HDF_BROWSER_NAMESPACE, {
153 |     driveName: hdfDrive.name,
154 |     refreshInterval: 5000,
155 |   });
156 | 
157 |   const hdfSidepanel = new HdfSidepanel(_hdfBrowser, hdfDrive);
158 | 
159 |   hdfSidepanel.title.iconClass = `${HDF_ICON} jp-SideBar-tabIcon`;
160 |   hdfSidepanel.title.caption = 'Browse Hdf';
161 | 
162 |   hdfSidepanel.id = HDF_BROWSER_NAMESPACE;
163 | 
164 |   // Add the file browser widget to the application restorer.
165 |   if (restorer) {
166 |     restorer.add(hdfSidepanel, HDF_FILE_BROWSER_NAMESPACE);
167 |   }
168 |   app.shell.add(hdfSidepanel, 'left', { rank: 103 });
169 | 
170 |   addBrowserCommands(
171 |     app,
172 |     browserFactory,
173 |     hdfSidepanel,
174 |     labShell,
175 |     notebookTracker
176 |   );
177 |   monkeyPatchBrowser(app, defaultBrowser);
178 | 
179 |   return;
180 | }
181 | 
182 | function monkeyPatchBrowser(app: JupyterFrontEnd, browser: FileBrowser) {
183 |   const { commands } = app;
184 | 
185 |   const handleDblClick = async (evt: Event): Promise<void> => {
186 |     const event = evt as MouseEvent;
187 |     // Do nothing if it's not a left mouse press.
188 |     if (event.button !== 0) {
189 |       return;
190 |     }
191 | 
192 |     // Do nothing if any modifier keys are pressed.
193 |     if (event.ctrlKey || event.shiftKey || event.altKey || event.metaKey) {
194 |       return;
195 |     }
196 | 
197 |     // Stop the event propagation.
198 |     event.preventDefault();
199 |     event.stopPropagation();
200 | 
201 |     const item = browser.modelForClick(event);
202 |     if (!item) {
203 |       return;
204 |     }
205 | 
206 |     const { contents } = browser.model.manager.services;
207 |     const extname = PathExt.extname(item.path);
208 |     if (extname === '.hdf5' || extname === '.h5') {
209 |       // special handling for .hdf5 files
210 |       await commands.execute(CommandIDs.openInBrowser);
211 |     } else if (item.type === 'directory') {
212 |       browser.model
213 |         .cd('/' + contents.localPath(item.path))
214 |         .catch(error => console.error(error));
215 |     } else {
216 |       browser.model.manager.openOrReveal(item.path);
217 |     }
218 |   };
219 | 
220 |   browser.node.addEventListener('dblclick', handleDblClick, true);
221 | }
222 | 
223 | function addBrowserCommands(
224 |   app: JupyterFrontEnd,
225 |   browserFactory: IFileBrowserFactory,
226 |   hdfSidepanel: HdfSidepanel,
227 |   labShell: ILabShell,
228 |   notebookTracker: INotebookTracker
229 | ): void {
230 |   const { tracker } = browserFactory;
231 |   const { commands } = app;
232 |   const serverSettings = ServerConnection.makeSettings();
233 | 
234 |   commands.addCommand(CommandIDs.fetchContents, {
235 |     execute: args => {
236 |       let params: IContentsParameters = {
237 |         fpath: args['fpath'] as string,
238 |         uri: args['uri'] as string,
239 |       };
240 | 
241 |       return hdfContentsRequest(params, serverSettings);
242 |     },
243 |     label: 'For an HDF5 file at `fpath`, fetch the contents at `uri`',
244 |   });
245 | 
246 |   commands.addCommand(CommandIDs.openInBrowser, {
247 |     label: 'Open as HDF5',
248 |     execute: args => {
249 |       const widget = tracker.currentWidget;
250 | 
251 |       if (!widget) {
252 |         return;
253 |       }
254 | 
255 |       const fpaths = map(widget.selectedItems(), item => {
256 |         const { fpath } = parseHdfQuery(item.path);
257 |         return fpath;
258 |       });
259 | 
260 |       labShell.activateById(hdfSidepanel.id);
261 | 
262 |       return Promise.all(
263 |         toArray(
264 |           map(fpaths, fpath => {
265 |             return hdfSidepanel.browser.model.cd(fpath);
266 |           })
267 |         )
268 |       );
269 |     },
270 |   });
271 | 
272 |   commands.addCommand(CommandIDs.openSnippet, {
273 |     label: 'Snippet',
274 |     execute: async () => {
275 |       const widget = tracker.currentWidget;
276 |       if (!widget) {
277 |         return;
278 |       }
279 | 
280 |       const items = toArray(
281 |         map(widget.selectedItems(), item => {
282 |           return item;
283 |         })
284 |       );
285 |       const params = parseHdfQuery(items[0].path);
286 | 
287 |       if (!notebookTracker.activeCell) {
288 |         console.error('No cell available to paste the snippet');
289 |         return;
290 |       }
291 | 
292 |       try {
293 |         notebookTracker.activeCell.model.value.insert(
294 |           0,
295 |           await hdfSnippetRequest(params, serverSettings)
296 |         );
297 |       } catch (error) {
298 |         console.error(error);
299 |       }
300 |     },
301 |   });
302 | 
303 |   commands.addCommand(CommandIDs.viewAttributes, {
304 |     label: 'View attributes',
305 |     iconClass: HDF_FILE_ICON,
306 |     execute: async () => {
307 |       const widget = tracker.currentWidget;
308 |       if (!widget) {
309 |         return;
310 |       }
311 | 
312 |       const items = toArray(
313 |         map(widget.selectedItems(), item => {
314 |           return item;
315 |         })
316 |       );
317 |       // the selected item takes precedence over the current path
318 |       const selectedItem = items[0] || widget.model;
319 |       const params = parseHdfQuery(selectedItem.path);
320 | 
321 |       try {
322 |         const [{ attributes }, attrValues] = await Promise.all([
323 |           hdfMetaRequest(params, serverSettings),
324 |           hdfAttrsRequest(params, serverSettings),
325 |         ]);
326 |         const attrs = Object.entries(attrValues).map(([name, value], i) => ({
327 |           name,
328 |           value,
329 |           dtype: attributes[i].dtype,
330 |         }));
331 |         const attrsWidget = new MainAreaWidget({
332 |           content: new AttributeViewer(attrs),
333 |         });
334 |         attrsWidget.title.label = nameFromPath(params.uri);
335 |         attrsWidget.title.icon = searchIcon;
336 |         app.shell.add(attrsWidget, 'main');
337 |       } catch (error) {
338 |         console.error(error);
339 |       }
340 |     },
341 |   });
342 | 
343 |   // add context menu items for commands
344 | 
345 |   // matches all hdf filebrowser items
346 |   const selectorDefaultItem =
347 |     '#hdf-file-browser .jp-DirListing-item[data-isdir]';
348 |   const selectorDefaultContent = '#hdf-file-browser .jp-DirListing-content';
349 | 
350 |   app.contextMenu.addItem({
351 |     command: CommandIDs.openSnippet,
352 |     rank: 3,
353 |     selector: selectorDefaultItem,
354 |   });
355 |   app.contextMenu.addItem({
356 |     command: CommandIDs.viewAttributes,
357 |     rank: 4,
358 |     selector: selectorDefaultContent,
359 |   });
360 | 
361 |   return;
362 | }
363 | 
364 | /**
365 |  * activate the hdf dataset viewer extension
366 |  */
367 | function activateHdfDatasetPlugin(
368 |   app: JupyterFrontEnd,
369 |   restorer: ILayoutRestorer | null
370 | ): IHdfDatasetDocTracker {
371 |   // Add an hdf dataset file type to the docregistry.
372 |   const ft: DocumentRegistry.IFileType = {
373 |     contentType: 'file',
374 |     displayName: 'HDF Dataset',
375 |     extensions: ['.data'],
376 |     fileFormat: 'json',
377 |     iconClass: HDF_DATASET_ICON,
378 |     mimeTypes: [HDF_DATASET_MIME_TYPE],
379 |     name: 'hdf:dataset',
380 |   };
381 |   app.docRegistry.addFileType(ft);
382 | 
383 |   // Create a new dataset viewer factory.
384 |   const factory = new HdfDatasetDocFactory({
385 |     defaultFor: ['hdf:dataset'],
386 |     fileTypes: ['hdf:dataset'],
387 |     name: 'HDF Dataset',
388 |     readOnly: true,
389 |   });
390 | 
391 |   // Create a widget tracker for hdf documents.
392 |   const tracker = new WidgetTracker<HdfDatasetDoc>({
393 |     namespace: HDF_DATASET_NAMESPACE,
394 |   });
395 | 
396 |   // Handle state restoration.
397 |   if (restorer) {
398 |     void restorer.restore(tracker, {
399 |       command: 'docmanager:open',
400 |       args: widget => ({ path: widget.context.path, factory: 'HDF Dataset' }),
401 |       name: widget => widget.context.path,
402 |     });
403 |   }
404 | 
405 |   app.docRegistry.addWidgetFactory(factory);
406 |   factory.widgetCreated.connect((sender, widget) => {
407 |     // Track the widget.
408 |     void tracker.add(widget);
409 |     // Notify the widget tracker if restore data needs to update.
410 |     widget.context.pathChanged.connect(() => {
411 |       void tracker.save(widget);
412 |     });
413 | 
414 |     widget.title.iconClass = ft.iconClass || '';
415 |     widget.title.iconLabel = ft.iconLabel || '';
416 |   });
417 | 
418 |   return tracker;
419 | }
420 | 
421 | // /**
422 | //  * activate the hdf dataregistry extension
423 | //  */
424 | // function activateHdfDataRegistryPlugin(
425 | //   app: JupyterFrontEnd,
426 | //   dataRegistry: IRegistry | null
427 | // ): void {
428 | //   if (!dataRegistry) {
429 | //     // bail
430 | //     return;
431 | //   }
432 | 
433 | //   addHdfConverters(dataRegistry);
434 | // }
435 | 
436 | /**
437 |  * export the plugins as default
438 |  */
439 | const plugins: JupyterFrontEndPlugin<any>[] = [
440 |   hdfBrowserPlugin,
441 |   hdfDatasetPlugin,
442 |   // hdfDataRegistryPlugin
443 | ];
444 | export default plugins;
445 | 
--------------------------------------------------------------------------------
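As a sketch, the `hdf:fetch-contents` command registered above can also be invoked programmatically from another extension (the file path here is purely illustrative):

    import { JupyterFrontEnd } from '@jupyterlab/application';

    async function listHdfRoot(app: JupyterFrontEnd): Promise<void> {
      // resolves through hdfContentsRequest against the server extension
      const listing = await app.commands.execute('hdf:fetch-contents', {
        fpath: 'data/demo.h5',
        uri: '/',
      });
      console.log(listing);
    }

--------------------------------------------------------------------------------
/src/slice.ts:
--------------------------------------------------------------------------------
1 | // Copyright (c) Jupyter Development Team.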
2 | // Distributed under the terms of the Modified BSD License.
3 | 
4 | export interface ISlice {
5 |   start: number;
6 |   stop?: number | null;
7 |   step: number;
8 | }
9 | 
10 | /**
11 |  * analogous to the python `slice` constructor
12 |  */
13 | export function slice(
14 |   start: number,
15 |   stop?: number | null,
16 |   step: number | null = null
17 | ): ISlice {
18 |   if (stop === undefined) {
19 |     return { start: 0, stop: start, step: 1 };
20 |   }
21 | 
22 |   return { start, stop, step: step === null ? 1 : step };
23 | }
24 | 
25 | export function allSlice(): ISlice {
26 |   return slice(0, null);
27 | }
28 | 
29 | export function noneSlice(): ISlice {
30 |   return slice(0, 0);
31 | }
32 | 
33 | const allSlices: ISlice[] = [allSlice(), allSlice()];
34 | const noneSlices: ISlice[] = [noneSlice(), noneSlice()];
35 | 
36 | export function parseSlices(strSlices: string): ISlice[] {
37 |   if (!strSlices) {
38 |     return allSlices;
39 |   }
40 | 
41 |   const slices = strSlices
42 |     .split(/\s*,\s*/)
43 |     .map(dim => dim.split(/\s*:\s*/))
44 |     .reduce((slices: ISlice[], strSliceArr: string[]) => {
45 |       const start = parseInt(strSliceArr[0], 10);
46 | 
47 |       if (strSliceArr.length === 1 && !isNaN(start)) {
48 |         // single index in place of a slice (isNaN, not truthiness, so 0 is valid)
49 |         slices.push(slice(start, start + 1));
50 |       } else if (strSliceArr.length === 2 || strSliceArr.length === 3) {
51 |         // two- or three-part slice; any stride is ignored
52 |         slices.push(slice(start, parseInt(strSliceArr[1], 10)));
53 |       }
54 |       return slices;
55 |     }, []);
56 | 
57 |   if (slices.length !== 2 || !slices[0] || !slices[1]) {
58 |     // invalidate the slices
59 |     console.warn(
60 |       `Error parsing slices: invalid slices string input. strSlices: "${strSlices}"`
61 |     );
62 | 
63 |     return [...noneSlices];
64 |   }
65 | 
66 |   return slices;
67 | }
68 | 
--------------------------------------------------------------------------------
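The parsing rules above, illustrated (a sketch; the results follow from the reducer and the two-dimension check at the end of parseSlices):

    import { parseSlices } from './slice';

    // ranges parse dimension-by-dimension; strides are ignored
    parseSlices('0:10, 5:25');
    // -> [{ start: 0, stop: 10, step: 1 }, { start: 5, stop: 25, step: 1 }]

    // a bare index n is widened to the slice n:n+1
    parseSlices('3, 0:4');
    // -> [{ start: 3, stop: 4, step: 1 }, { start: 0, stop: 4, step: 1 }]

    // anything that does not yield exactly two valid slices is invalidated
    parseSlices('0:10');
    // -> a copy of noneSlices, ie two empty { start: 0, stop: 0, step: 1 } slices

--------------------------------------------------------------------------------
/src/toolbar.tsx:
--------------------------------------------------------------------------------
1 | // Copyright (c) Jupyter Development Team.
2 | // Distributed under the terms of the Modified BSD License.
3 | 
4 | import { DataGrid } from '@lumino/datagrid';
5 | 
6 | import { ISignal } from '@lumino/signaling';
7 | 
8 | import { ReactWidget } from '@jupyterlab/apputils';
9 | 
10 | import * as React from 'react';
11 | 
12 | import { HdfDatasetModel } from './dataset';
13 | 
14 | const TOOLBAR_IX_INPUT_CLASS = 'jp-IxInputToolbar';
15 | const TOOLBAR_IX_INPUT_BOX_CLASS = 'jp-IxInputToolbar-box';
16 | 
17 | /**
18 |  * a namespace for IxInputBox statics
19 |  */
20 | namespace IxInputBox {
21 |   /**
22 |    * the props for IxInputBox
23 |    */
24 |   export interface IProps {
25 |     /**
26 |      * function run when enter key is pressed in input box
27 |      */
28 |     handleEnter: (val: string) => void;
29 | 
30 |     /**
31 |      * initial value shown in input box
32 |      */
33 |     initialValue?: string;
34 | 
35 |     /**
36 |      * signal by which input value can be updated.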
37 |      * Updates the value in an isolated way, without
38 |      * triggering eg handleEnter
39 |      */
40 |     signal: ISignal<any, string>;
41 |   }
42 | 
43 |   /**
44 |    * the state for IxInputBox
45 |    */
46 |   export interface IState {
47 |     /**
48 |      * the current value of the input box
49 |      */
50 |     value: string;
51 | 
52 |     /**
53 |      * whether the input box has focus
54 |      */
55 |     hasFocus: boolean;
56 |   }
57 | }
58 | 
59 | export class IxInput extends ReactWidget {
60 |   /**
61 |    * construct a new text input for an index
62 |    */
63 |   constructor(widget: DataGrid) {
64 |     super();
65 |     this.addClass(TOOLBAR_IX_INPUT_CLASS);
66 | 
67 |     this._grid = widget;
68 |     this._model = this._grid.dataModel as HdfDatasetModel;
69 |   }
70 | 
71 |   render(): JSX.Element {
72 |     return (
73 |       <IxInputBox
74 |         handleEnter={val => (this._model.ixstr = val)}
75 |         initialValue={this._model.ixstr}
76 |         signal={this._model.refreshed}
77 |       />
78 |     );
79 |   }
80 | 
81 |   private _grid: DataGrid;
82 |   private _model: HdfDatasetModel;
83 | }
84 | 
85 | export class IxInputBox extends React.Component<
86 |   IxInputBox.IProps,
87 |   IxInputBox.IState
88 | > {
89 |   /**
90 |    * construct a new input box for an index
91 |    */
92 |   constructor(props: IxInputBox.IProps) {
93 |     super(props);
94 |     this.state = {
95 |       value: this.props.initialValue || '',
96 |       hasFocus: false,
97 |     };
98 |   }
99 | 
100 |   /**
101 |    * attach the value change signal and focus the element on mount
102 |    */
103 |   componentDidMount(): void {
104 |     this.props.signal.connect(this._slot);
105 |     this._textInput!.focus();
106 |   }
107 | 
108 |   /**
109 |    * detach the value change signal on unmount
110 |    */
111 |   componentWillUnmount(): void {
112 |     this.props.signal.disconnect(this._slot);
113 |   }
114 | 
115 |   /**
116 |    * handle `keydown` events for the input element
117 |    */
118 |   private _handleKeyDown = (
119 |     event: React.KeyboardEvent<HTMLInputElement>
120 |   ): void => {
121 |     if (event.key === 'Enter') {
122 |       this.props.handleEnter(event.currentTarget.value);
123 |     }
124 |   };
125 | 
126 |   /**
127 |    * handle a change to the value in the input field
128 |    */
129 |   private _handleChange = (event: React.ChangeEvent<HTMLInputElement>) => {
130 |     this.setState({ value: event.currentTarget.value });
131 |   };
132 | 
133 |   /**
134 |    * handle focusing of the input field
135 |    */
136 |   private _handleFocus = () => {
137 |     this.setState({ hasFocus: true });
138 |   };
139 | 
140 |   /**
141 |    * handle blurring of the input field
142 |    */
143 |   private _handleBlur = () => {
144 |     this.setState({ hasFocus: false });
145 |   };
146 | 
147 |   /**
148 |    * update value on signal emit
149 |    */
150 |   private _slot = (_: any, args: string) => {
151 |     // skip setting new state if incoming val is equal to existing value
152 |     if (args === this.state.value) {
153 |       return;
154 |     }
155 | 
156 |     this.setState({ value: args });
157 |   };
158 | 
159 |   render(): JSX.Element {
160 |     return (
161 |       // a plain text input, wired to the state and handlers defined above
162 |       <div className={TOOLBAR_IX_INPUT_BOX_CLASS}>
163 |         <input
164 |           type="text"
165 |           className={this.state.hasFocus ? 'jp-mod-focused' : ''}
166 |           spellCheck={false}
167 |           ref={input => {
168 |             this._textInput = input;
169 |           }}
170 |           value={this.state.value}
171 |           onChange={this._handleChange}
172 |           onKeyDown={this._handleKeyDown}
173 |           onFocus={this._handleFocus}
174 |           onBlur={this._handleBlur}
175 |         />
176 |       </div>
177 |     );
178 |   }
179 | 
180 |   private _textInput: HTMLInputElement | null = null;
181 | }
182 | 
--------------------------------------------------------------------------------
/src/utils.ts:
--------------------------------------------------------------------------------
1 | import { IDatasetMeta } from './hdf';
2 | import { ModalResult } from './exception';
3 | 
4 | export function isDatasetMeta(
5 |   meta: IDatasetMeta | ModalResult
6 | ): meta is IDatasetMeta {
7 |   return Object.getOwnPropertyNames(meta).includes('labels');
8 | }
9 | 
--------------------------------------------------------------------------------
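A sketch of the narrowing this type guard enables; `describe` is an illustrative helper, not part of the codebase:

    import { IDatasetMeta } from './hdf';
    import { ModalResult } from './exception';
    import { isDatasetMeta } from './utils';

    function describe(meta: IDatasetMeta | ModalResult): string {
      if (isDatasetMeta(meta)) {
        // narrowed to IDatasetMeta: dtype/shape are available
        return `${meta.dtype} dataset of shape [${meta.shape.join(', ')}]`;
      }
      // otherwise a ModalResult from the error dialog
      return 'no dataset metadata available';
    }

/style/AttributeViewer.css: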
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------
2 | | Copyright (c) Jupyter Development Team.
3 | | Distributed under the terms of the Modified BSD License.
4 | |----------------------------------------------------------------------------*/
5 | 
6 | .jhdf-attribute-table-container {
7 |   overflow: scroll;
8 | }
9 | 
10 | .jhdf-attribute-table {
11 |   width: 100%;
12 |   margin-bottom: 0.25rem;
13 |   text-align: left;
14 |   border-collapse: collapse;
15 |   color: var(--jp-ui-font-color1);
16 | }
17 | 
18 | .jhdf-attribute-table:not(:first-child) {
19 |   margin-top: 1rem;
20 | }
21 | 
22 | .jhdf-attribute-table thead th {
23 |   font-weight: var(--jp-content-font-weight);
24 |   padding: 0.675rem 1rem;
25 |   background-color: var(--jp-layout-color2);
26 |   font-size: var(--jp-content-font-size3);
27 | }
28 | 
29 | .jhdf-attribute-table tbody th,
30 | .jhdf-attribute-table tbody td {
31 |   padding: 0.5rem 1rem;
32 |   vertical-align: top;
33 | }
34 | 
35 | .jhdf-attribute-table tbody th {
36 |   width: 15%; /* acts as min-width */
37 | }
38 | 
39 | .jhdf-attribute-table tbody tr:first-child > th,
40 | .jhdf-attribute-table tbody tr:first-child > td {
41 |   padding-top: 1rem;
42 | }
43 | 
--------------------------------------------------------------------------------
/style/bad.svg:
--------------------------------------------------------------------------------
(svg markup not preserved in this dump)
--------------------------------------------------------------------------------
/style/dataset.svg:
--------------------------------------------------------------------------------
(svg markup not preserved in this dump)
--------------------------------------------------------------------------------
/style/hdf.svg:
--------------------------------------------------------------------------------
(svg markup not preserved in this dump)
--------------------------------------------------------------------------------
/style/hdf_large.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jupyterlab/jupyterlab-hdf5/31ef7b732dbfe98c85d54dfab0f28699a1ab6f62/style/hdf_large.png
--------------------------------------------------------------------------------
/style/index.css:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------
2 | | Copyright (c) Jupyter Development Team.
3 | | Distributed under the terms of the Modified BSD License.
4 | |----------------------------------------------------------------------------*/
5 | @import "./AttributeViewer.css";
6 | 
7 | .jhdf-icon {
8 |   background-image: url(hdf.svg);
9 | }
10 | 
11 | .jhdf-datasetIcon {
12 |   background-image: url(dataset.svg);
13 | }
14 | 
15 | .jhdf-sidepanel {
16 |   background-color: var(--jp-layout-color1);
17 |   height: 100%;
18 | }
19 | 
20 | .jhdf-sidepanel .jp-FileBrowser {
21 |   flex-grow: 1;
22 |   height: 100%;
23 | }
24 | 
25 | .jhdf-userInput {
26 |   overflow: hidden;
27 |   white-space: nowrap;
28 |   text-align: center;
29 |   font-size: large;
30 |   padding: 0px;
31 |   background-color: var(--jp-layout-color1);
32 | }
33 | 
34 | .jp-FileBrowser-toolbar.jp-Toolbar .jp-Toolbar-item.jhdf-userInput {
35 |   flex: 8 8;
36 | }
37 | 
38 | .jhdf-userInput-wrapper {
39 |   background-color: var(--jp-input-active-background);
40 |   border: var(--jp-border-width) solid var(--jp-border-color2);
41 |   height: 30px;
42 |   width: 100%;
43 |   padding: 0 0 0 8px;
44 |   margin: 0 4px 0 0;
45 | }
46 | 
47 | .jhdf-userInput-wrapper:focus-within {
48 |   border: var(--jp-border-width) solid var(--md-blue-500);
49 |   box-shadow: inset 0 0 4px var(--md-blue-300);
50 | }
51 | 
52 | .jhdf-userInput-wrapper input {
53 |   background: transparent;
54 |   float: left;
55 |   border: none;
56 |   outline: none;
57 |   font-size: var(--jp-ui-font-size3);
58 |   color: var(--jp-ui-font-color0);
59 |   width: 100%;
60 |   line-height: 28px;
61 | }
62 | 
63 | .jhdf-userInput-wrapper input::placeholder {
64 |   color: var(--jp-ui-font-color3);
65 |   font-size: var(--jp-ui-font-size1);
66 |   text-transform: uppercase;
67 | }
68 | 
69 | .jhdf-sidepanel .jp-ToolbarButton.jp-Toolbar-item.jhdf-toolbar-item {
70 |   display: block;
71 | }
72 | 
73 | .jhdf-sidepanel .jp-ToolbarButton.jp-Toolbar-item {
74 |   display: none;
75 | }
76 | 
77 | .jhdf-sidepanel .jp-DirListing-headerItem.jp-id-modified {
78 |   display: none;
79 | }
80 | 
81 | .jhdf-sidepanel .jp-DirListing-itemModified {
82 |   display: none;
83 | }
84 | 
85 | .jhdf-errorPanel {
86 |   position: absolute;
87 |   display: flex;
88 |   flex-direction: column;
89 |   justify-content: center;
90 |   align-items: center;
91 |   z-index: 10;
92 |   left: 0;
93 |   top: 0;
94 |   width: 100%;
95 |   height: 100%;
96 |   background: var(--jp-layout-color2);
97 | }
98 | 
99 | .jhdf-errorPanel-image {
100 |   background-size: 100%;
101 |   width: 200px;
102 |   height: 165px;
103 |   background-image: url(bad.svg);
104 | }
105 | 
106 | .jhdf-errorPanel-text {
107 |   font-size: var(--jp-ui-font-size3);
108 |   color: var(--jp-ui-font-color1);
109 |   text-align: center;
110 |   padding: 12px;
111 | }
112 | 
113 | .jhdf-errorModal-text {
114 |   white-space: pre-line;
115 | }
116 | 
--------------------------------------------------------------------------------
/tsconfig.eslint.json:
--------------------------------------------------------------------------------
1 | {
2 |   "extends": "./tsconfigbase",
3 |   "include": ["src/**/*", ".eslintrc.js", ".lintstagedrc.js"]
4 | }
5 | 
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 |   "extends": "./tsconfigbase",
3 |   "compilerOptions": {
4 |     // "inlineSources": true,
5 |     "outDir": "lib",
6 |     "rootDir": "src"
7 |     // "sourceRoot": "./@jupyterlab/hdf5/src"
8 |   },
9 |   "include": ["src/**/*"]
10 | }
11 | 
--------------------------------------------------------------------------------
/tsconfigbase.json:
--------------------------------------------------------------------------------
1 | {
2 |   "$schema": "http://json.schemastore.org/tsconfig",
3 |   "compilerOptions": {
4 |     "allowSyntheticDefaultImports": true,
5 |     "composite": true,
6 |     "declaration": true,
7 |     "esModuleInterop": true,
8 |     "incremental": true,
9 |     "jsx": "react",
10 |     "module": "esnext",
11 |     "moduleResolution": "node",
12 |     "noEmitOnError": true,
13 |     "noImplicitAny": true,
14 |     "noUnusedLocals": true,
15 |     "preserveWatchOutput": true,
16 |     "resolveJsonModule": true,
17 |     "sourceMap": true,
18 |     "strictNullChecks": true,
19 |     "target": "es2017",
20 |     "types": []
21 |   }
22 | }
23 | 
--------------------------------------------------------------------------------