├── .babelrc ├── .editorconfig ├── .eslintrc.js ├── .gitignore ├── .gitmodules ├── .npmignore ├── .travis.yml ├── LICENSE ├── README.md ├── bin ├── arctic-data-downloader.js ├── arctic-dataset-list-builder.js ├── arctic-viewer-cli.js ├── bundle.js ├── cinema-2-query-data-model-cli.js ├── cinema │ ├── convert-spec-b.py │ ├── specA.js │ └── specB.js ├── network.js ├── query-data-model-2-cinema-cli.js ├── run-dev.js └── server.js ├── dist ├── 1dc35d25e61d819a9c357074014867ab.ttf ├── 25a32416abee198dd821b0b17a198a8f.eot ├── 401b1af1d2627cc7eaec1708dff00bde.png ├── c8ddf1e5e5bf3682bc7bebf30f394148.woff ├── d7c639084f684d66a1bc66855d193ed8.svg ├── e6cf7c6ec7c2d6f670ae9d762604cb0b.woff2 ├── favicon.ico ├── index.html ├── plotly.min.js ├── sample-data.json ├── three.js └── viewer.js ├── docs ├── .gitignore ├── config.js ├── content │ ├── api │ │ └── index.md │ ├── docs │ │ ├── CompositeFormat.md │ │ ├── DepthSortedCompositeFormat.md │ │ ├── EnsembleFormat.md │ │ ├── ImageBasedFormat.md │ │ ├── ProbeFormat.md │ │ ├── composite-sprite.jpg │ │ ├── contributing.md │ │ ├── electron.md │ │ ├── index.md │ │ ├── probe-sprite.png │ │ ├── tonic-volume-data-format.png │ │ ├── tools.md │ │ └── troubleshooting.md │ ├── icon │ │ ├── favicon-160x160.png │ │ ├── favicon-16x16.png │ │ ├── favicon-196x196.png │ │ ├── favicon-32x32.png │ │ └── favicon-96x96.png │ ├── index.jade │ ├── logo.png │ ├── logo.svg │ └── screenshots │ │ ├── ArcticViewer-Earth.jpg │ │ ├── ArcticViewer-EarthMagicLens.jpg │ │ ├── ArcticViewer-JetPropulsion.jpg │ │ ├── ArcticViewer-ListDataSets.jpg │ │ ├── ArcticViewer-Medical.jpg │ │ ├── ArcticViewer-Prober.jpg │ │ └── ArcticViewer.png ├── data │ └── menu.yml └── tpl │ ├── __en__ │ └── __sidebar__ ├── electron ├── README.md ├── package-lock.json ├── package.json └── src │ ├── aboutPage.js │ ├── icon.icns │ ├── icon.png │ ├── index.html │ ├── index.js │ └── renderer.js ├── lib ├── arctic-viewer.js ├── factory.js ├── types │ ├── CDF.js │ ├── Chart.js │ ├── CompositeImageQueryDataModel.js │ ├── CompositePipeline.js │ ├── DataProber.js │ ├── DepthComposite.js │ ├── FloatImage.js │ ├── Geometry.js │ ├── Histogram2D.js │ ├── Histogram2DPlotly.js │ ├── ImageQueryDataModel.js │ ├── SortedComposite.js │ ├── TimeFloatImage.js │ ├── VTKGeometry.js │ ├── VTKVolume.js │ ├── VTKVolumeSLIC.js │ ├── VTKVolumeSLICDebug.js │ └── index.js └── viewer │ ├── DataSetView.js │ ├── icon.png │ ├── index.js │ └── style.css ├── package-lock.json ├── package.json ├── prettier.config.js ├── scripts └── examples │ ├── paraview │ ├── mpas │ │ ├── earth-contours-sorted-composite.py │ │ └── raw-probe-flat-earth.py │ ├── samples │ │ ├── FloatImage-diskout.py │ │ ├── Geometry-can.py │ │ ├── Geometry-diskout.py │ │ ├── Geometry-multicontour-diskout.py │ │ ├── MPI-sphere.py │ │ ├── VTKGeometry-can.py │ │ ├── camera-cylindrical.py │ │ ├── camera-cylindrical_v2.py │ │ ├── camera-spherical.py │ │ ├── composite-diskout.py │ │ ├── composite-wavelet.py │ │ └── time-management.py │ └── tests │ │ └── extract-scalar.py │ ├── tests │ ├── data-handler.py │ └── off-center-rotation.py │ └── vtk │ ├── medical │ ├── head-ct-volume-step-func.py │ ├── head-ct-volume.py │ ├── head-ct.py │ └── head-mri.py │ ├── mpas │ └── raw-probe-flat-earth.py │ ├── samples │ ├── cone.py │ ├── diskout-volume.py │ ├── multi-spheres-volume.py │ ├── syntax-evolution-volume_v2.py │ ├── syntax-evolution-volume_v3.py │ ├── syntax-evolution-volume_v4.py │ └── wavelet-raw-data-prober.py │ └── tests │ └── convert-stack-to-sorted.py ├── site.config.js └── webpack.config.js 
/.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | presets: [ 3 | 'react', 4 | ['env', { 5 | targets: { 6 | browsers: ['last 2 versions'], 7 | }, 8 | }], 9 | ], 10 | env: { 11 | test: { 12 | plugins: ['istanbul'], 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # editorconfig.org 2 | root = true 3 | 4 | [*] 5 | indent_style = space 6 | indent_size = 2 7 | end_of_line = lf 8 | charset = utf-8 9 | trim_trailing_whitespace = true 10 | insert_final_newline = true 11 | 12 | [*.md] 13 | trim_trailing_whitespace = false 14 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | var prettierConf = require('./prettier.config.js'); 2 | 3 | module.exports = { 4 | extends: ['airbnb', 'prettier'], 5 | rules: { 6 | 'prettier/prettier': ['error', prettierConf], 7 | 8 | // But we want the following 9 | 'no-multi-spaces': ["error", { exceptions: { "ImportDeclaration": true } }], 10 | 'no-param-reassign': ["error", { props: false }], 11 | 'no-unused-vars': ["error", { args: 'none' }], 12 | 'prefer-destructuring': ["error", { VariableDeclarator: { array: false, object: true }, AssignmentExpression: { array: false, object: false } }, { enforceForRenamedProperties: false }], 13 | 'import/no-extraneous-dependencies': 0, // Needed for tests 14 | // 'no-mixed-operators': 'error', // Wish we can put it back with prettier 15 | 16 | // Not for us 17 | 'jsx-a11y/label-has-for': 0, 18 | 'no-console': 0, 19 | 'no-plusplus': 0, 20 | 'import/no-named-as-default': 0, 21 | 'import/no-named-as-default-member': 0, 22 | 'prefer-destructuring': 0, // Can have unwanted side effect 23 | 'react/jsx-filename-extension': 0, 24 | 'jsx-a11y/no-static-element-interactions': 0, 25 | 'jsx-a11y/click-events-have-key-events': 0, 26 | 'jsx-a11y/no-noninteractive-element-interactions': 0, 27 | 28 | // Introduced with new eslint 29 | // and no time to fix them... 30 | // [...] 
31 | 'linebreak-style': 0, 32 | 'no-useless-escape': 0, 33 | 'no-nested-ternary': 0, 34 | 'react/forbid-prop-types': 0, 35 | 'react/no-array-index-key': 0, 36 | }, 37 | plugins: [ 38 | 'prettier' 39 | ], 40 | globals: { 41 | ArcticViewer: true, 42 | }, 43 | 'settings': { 44 | 'import/resolver': 'webpack' 45 | }, 46 | env: { 47 | es6: true, 48 | browser: true, 49 | }, 50 | }; 51 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .npmrc 3 | .sass-cache 4 | bundles 5 | node_modules/ 6 | electron/bin 7 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "docs/www"] 2 | path = docs/www 3 | url = https://github.com/Kitware/in-situ-data-viewer.git 4 | branch = gh-pages 5 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | bundles 2 | docs 3 | .travis.yml 4 | .gitmodule 5 | .npmrc 6 | site.config.js 7 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | sudo: false 3 | notifications: 4 | email: 5 | recipients: 6 | - sebastien.jourdain@kitware.com 7 | on_success: change 8 | on_failure: always 9 | cache: 10 | directories: 11 | - node_modules 12 | node_js: 13 | - '8' 14 | script: 15 | - npm run build:release 16 | - git config --global user.name "Travis CI" 17 | - git config --global user.email "sebastien.jourdain@kitware.com" 18 | - export GIT_PUBLISH_URL=https://${GH_TOKEN}@github.com/Kitware/arctic-viewer.git 19 | - if [ "$TRAVIS_BRANCH" == "master" ]; then npm run doc:publish; fi 20 | after_success: 21 | - npm run semantic-release 22 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015, Kitware Inc. 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | * Redistributions of source code must retain the above copyright 7 | notice, this list of conditions and the following disclaimer. 8 | * Redistributions in binary form must reproduce the above copyright 9 | notice, this list of conditions and the following disclaimer in the 10 | documentation and/or other materials provided with the distribution. 11 | * Neither the name of the nor the 12 | names of its contributors may be used to endorse or promote products 13 | derived from this software without specific prior written permission. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY 19 | DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## [ArcticViewer](http://kitware.github.io/arctic-viewer/) 2 | 3 | [![Build Status](https://travis-ci.org/Kitware/arctic-viewer.svg)](https://travis-ci.org/Kitware/arctic-viewer) 4 | [![Dependency Status](https://david-dm.org/kitware/arctic-viewer.svg)](https://david-dm.org/kitware/arctic-viewer) 5 | [![semantic-release](https://img.shields.io/badge/%20%20%F0%9F%93%A6%F0%9F%9A%80-semantic--release-e10079.svg)](https://github.com/semantic-release/semantic-release) 6 | ![npm-download](https://img.shields.io/npm/dm/arctic-viewer.svg) 7 | ![npm-version-requirement](https://img.shields.io/badge/npm->=3.0.0-brightgreen.svg) 8 | ![node-version-requirement](https://img.shields.io/badge/node->=5.0.0-brightgreen.svg) 9 | 10 | ### Goal ### 11 | 12 | Provide a Data Viewer (ArcticView) based on Web technologies and relying on the 13 | user browser to navigate and explore data generated InSitu or in batch mode. 14 | 15 | ## Installation 16 | 17 | ``` 18 | $ npm install -g arctic-viewer 19 | ``` 20 | 21 | After installing the package you will get one executable **ArcticViewer** with 22 | the following set of options. 23 | 24 | ``` 25 | $ ArcticViewer 26 | 27 | Usage: ArcticViewer [options] 28 | 29 | Options: 30 | 31 | -h, --help output usage information 32 | -V, --version output the version number 33 | -p, --port [3000] Start web server with given port 34 | -d, --data [directory/http] Data directory to serve 35 | -s, --server-only Do not open the web browser 36 | 37 | -o, --output-pattern [path/pattern] Provide a path/pattern for the exported images 38 | 39 | --download-sample-data Choose data to download inside current directory 40 | --download [http://remote-host/data] Download remote data inside current directory 41 | 42 | -M, --magic-lens Enable MagicLens inside client configuration 43 | -S, --single-view Enable SingleView inside client configuration 44 | -R, --recording Enable Recording inside client configuration 45 | -D, --development Enable Development inside client configuration 46 | 47 | ``` 48 | 49 | In order to try it out, you should download some sample datasets 50 | (unless you already have some ;-) and run the data viewer on them. 
51 | 52 | Here is an example of how to download some sample data: 53 | 54 | ```sh 55 | $ mkdir sample-data && cd $_ 56 | $ ArcticViewer --download-sample-data 57 | 58 | | Available datasets for download (path: /tmp) 59 | | (1) 40.0 MB - diskout-composite 60 | | (2) 94.2 MB - ensemble 61 | | (3) 292 KB - garfield 62 | | (4) 13.7 MB - head_ct_3_features 63 | | (5) 13.1 MB - head_ct_4_features 64 | | (6) 50.8 MB - hydra-image-fluid-velocity 65 | | (7) 162.3 MB - mpas-composite-earth 66 | | (8) 37.5 MB - mpas-flat-earth-prober 67 | | (9) 552.5 MB - mpas-hd-500-7t 68 | | 69 | | Press Enter to quit or the dataset number to download: 1 70 | | Press Enter to quit or the dataset number to download: 5 71 | | Press Enter to quit or the dataset number to download: 8 72 | | Press Enter to quit or the dataset number to download: 73 | | 74 | | => You will be able to try ArcticViewer with these commands: 75 | | 76 | | $ ArcticViewer -d /tmp/head_ct_4_features 77 | | $ ArcticViewer -d /tmp/diskout-composite 78 | | $ ArcticViewer -d /tmp/mpas-flat-earth-prober 79 | | 80 | | Thank you for trying this out... 81 | 82 | ``` 83 | 84 | Then you can view them using the commands suggested in that output or by running the following 85 | command lines: 86 | 87 | ```sh 88 | $ ArcticViewer -d ./sample-data/mpas-probe-flat-earth 89 | ``` 90 | 91 | This will load MPAS oceanic simulation data that represents a 3D volume of a 92 | flattened version of the Earth, with temperature and salinity information for the oceans. 93 | 94 | From that data you can look at a slice of the data along any axis and move the 95 | slice back and forth using your input device's scroll. 96 | 97 | If you want to zoom or pan, you will have to use scroll+[any modifier key] or drag+[any modifier key]. 98 | 99 | ```sh 100 | $ ArcticViewer -d ./sample-data/hydra-image-fluid-velocity 101 | ``` 102 | 103 | This will load Hydra CFD simulation data that represents the fluid velocity 104 | using volume rendering techniques. 105 | 106 | ## Documentation 107 | 108 | See the [documentation](https://kitware.github.io/arctic-viewer) for a 109 | getting started guide, advanced documentation, and API descriptions. 110 | 111 | #### Licensing 112 | 113 | **arctic-viewer**, aka ArcticViewer, is licensed under [BSD 3-Clause](LICENSE). 114 | 115 | #### Getting Involved 116 | 117 | Fork our repository and do great things. At [Kitware](http://www.kitware.com), 118 | we've been contributing to open-source software for 15 years and counting, and 119 | want to make **arctic-viewer** useful to as many people as possible.
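
If you would rather embed the viewer in your own Node script than use the CLI shown above, the Express application used by the CLI lives in `bin/server.js` and can be required directly. The sketch below illustrates that usage; the require path, dataset path, and export pattern are placeholders to adapt to your setup, and this is not a formally documented API.

```js
// Minimal sketch: start the ArcticViewer server from your own script.
// The require path assumes the package layout of this repository.
var createServer = require('arctic-viewer/bin/server');

var app = createServer('/path/to/my/dataset', {
  clientConfiguration: { MagicLens: false, SingleView: false, Recording: false, Development: false },
  output: './export/{time}.jpg', // pattern used by the /export endpoint (placeholder)
});

app.listen(3000);
console.log('ArcticViewer serving on http://localhost:3000/');
```
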
120 | -------------------------------------------------------------------------------- /bin/arctic-dataset-list-builder.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | 3 | var fs = require('fs'), 4 | path = require('path'), 5 | du = require('du'); 6 | 7 | function formatFileSize(size) { 8 | var units = [' B', ' KB', ' MB', ' GB', ' TB'], 9 | unitIdx = 0; 10 | 11 | while(size > 1000) { 12 | unitIdx++; 13 | size /= 1000; 14 | } 15 | 16 | // Truncate decimals + add unit 17 | return size.toFixed(1) + units[unitIdx]; 18 | } 19 | 20 | function updateDatasetSize(basepath) { 21 | var indexPath = path.join(basepath, 'index.json'), 22 | originalData = require(indexPath); 23 | du(basepath, function(err, size) { 24 | if(err) { 25 | return console.log('Error computing size of', basepath); 26 | } 27 | 28 | // Make sure we have a metadata section 29 | if(!originalData.metadata) { 30 | originalData.metadata = {}; 31 | } 32 | 33 | // Add size section 34 | originalData.metadata.size = formatFileSize(size); 35 | 36 | // Save to disk 37 | fs.writeFile(indexPath, JSON.stringify(originalData, null, 2)); 38 | }); 39 | } 40 | 41 | function addDataset(listToFill, fullpath, dirName, json) { 42 | var dataset = {}, 43 | metadata = json.metadata || {}; 44 | 45 | // Extract everything 46 | dataset.name = metadata.title || dirName; 47 | dataset.description = metadata.description || 'No description available'; 48 | dataset.size = metadata.size || ''; 49 | dataset.thumbnail = metadata.thumbnail; 50 | dataset.type = json.type; 51 | dataset.path = dirName + '/index.json'; 52 | 53 | if(!metadata.size) { 54 | // Update size for next time 55 | updateDatasetSize(fullpath); 56 | } 57 | 58 | // Find thumbnail if any 59 | if(!dataset.thumbnail) { 60 | ['thumbnail.png', 'thumbnail.jpg'].forEach(function(th) { 61 | var fullImagePath = path.join(fullpath, th); 62 | if (fs.existsSync(fullImagePath)) { 63 | dataset.thumbnail = dirName + '/' + th; 64 | } 65 | }); 66 | } 67 | 68 | listToFill.push(dataset); 69 | } 70 | 71 | function processDirectory(basePath) { 72 | var dataToLoadPath = path.join(basePath, 'index.json'); 73 | if (fs.existsSync(dataToLoadPath)) { 74 | var existingDataset = require(dataToLoadPath); 75 | try { 76 | if(existingDataset.type.indexOf('arctic-viewer-list') === 0) { 77 | // OK 78 | } else { 79 | return; 80 | } 81 | } catch(e) { 82 | return; 83 | } 84 | 85 | } 86 | 87 | var datasets = [], 88 | result = { type: ['arctic-viewer-list'], list: datasets }, 89 | subDirectories = fs.readdirSync(basePath).filter(function(file) { return fs.statSync(path.join(basePath, file)).isDirectory(); }); 90 | 91 | subDirectories.forEach(function(dirName) { 92 | var dsPath = path.join(basePath, dirName), 93 | indexPath = path.join(dsPath, 'index.json'); 94 | if (fs.existsSync(indexPath)) { 95 | addDataset(datasets, dsPath, dirName, require(indexPath)); 96 | } 97 | }); 98 | 99 | 100 | fs.writeFile(path.join(basePath, 'index.json'), JSON.stringify(result, null, 2)); 101 | } 102 | 103 | // Expose method 104 | module.exports = processDirectory; 105 | -------------------------------------------------------------------------------- /bin/arctic-viewer-cli.js: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env node 2 | 3 | /* eslint-disable */ 4 | 5 | require('shelljs/global'); 6 | 7 | var fs = require('fs'), 8 | path = require('path'), 9 | program = require('commander'), 10 | downloader = require('./arctic-data-downloader.js'), 11 | clientConfiguration = {}, 12 | ipList = require('./network'), 13 | pkg = require('../package.json'), 14 | version = /semantically-release/.test(pkg.version) ? 'development version' : pkg.version; 15 | 16 | function handlePort(value) { 17 | if (!isNaN(parseInt(value, 10))) { 18 | return parseInt(value, 10); 19 | } 20 | throw Error('port option requires a number'); 21 | } 22 | 23 | program 24 | .version(version) 25 | .option('-p, --port [3000]', 'Start web server with given port', handlePort, 3000) 26 | .option('-d, --data [directory/http]', 'Data directory to serve') 27 | .option('-s, --server-only', 'Do not open the web browser\n') 28 | 29 | .option('-o, --output-pattern [path/pattern]', 'Provide a path/pattern for the exported images\n', './export/{__}.jpg') 30 | 31 | .option('--download-sample-data', 'Choose data to download inside current directory') 32 | .option('--download [http://remote-host/data]', 'Download remote data inside current directory\n') 33 | 34 | .option('-M, --magic-lens', 'Enable MagicLens inside client configuration') 35 | .option('-S, --single-view', 'Enable SingleView inside client configuration') 36 | .option('-R, --recording', 'Enable Recording inside client configuration') 37 | .option('-D, --development', 'Enable Development inside client configuration') 38 | 39 | .parse(process.argv); 40 | 41 | // Update client configuration: 42 | clientConfiguration.MagicLens = !!program.magicLens; 43 | clientConfiguration.SingleView = !!program.singleView; 44 | clientConfiguration.Recording = !!program.recording; 45 | clientConfiguration.Development = !!program.development; 46 | 47 | if (!process.argv.slice(2).length || !program.help) { 48 | program.outputHelp(); 49 | process.exit(); 50 | } 51 | 52 | if(program.downloadSampleData) { 53 | downloader.downloadSampleData(); 54 | } else if(program.download) { 55 | downloader.downloadData(program.download); 56 | } else { 57 | var dataPath = program.data ? program.data : process.cwd(), 58 | app = require('./server')(dataPath, { clientConfiguration: clientConfiguration, output: program.output }); 59 | 60 | // Start server and listening 61 | app.listen(program.port); 62 | 63 | // Print server information 64 | if(ipList.length === 1) { 65 | console.log("\nArcticViewer\n => Serve " + dataPath + "\n | http://" + ipList[0].ip + ":" + program.port + "/\n"); 66 | } else { 67 | console.log("\nArcticViewer\n => Serve " + dataPath + " on port " + program.port + "\n"); 68 | ipList.forEach(function(l){ 69 | console.log(" ", l.name, "=> http://" + l.ip + ":" + program.port + "/"); 70 | }); 71 | console.log(); 72 | } 73 | 74 | // Open browser if asked 75 | if (!program.serverOnly) { 76 | require('open')('http://localhost:' + program.port); 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /bin/bundle.js: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env node 2 | 3 | /* eslint-disable */ 4 | 5 | require('shelljs/global'); 6 | 7 | var path = require('path'), 8 | tarball = require('tarball-extract'), 9 | nodeDest = path.join(__dirname, '../bundles/nodes'), 10 | bundleRoot = path.join(__dirname, '../bundles'), 11 | packageJSON = require(path.join(__dirname, '../package.json')), 12 | bundleIdx = 0, 13 | baseNodeModulePath = path.join(__dirname, '../node_modules'), 14 | directoryToCopy = [ 15 | path.join(__dirname, '../bin'), 16 | path.join(__dirname, '../dist'), 17 | path.join(__dirname, '../LICENSE'), 18 | ], 19 | nodePaths = [ 20 | { name: 'osx', url: 'https://nodejs.org/dist/v4.1.1/node-v4.1.1-darwin-x64.tar.gz' }, 21 | { name: 'linux_x64', url: 'https://nodejs.org/dist/v4.1.1/node-v4.1.1-linux-x64.tar.gz' }, 22 | { name: 'linux_x86', url: 'https://nodejs.org/dist/v4.1.1/node-v4.1.1-linux-x86.tar.gz' }, 23 | // { name: 'win32', url: [ 'https://nodejs.org/dist/v4.1.1/win-x86/node.exe', 'https://nodejs.org/dist/v4.1.1/win-x86/node.lib'] }, 24 | // { name: 'win64', url: [ 'https://nodejs.org/dist/v4.1.1/win-x64/node.exe', 'https://nodejs.org/dist/v4.1.1/win-x64/node.lib'] } 25 | ], 26 | unixScript = '#!/bin/bash\n' 27 | + 'SOURCE="${BASH_SOURCE[0]}"\n' 28 | + 'while [ -h "$SOURCE" ]; do\n' 29 | + ' DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"\n' 30 | + ' SOURCE="$(readlink "$SOURCE")"\n' 31 | + ' [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"\n' 32 | + 'done\n' 33 | + 'DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"\n' 34 | + '$DIR/bin/node $DIR/bin/arctic-viewer-cli.js $@'; 35 | 36 | // Create download destination 37 | rm('-rf', bundleRoot); 38 | mkdir('-p', nodeDest); 39 | 40 | // Process next bundle 41 | function next() { 42 | if(bundleIdx < nodePaths.length) { 43 | var bundle = nodePaths[bundleIdx++]; 44 | generateBundle(bundle); 45 | } else { 46 | // Clean download 47 | rm('-rf', nodeDest); 48 | } 49 | } 50 | 51 | // Generate a bundle 52 | function generateBundle(nodeConf) { 53 | var filePath = nodeConf.url.split('/').pop(), 54 | bundleDest = path.join(bundleRoot, nodeConf.name); 55 | nodeExecDest = path.join(bundleDest, 'bin'); 56 | 57 | // Copy ArcticViewer into bundle 58 | directoryToCopy.forEach(function(d) { 59 | cp('-r', d, bundleDest); 60 | }) 61 | 62 | // Copy only needed node_modules 63 | var destNodeModules = path.join(bundleDest, 'node_modules'); 64 | mkdir('-p', destNodeModules); 65 | for(var moduleName in packageJSON.dependencies) { 66 | cp('-r', path.join(baseNodeModulePath, moduleName), destNodeModules); 67 | } 68 | 69 | // Create Exec file 70 | var execScript = path.join(bundleDest, 'ArcticViewer'); 71 | unixScript.to(execScript); 72 | chmod('u+x', execScript); 73 | 74 | // Download node and put the binary into bundle 75 | tarball.extractTarballDownload( 76 | nodeConf.url, 77 | path.join(nodeDest, filePath), 78 | nodeDest, 79 | {}, 80 | function(err, result) { 81 | if(!err) { 82 | // copy node exec 83 | var dirName = filePath.split('.tar')[0]; 84 | cp(path.join(nodeDest, dirName, 'bin', 'node'), nodeExecDest); 85 | 86 | // Move to next bundle 87 | next(); 88 | } else { 89 | console.log(' | oups something wrong happened while downloading ' + nodeConf.url); 90 | } 91 | } 92 | ); 93 | } 94 | 95 | // Start bundle process 96 | next(); 97 | -------------------------------------------------------------------------------- /bin/cinema-2-query-data-model-cli.js: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env node 2 | 3 | /* eslint-disable */ 4 | 5 | var fs = require('fs'), 6 | path = require('path'), 7 | usage = 'Usage: Cinema2ArcticViewer /path/to/Cinema/DataSet/directory'; 8 | 9 | // Make sure we have valid argument 10 | if(process.argv.length !== 3) { 11 | console.log(usage); 12 | return; 13 | } 14 | 15 | function ask(message, callback) { 16 | process.stdin.resume(); 17 | process.stdin.setEncoding('utf8'); 18 | process.stdout.write(message); 19 | process.stdin.on('data', function (text) { 20 | if(text[text.length - 1] === '\n') { 21 | process.stdin.pause(); 22 | callback(text.substring(0, text.length - 1)); 23 | } 24 | }); 25 | } 26 | 27 | // Load Tonic descriptor 28 | var cinemaDescriptor = require(process.argv[2] + '/info.json'); 29 | 30 | // Find the possible type mapping 31 | if(cinemaDescriptor.metadata && cinemaDescriptor.metadata.type === 'parametric-image-stack') { 32 | // Spec A 33 | require('./cinema/specA').queryDataModel(cinemaDescriptor, process.argv[2]); 34 | } else if (cinemaDescriptor.associations) { 35 | // Assuming Spec B 36 | console.log('The selected Cinema database seems to be a SpecB. A more complex convertion will be needed.'); 37 | ask('Please provide pvpython path\n => ', function(pvpythonPath){ 38 | ask('Please provide a destination directory\n => ', function(destPath){ 39 | console.log('\nStart converting database into directory', destPath); 40 | require('./cinema/specB').queryDataModel(cinemaDescriptor, process.argv[2], destPath, pvpythonPath); 41 | }); 42 | }); 43 | 44 | } else { 45 | console.log('The following Cinema database can not be converted into ArcticViewer dataset.'); 46 | console.log('=>', cinemaDescriptor.type); 47 | } 48 | -------------------------------------------------------------------------------- /bin/cinema/specA.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs'); 2 | var path = require('path'); 3 | var reservedNames = { 4 | phi: 'Camera_X', 5 | theta: 'Camera_Y', 6 | }; 7 | var queryDataModelBinds = { 8 | theta: { 9 | mouse: { drag: { modifier: 0, coordinate: 1, step: 30, orientation: +1 } }, 10 | }, 11 | phi: { 12 | mouse: { drag: { modifier: 0, coordinate: 0, step: 10, orientation: +1 } }, 13 | }, 14 | }; 15 | 16 | function convertQueryDataModelToCinemaSpecA(queryDataModelMetaData, destinationDirectory) { 17 | var cinemaFormat = { 18 | type: "simple", 19 | version: "1.1", 20 | metadata: { 21 | type: "parametric-image-stack" 22 | }, 23 | name_pattern: queryDataModelMetaData.data[0].pattern, 24 | arguments: {} 25 | } 26 | 27 | // Register each arguments 28 | for(var name in queryDataModelMetaData.arguments) { 29 | var cinemaArg = {}; 30 | 31 | // Add values 32 | cinemaArg.label = name; // Cinema does not support (label != name) 33 | cinemaArg.type = queryDataModelMetaData.arguments[name].ui ? (queryDataModelMetaData.arguments[name].ui === 'slider' ? 
'range' : 'list') : 'list'; 34 | cinemaArg.values = queryDataModelMetaData.arguments[name].values; 35 | cinemaArg.default = cinemaArg.values[queryDataModelMetaData.arguments[name].default || 0]; 36 | 37 | if(reservedNames[name]) { 38 | cinemaFormat.name_pattern = cinemaFormat.name_pattern.replace('{'+name+'}','{' + reservedNames[name] + '}'); 39 | name = cinemaArg.label = reservedNames[name]; 40 | } 41 | 42 | cinemaFormat.arguments[name] = cinemaArg; 43 | } 44 | 45 | // Write into info.json 46 | var outputFilename = path.join(destinationDirectory, 'info.json'); 47 | fs.writeFile(outputFilename, JSON.stringify(cinemaFormat, null, 2), function(err) { 48 | if(err) { 49 | console.log(err); 50 | } else { 51 | console.log("Dataset converted and Cinema Store saved to " + outputFilename); 52 | } 53 | }); 54 | } 55 | 56 | function convertCinemaArgToQueryDataModel(argName, cinemaArg) { 57 | var queryDataModelArg = {}; 58 | 59 | // Fill data if needed 60 | queryDataModelArg.values = cinemaArg.values; 61 | if(cinemaArg.values.indexOf(cinemaArg.default) > 0) { 62 | queryDataModelArg.default = cinemaArg.values.indexOf(cinemaArg.default); 63 | } 64 | if(cinemaArg.type === 'range') { 65 | queryDataModelArg.ui = 'slider'; 66 | } 67 | if(cinemaArg.label !== argName) { 68 | queryDataModelArg.label = cinemaArg.label; 69 | } 70 | 71 | // Add default binding 72 | if(queryDataModelBinds[argName]) { 73 | queryDataModelArg.bind = queryDataModelBinds[argName]; 74 | } 75 | 76 | // Add default looping 77 | if(argName === 'phi') { 78 | queryDataModelArg.loop = 'modulo'; 79 | } 80 | 81 | return queryDataModelArg; 82 | } 83 | 84 | function convertCinemaSpecAToQueryDataModel(cinemaMetadata, destinationDirectory) { 85 | var queryDataModelFormat = { 86 | type: [ 'tonic-query-data-model' ], 87 | arguments_order: [], 88 | arguments: {}, 89 | data: [{ name: "image", type: "blob", mimeType: "image/", pattern: cinemaMetadata.name_pattern }], 90 | metadata: {} 91 | }; 92 | 93 | // Extract mime type 94 | var patternList = cinemaMetadata.name_pattern.split('.'); 95 | queryDataModelFormat.data[0].mimeType += patternList[patternList.length - 1]; 96 | 97 | // Process arguments 98 | for(var argName in cinemaMetadata.arguments) { 99 | var cinemaArg = cinemaMetadata.arguments[argName], 100 | queryDataModelArg = convertCinemaArgToQueryDataModel(argName, cinemaArg); 101 | 102 | if(queryDataModelArg.values.length > 1) { 103 | queryDataModelFormat.arguments_order.push(argName); 104 | } 105 | queryDataModelFormat.arguments[argName] = queryDataModelArg; 106 | } 107 | 108 | // Process metadata 109 | for(var metaKey in cinemaMetadata.metadata) { 110 | if(metaKey !== 'type') { 111 | queryDataModelFormat.metadata[metaKey] = cinemaMetadata.metadata[metaKey]; 112 | } 113 | } 114 | 115 | // Write into cinema.json 116 | var outputFilename = path.join(destinationDirectory, 'index.json'); 117 | fs.writeFile(outputFilename, JSON.stringify(queryDataModelFormat, null, 2), function(err) { 118 | if(err) { 119 | console.log(err); 120 | } else { 121 | console.log("Dataset converted from Cinema Store Spec A to ArcticViewer dataset: " + outputFilename); 122 | } 123 | }); 124 | } 125 | 126 | module.exports = { 127 | cinema: convertQueryDataModelToCinemaSpecA, 128 | queryDataModel: convertCinemaSpecAToQueryDataModel, 129 | queryDataModelArg: convertCinemaArgToQueryDataModel 130 | }; 131 | -------------------------------------------------------------------------------- /bin/network.js: 
-------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | 3 | var os = require('os'), 4 | ifaces = os.networkInterfaces(), 5 | networkInterfaces = []; 6 | 7 | Object.keys(ifaces).forEach(function (ifname) { 8 | var alias = 0; 9 | 10 | ifaces[ifname].forEach(function (iface) { 11 | if ('IPv4' !== iface.family || iface.internal !== false) { 12 | // skip over internal (i.e. 127.0.0.1) and non-ipv4 addresses 13 | return; 14 | } 15 | 16 | if (alias >= 1) { 17 | // this single interface has multiple ipv4 addresses 18 | networkInterfaces.push({ name: ifname + ':' + alias , ip: iface.address }); 19 | } else { 20 | // this interface has only one ipv4 adress 21 | networkInterfaces.push({name: ifname , ip: iface.address}); 22 | } 23 | ++alias; 24 | }); 25 | }); 26 | 27 | module.exports = networkInterfaces; 28 | -------------------------------------------------------------------------------- /bin/query-data-model-2-cinema-cli.js: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env node 2 | 3 | /* eslint-disable */ 4 | 5 | var fs = require('fs'), 6 | path = require('path'), 7 | usage = 'Usage: ArcticViewer2Cinema /path/to/Arctic/DataSet/directory'; 8 | 9 | // Make sure we have valid argument 10 | if(process.argv.length !== 3) { 11 | console.log(usage); 12 | return; 13 | } 14 | 15 | // Load Tonic descriptor 16 | var queryDataModelDescriptor = require(process.argv[2] + '/index.json'); 17 | 18 | // Find the possible type mapping 19 | if(queryDataModelDescriptor.type.length === 1 && queryDataModelDescriptor.type[0] === 'tonic-query-data-model') { 20 | // Spec A 21 | require('./cinema/specA').cinema(queryDataModelDescriptor, process.argv[2]); 22 | } else { 23 | console.log('The following ArcticViewer dataset can not be converted into Cinema database.'); 24 | console.log('=>', queryDataModelDescriptor.type); 25 | } 26 | -------------------------------------------------------------------------------- /bin/run-dev.js: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env node 2 | 3 | /* eslint-disable */ 4 | 5 | require('shelljs/global'); 6 | 7 | var path = require('path'), 8 | tonicPath = path.join(__dirname, '../../..'), 9 | arcticNodeModulesPath = path.join(__dirname, '../node_modules'), 10 | tonicList = require(path.join(tonicPath, 'bin', 'tonic-suite.json')); 11 | 12 | // Index components 13 | tonicList.forEach(function(tonicItem) { 14 | // Only with tonic-components 15 | if(tonicItem.path == 'tonic-components') { 16 | // Remove Arctic tonic libs 17 | rm('-rf', path.join(arcticNodeModulesPath, tonicItem.name, 'lib')); 18 | 19 | // Copy dev lib 20 | cp('-r', path.join(tonicPath, tonicItem.path, tonicItem.name, 'lib'), path.join(arcticNodeModulesPath, tonicItem.name)); 21 | } 22 | }); 23 | 24 | // Build new viewer 25 | exec('npm run build'); 26 | 27 | // Run local code base 28 | var cmd = [ path.join(__dirname, 'arctic-viewer-cli.js'), '-s', '-d' ]; 29 | for (var i = 2; i < process.argv.length; i++) { 30 | cmd.push(process.argv[i]); 31 | } 32 | exec(cmd.join(' ')); 33 | 34 | -------------------------------------------------------------------------------- /bin/server.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | 3 | /* global mkdir */ 4 | require('shelljs/global'); 5 | 6 | var path = require('path'), 7 | fs = require('fs'), 8 | express = require('express'), 9 | bodyParser = require('body-parser'), 10 | gzipStatic = require('connect-gzip-static'), 11 | httpProxy = require('http-proxy'), 12 | preCheckDataDir = require('./arctic-dataset-list-builder'), 13 | tenSeconds = 10000; 14 | 15 | function getExportPath(result, args) { 16 | var keyPattern = ['{', '}']; 17 | 18 | for (var opt in args) { 19 | result = result.replace(keyPattern.join(opt), args[opt]); 20 | } 21 | 22 | // Create directory if need be 23 | mkdir('-p', path.dirname(result)); 24 | 25 | return result; 26 | } 27 | 28 | function removeHead(rawString, keyword) { 29 | var cutIdx = rawString.indexOf(keyword); 30 | 31 | if (cutIdx === -1) { 32 | return null; 33 | } 34 | 35 | // Need to shift idx and cut string 36 | cutIdx += keyword.length; 37 | return rawString.slice(cutIdx); 38 | } 39 | 40 | function extractImageBase64(rawString) { 41 | return removeHead(rawString, 'base64,'); 42 | } 43 | 44 | module.exports = function(dataPath, config) { 45 | // for electron 46 | var clientConfiguration = { 47 | MagicLens: false, 48 | SingleView: false, 49 | Recording: false, 50 | Development: false, 51 | }, 52 | programOutput = './export/{__}.jpg'; 53 | 54 | if (config.clientConfiguration) { 55 | for (var key in config.clientConfiguration) { 56 | clientConfiguration[key] = config.clientConfiguration[key]; 57 | } 58 | } 59 | 60 | if (config.output) { 61 | programOutput = config.output; 62 | } 63 | 64 | var needProxy = (dataPath.indexOf('http') === 0); 65 | 66 | // Handle relative path 67 | if (dataPath[0] === '.') { 68 | dataPath = path.normalize(path.join(process.cwd(), dataPath)); 69 | } 70 | 71 | // Build request handling 72 | var app = express(); 73 | // - static HTML + JS 74 | app.use(express.static(__dirname + "/../dist")); 75 | 76 | // For each route and use Express adds a Layer type to the router stack 77 | // here we remove it so that we can replace it later. 
78 | // Layer { handle: fn, name: fn.name || , params: {}, path: urlPath, 79 | // keys: [], regexp: path regexp, route: Route object } 80 | app.removeLayer = function(urlPath) { 81 | var layerIndex = -1; 82 | for (var i = 0; i < app._router.stack.length; i++) { 83 | if (app._router.stack[i].path === urlPath) { 84 | layerIndex = i; 85 | break; 86 | } 87 | } 88 | 89 | if (layerIndex === -1) { 90 | // console.log(`Router layer for '${urlPath}' not found`); 91 | return; 92 | } 93 | 94 | app._router.stack.splice(layerIndex, 1); 95 | }; 96 | 97 | app.updateDataPath = function (newDataPath, callback) { 98 | app.removeLayer('/data'); 99 | app.dataPath = newDataPath; 100 | // - Handle data 101 | if (needProxy) { 102 | // Need to proxy the data directory 103 | var proxy = httpProxy.createProxyServer({}); 104 | app.use('/data', function data(req, res) { 105 | proxy.web(req, res, { 106 | target: app.dataPath, 107 | changeOrigin: true, 108 | }); 109 | }); 110 | } else { 111 | // Handle the case we provide a file instead of directory 112 | if (!fs.statSync(app.dataPath).isDirectory()) { 113 | app.dataPath = path.dirname(app.dataPath); 114 | } 115 | 116 | // Build Dataset list if need be 117 | preCheckDataDir(app.dataPath); 118 | 119 | // Serve data from static content 120 | app.use('/data', gzipStatic(app.dataPath, { maxAge: tenSeconds })); 121 | } 122 | if (callback && typeof callback === 'function') { 123 | callback(); 124 | } 125 | }; 126 | 127 | app.getClientConfiguration = function() { 128 | return clientConfiguration; 129 | }; 130 | 131 | app.updateClientConfiguration = function(newConfig) { 132 | clientConfiguration = Object.assign(clientConfiguration, newConfig); 133 | }; 134 | 135 | app.updateDataPath(dataPath); 136 | 137 | // Add image export handler 138 | app.use(bodyParser.json({limit: 10000000})); 139 | app.post('/export', function(req, res) { 140 | var data = req.body, 141 | args = data.arguments, 142 | base64Data = extractImageBase64(data.image); 143 | 144 | if (base64Data) { 145 | fs.writeFile(getExportPath(programOutput, args), base64Data, 'base64', function(err) { 146 | }); 147 | } else { 148 | // We should get the URL of image and copy it with a different name 149 | } 150 | 151 | res.send('Data saved'); 152 | }); 153 | 154 | // Add metadata update 155 | app.post('/update', function(req, res) { 156 | var data = req.body, 157 | title = data.title.replace(/
</g, '').replace(/>/g, ''), 158 | description = data.description.replace(/<
/g, '').replace(//g, ''), 159 | dsPath = path.join(app.dataPath, removeHead(data.path, '/data/')), 160 | imagePath = data.image, 161 | base64Data = extractImageBase64(data.image); 162 | 163 | // Create thumbnail 164 | if (base64Data) { 165 | // Write thumbnail as base64 166 | var thumbnailPath = path.join(dsPath, 'thumbnail.png'); 167 | fs.writeFile(thumbnailPath, base64Data, 'base64', function(err) {}); 168 | } else { 169 | // Copy image 170 | console.log('Should copy image: ', imagePath); 171 | } 172 | 173 | // Update index.json 174 | var indexPath = path.join(dsPath, 'index.json'), 175 | originalData = require(indexPath); 176 | 177 | if (!originalData.metadata) { 178 | originalData.metadata = {}; 179 | } 180 | 181 | originalData.metadata.title = title; 182 | originalData.metadata.description = description; 183 | fs.writeFile(indexPath, JSON.stringify(originalData, null, 2)); 184 | 185 | res.send('Data updated'); 186 | }); 187 | 188 | // Add config.json endpoint 189 | app.get('/config.json', function(req, res) { 190 | res.send(JSON.stringify(clientConfiguration, null, 2)); 191 | }); 192 | 193 | return app; 194 | }; 195 | -------------------------------------------------------------------------------- /dist/1dc35d25e61d819a9c357074014867ab.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/dist/1dc35d25e61d819a9c357074014867ab.ttf -------------------------------------------------------------------------------- /dist/25a32416abee198dd821b0b17a198a8f.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/dist/25a32416abee198dd821b0b17a198a8f.eot -------------------------------------------------------------------------------- /dist/401b1af1d2627cc7eaec1708dff00bde.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/dist/401b1af1d2627cc7eaec1708dff00bde.png -------------------------------------------------------------------------------- /dist/c8ddf1e5e5bf3682bc7bebf30f394148.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/dist/c8ddf1e5e5bf3682bc7bebf30f394148.woff -------------------------------------------------------------------------------- /dist/e6cf7c6ec7c2d6f670ae9d762604cb0b.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/dist/e6cf7c6ec7c2d6f670ae9d762604cb0b.woff2 -------------------------------------------------------------------------------- /dist/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/dist/favicon.ico -------------------------------------------------------------------------------- /dist/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
10 | 11 | 12 | 13 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /dist/sample-data.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "title": "Diskout Ref - Composite", 4 | "filesize":"40 MB", 5 | "thumbnail": "http://tonic.kitware.com/arctic-viewer/diskout-composite.png", 6 | "url": "http://tonic.kitware.com/arctic-viewer/diskout-composite.tgz" 7 | },{ 8 | "title": "Ensemble - Demo", 9 | "filesize":"94.2 MB", 10 | "thumbnail": "http://tonic.kitware.com/arctic-viewer/ensemble.jpg", 11 | "url": "http://tonic.kitware.com/arctic-viewer/ensemble.tgz" 12 | },{ 13 | "title": "Head CT - 3 features composite", 14 | "filesize":"14 MB", 15 | "thumbnail": "http://tonic.kitware.com/arctic-viewer/head_ct_3_features.png", 16 | "url": "http://tonic.kitware.com/arctic-viewer/head_ct_3_features.tgz" 17 | },{ 18 | "title": "Head CT - 4 features composite", 19 | "filesize":"13.1 MB", 20 | "thumbnail": "http://tonic.kitware.com/arctic-viewer/head_ct_4_features.png", 21 | "url": "http://tonic.kitware.com/arctic-viewer/head_ct_4_features.tgz" 22 | },{ 23 | "title": "Hydra - Fluid Velocity", 24 | "filesize":"50.8 MB", 25 | "thumbnail": "http://tonic.kitware.com/arctic-viewer/hydra-image-fluid-velocity.jpg", 26 | "url": "http://tonic.kitware.com/arctic-viewer/hydra-image-fluid-velocity.tgz" 27 | },{ 28 | "title": "MPAS - Composite Temperature contours", 29 | "filesize":"162.3 MB", 30 | "thumbnail": "http://tonic.kitware.com/arctic-viewer/mpas-composite-earth.png", 31 | "url": "http://tonic.kitware.com/arctic-viewer/mpas-composite-earth.tgz" 32 | },{ 33 | "title": "MPAS - Flat earth prober", 34 | "filesize":"37.5 MB", 35 | "thumbnail": "http://tonic.kitware.com/arctic-viewer/mpas-flat-earth-prober.png", 36 | "url": "http://tonic.kitware.com/arctic-viewer/mpas-flat-earth-prober.tgz" 37 | },{ 38 | "title": "MPAS - High contours resolution", 39 | "filesize":"583.7 MB", 40 | "thumbnail": "http://tonic.kitware.com/arctic-viewer/mpas-hd-500-7t.png", 41 | "url": "http://tonic.kitware.com/arctic-viewer/mpas-hd-500-7t.tgz" 42 | },{ 43 | "title": "Diskout geometry data", 44 | "filesize":"792 KB", 45 | "thumbnail": "http://tonic.kitware.com/arctic-viewer/Geometry-diskout-multi-contour.jpg", 46 | "url": "http://tonic.kitware.com/arctic-viewer/Geometry-diskout-multi-contour.tgz" 47 | } 48 | ] 49 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | build-tmp 2 | -------------------------------------------------------------------------------- /docs/config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | cname: 'kitware.github.io', 3 | baseUrl: '/arctic-viewer', 4 | work: './build-tmp', 5 | examples: [], 6 | config: { 7 | title: 'ArcticViewer', 8 | description: '"Visualize your data in the Web"', 9 | subtitle: '"Enable scientific visualization to the Web."', 10 | author: 'Kitware Inc.', 11 | timezone: 'UTC', 12 | url: 'https://kitware.github.io/arctic-viewer', 13 | root: '/arctic-viewer/', 14 | github: 'kitware/arctic-viewer', 15 | google_analytics: 'UA-90338862-4', 16 | }, 17 | copy: [], 18 | }; 19 | -------------------------------------------------------------------------------- /docs/content/api/index.md: -------------------------------------------------------------------------------- 1 | title: API 2 | --- 3 | 4 | This is an 
application and therefore we won't document its API. 5 | -------------------------------------------------------------------------------- /docs/content/docs/CompositeFormat.md: -------------------------------------------------------------------------------- 1 | title: Composite 2 | --- 3 | 4 | # Introduction 5 | 6 | The ParaView ArcticViewer is able to load several type of datasets, but this guide will focus on the basic composite one and will explain the requirements so you can create your own dataset. 7 | 8 | # Dataset structure 9 | 10 | ParaView ArcticViewer expects a dataset descriptor that will formalize that convention in a way it can be understood by the application. The application expects a file named __index.json__ at the root of the tree structure (if any) with a content similar to the following one. 11 | 12 | ```js 13 | { 14 | "type": [ "tonic-query-data-model", "composite-pipeline" ], 15 | "arguments_order": ["time"], 16 | "arguments": { 17 | "time": { 18 | "values": [ "0", "1", "2", "3", "4", "5" ] 19 | } 20 | }, 21 | "data": [ 22 | { 23 | "name": "sprite", 24 | "type": "blob", 25 | "mimeType": "image/jpg", 26 | "pattern": "{time}/rgb.jpg" 27 | },{ 28 | "name": "composite", 29 | "type": "json", 30 | "pattern": "{time}/composite.json" 31 | }, 32 | ], 33 | "CompositePipeline": { 34 | "layers": ["A", "B", "C"], 35 | "dimensions": [ 500, 500 ], 36 | "fields": { 37 | "A": "salinity", 38 | "B": "temperature", 39 | "C": "bottomDepth" 40 | }, 41 | "layer_fields": { 42 | "A": [ "C" ], 43 | "B": [ "B", "A" ], 44 | "C": [ "B", "A" ] 45 | }, 46 | "offset": { 47 | "AC": 1, 48 | "BA": 3, 49 | "BB": 2, 50 | "CA": 5, 51 | "CB": 4 52 | }, 53 | "pipeline": [ 54 | { 55 | "ids": [ "A" ], 56 | "name": "Earth core", 57 | },{ 58 | "ids": [ "B", "C" ], 59 | "name": "Contour by temperature", 60 | "children": [ 61 | { 62 | "ids": [ "B" ], 63 | "name": "t=25.0", 64 | "type": "layer" 65 | },{ 66 | "ids": [ "C" ], 67 | "name": "t=20.0", 68 | "type": "layer" 69 | } 70 | ], 71 | } 72 | ] 73 | } 74 | } 75 | ``` 76 | 77 | In that meta description, we find the pipeline tree described but also we have the mapping of the image sprite with the layer and its "ColorBy". The first character is the layer and the second one is the actual field used for the ColorBy. 78 | 79 | Then for each query (view position, time, configuration, ...), the dataset is composed of an __image sprite__ and a __composite.json__ file. 80 | 81 | The image sprite represents each layer independently with each of its ColorBy settings. 82 | 83 | The __composite.json__ file, on the other hand, provides the pixel ordering for each of the possible layers with a content similar to the following one: 84 | 85 | ```js 86 | { 87 | "pixel-order": "@2258+A+CBA+CA+CB+..." 88 | } 89 | ``` 90 | 91 | The __pixel-order__ string describes the layer order for each pixel, starting from the top left corner of the image. 92 | 93 | The encoding can be described as follows: 94 | 95 | - __@456__ : Skip 456 pixels as no layers are contributing to those pixels. 96 | - __CBA__: For the given pixel, the layer _C_ is on top of _B_ which is on top of _A_. This implies if _C_ is hidden, for example, then the pixel of layer _B_ should be used. 97 | - __+__ : Character delimiter between pixel or pixel group. 
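
To make the encoding concrete, here is a minimal, illustrative sketch of how such a __pixel-order__ string could be expanded into a per-pixel list of contributing layers. This is not the viewer's actual implementation, and the function name is hypothetical.

```js
// Expand a pixel-order string such as "@2258+A+CBA+CA+CB+..." into an array
// with one entry per pixel, each entry listing its layers from top to bottom.
function decodePixelOrder(pixelOrder, width, height) {
  var pixels = new Array(width * height).fill(''); // '' => no layer contributes
  var cursor = 0;
  pixelOrder.split('+').forEach(function(token) {
    if (token === '') {
      return; // ignore empty tokens produced by trailing delimiters
    }
    if (token[0] === '@') {
      cursor += Number(token.slice(1)); // skip pixels with no contribution
    } else {
      pixels[cursor] = token; // e.g. 'CBA': C over B over A
      cursor += 1;
    }
  });
  return pixels;
}
```

With this per-pixel stack, hiding a layer simply means falling back to the next character in the stack: if `C` is hidden, a pixel encoded as `CBA` shows layer `B`.
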
98 | 99 | # Image Sprite 100 | 101 | Image Sprite of each layer 102 | -------------------------------------------------------------------------------- /docs/content/docs/DepthSortedCompositeFormat.md: -------------------------------------------------------------------------------- 1 | title: Depth-Sorted Composite 2 | --- 3 | 4 | # Introduction 5 | 6 | This guide will focus on the depth-sorted composite datatype and will explain the structure and requirements for this datatype so you can create your own datasets. 7 | 8 | The name depth-sorted composite comes from the fact that these datasets have been processed so that the images can be drawn directly and immediately in back-to-front order. 9 | 10 | Additionally, we need an opacity or alpha for each pixel in each layer. If we also have an intensity for each pixel in each layer, we can make use of that to do additional interesting things with the images produced by ParaView ArcticViewer. 11 | 12 | # Dataset structure 13 | 14 | ParaView ArcticViewer expects any data to be accompanied by a dataset descriptor that formalizes the data convention so it can be understood by the application. The application expects to find a file named __index.json__ at the root of the tree structure (if any), and for the depth-sorted composite type of dataset, the file should be similar to the following example: 15 | 16 | ```js 17 | { 18 | "SortedComposite": { 19 | "layers": 5, 20 | "scalars": [ 21 | 511.875, 22 | 1535.625, 23 | 2559.375, 24 | 3583.125, 25 | 4596.875 26 | ], 27 | "dimensions": [ 28 | 3, 29 | 3 30 | ] 31 | }, 32 | "type": [ 33 | "tonic-query-data-model", 34 | "sorted-composite", 35 | "alpha" 36 | ], 37 | "arguments": { 38 | "theta": { 39 | "default": 2, 40 | "bind": { 41 | "mouse": { 42 | "drag": { 43 | "coordinate": 1, 44 | "step": 30, 45 | "modifier": 0, 46 | "orientation": 1 47 | } 48 | } 49 | }, 50 | "ui": "slider", 51 | "name": "theta", 52 | "values": [ 53 | "30", 54 | "60", 55 | "90", 56 | "120", 57 | "150" 58 | ] 59 | }, 60 | "phi": { 61 | "bind": { 62 | "mouse": { 63 | "drag": { 64 | "coordinate": 0, 65 | "step": 30, 66 | "modifier": 0, 67 | "orientation": 1 68 | } 69 | } 70 | }, 71 | "ui": "slider", 72 | "values": [ 73 | "0", 74 | "30", 75 | "60", 76 | "90", 77 | "120", 78 | "150", 79 | "180", 80 | "210", 81 | "240", 82 | "270", 83 | "300", 84 | "330" 85 | ], 86 | "name": "phi", 87 | "loop": "modulo" 88 | } 89 | }, 90 | "data": [ 91 | { 92 | "pattern": "{theta}_{phi}/intensity.uint8", 93 | "type": "array", 94 | "name": "intensity", 95 | "categories": [ 96 | "intensity" 97 | ], 98 | "metadata": {} 99 | }, 100 | { 101 | "pattern": "{theta}_{phi}/alpha.uint8", 102 | "type": "array", 103 | "name": "alpha", 104 | "metadata": {} 105 | }, 106 | { 107 | "pattern": "{theta}_{phi}/order.uint8", 108 | "type": "array", 109 | "name": "order", 110 | "metadata": {} 111 | } 112 | ], 113 | "arguments_order": [ 114 | "phi", 115 | "theta" 116 | ], 117 | "metadata": { 118 | "backgroundColor": "rgb(0, 0, 0)" 119 | } 120 | } 121 | ``` 122 | 123 | Let us begin by noting some important details from this descriptor file. First, the image dimensions are described by the `SortedComposite` -> `dimensions` attribute. In this case, the images we will compose are 3px by 3px. Next, note the the number of "layers" we will be compositing is `5` in this case. Looking in the `data` section we can see there are three components to the data for building up a final composite image: `order.uint8`, `alpha.uint8` and `intensity.uint8`. 
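
Before the detailed description of each component below, here is an illustrative sketch of how the three arrays can be combined back-to-front with the traditional over operator. It assumes the arrays are `Uint8Array`s laid out as consecutive layer-sized images (width × height pixels per layer, as in the layout figure), that the deepest depth slot is stored last, and that `colorForLayer` is a hypothetical lookup (for instance built from `SortedComposite.scalars` and a color map); none of this is the viewer's actual rendering code.

```js
// Illustrative sketch: composite a single pixel back-to-front.
function compositePixel(x, y, width, height, layers, order, alpha, intensity, colorForLayer) {
  var pixelOffset = y * width + x;
  var rgb = [0, 0, 0];
  // Walk from the deepest depth slot to the closest one (flip if your layout differs).
  for (var slot = layers - 1; slot >= 0; slot--) {
    var layerIdx = order[slot * width * height + pixelOffset]; // feature layer owning this slot
    var a = alpha[layerIdx * width * height + pixelOffset] / 255; // opacity of that fragment
    var shade = intensity[layerIdx * width * height + pixelOffset] / 255; // brightness
    var color = colorForLayer(layerIdx); // hypothetical [r, g, b] lookup
    for (var c = 0; c < 3; c++) {
      // over operator: this fragment over the color accumulated so far
      rgb[c] = shade * color[c] * a + rgb[c] * (1 - a);
    }
  }
  return rgb;
}
```
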
These data components are described in a bit more detail below. 124 | 125 | Example data layout for 3x3 images with 5 layers 126 | 127 | The image above shows an example of the data layout for two of the three components (intensity is exactly the same, and thus not shown above) in the case where the layer images are 3x3 pixels, and there are 5 layers (features) total. Each data component is simply a 1D array of 8-bit pixels, where the length of the array is found by multiplying image width by image height by the number of layers. Refer to this depiction when reading about the different data components below. 128 | 129 | ## `order` 130 | 131 | The data in the order file consists of 8 bits per pixel, where the value in a pixel is just the original layer index of that pixel. The letters `a` through `e` in the above illustration correspond to depth-sorted layers where the pixels in `e` are furthest from the camera, `d` are the next-furthest, and so on, until `a` are the closest to the camera. The actual value in each pixel within the sprite is a single byte indicating which feature layer contains the `alpha` and `intensity` for the pixel. 132 | 133 | By starting with the pixels marked with an `e` and proceeding backwards through the alphabet to pixels marked with an `a`, we can quickly do back-to-front rendering (using the traditional over-operator) which will produce a correctly alpha-blended image. 134 | 135 | ## `alpha` 136 | 137 | The data in the `alpha` file consists of 8 bits per pixel, each pixel giving an opacity value of a pixel in an original layer or feature. 138 | 139 | ## `intensity` 140 | 141 | The data in the `intensity` file is completely analogous to the data found in the `alpha` file. The difference is that the intensities give a measure of the "brightness" of the pixel, rather than the transparency. 142 | -------------------------------------------------------------------------------- /docs/content/docs/EnsembleFormat.md: -------------------------------------------------------------------------------- 1 | title: Ensemble 2 | --- 3 | 4 | # Introduction 5 | 6 | The ParaView ArcticViewer is able to load several type of datasets, but this guide will focus on the ensemble one and will explain the requirements for it so you can create your own datasets. 7 | 8 | # Dataset structure 9 | 10 | ParaView ArcticViewer expects a dataset descriptor that formalizes the data convention so it can be understood by the application. 
The application expects a file named __index.json__ at the root of the tree structure (if any) with a content similar to the following one: 11 | 12 | ```js 13 | { 14 | "type": [ 15 | "ensemble-dataset" 16 | ], 17 | "Ensemble": { 18 | "datasets": [ 19 | { 20 | "name": "Velocity", 21 | "data": "hydra-image-fluid-velocity/index.json" 22 | },{ 23 | "name": "Velocity2", 24 | "data": "hydra-image-fluid-velocity/index.json" 25 | },{ 26 | "name": "VelocityFree", 27 | "data": "hydra-image-fluid-velocity/index.json" 28 | },{ 29 | "name": "Earth", 30 | "data": "mpas-probe-flat-earth/index.json" 31 | },{ 32 | "name": "Earth2", 33 | "data": "mpas-probe-flat-earth/index.json" 34 | },{ 35 | "name": "EarthFree", 36 | "data": "mpas-probe-flat-earth/index.json" 37 | } 38 | ], 39 | "binding": [ 40 | { 41 | "datasets": [ "Velocity", "Velocity2" ], 42 | "arguments": [ "phi", "theta", "time" ] 43 | },{ 44 | "datasets": [ "Earth", "Earth2" ], 45 | "arguments": [ "time" ], 46 | "other": [ 47 | { 48 | "listener": "onProbeChange", 49 | "setter": "setProbe" 50 | },{ 51 | "listener": "onRenderMethodChange", 52 | "setter": "setRenderMethod" 53 | },{ 54 | "listener": "onCrosshairVisibilityChange", 55 | "setter": "setCrossHairEnable" 56 | } 57 | ] 58 | } 59 | ], 60 | "operators": [ 61 | { "name": "Velocity diff", "datasets": [ "Velocity", "Velocity2", "VelocityFree" ], "operation": "Velocity - VelocityFree" }, 62 | { "name": "Earth diff", "datasets": [ "Earth", "Earth2", "EarthFree" ], "operation": "Earth - EarthFree" } 63 | ] 64 | }, 65 | "metadata": { 66 | "title": "Ensemble demo" 67 | } 68 | } 69 | ``` 70 | 71 | In the above meta description, we find the list of datasets that compose the ensemble, along with the relationship that should exist between them. Like synchronizing the __time__ across several dataset or any particular argument or parameter. We can also see above and example of the kind of binding which needs to happen directly on the ImageBuilder instance. 72 | 73 | The path provided for the datasets is relative to the initial index.json file. 74 | 75 | Then we can add some Operator views of those dataset. These operators will allow you to apply pixel operations between the set of datasets you've listed in them. 76 | -------------------------------------------------------------------------------- /docs/content/docs/ImageBasedFormat.md: -------------------------------------------------------------------------------- 1 | title: Image Based 2 | --- 3 | 4 | # Introduction 5 | 6 | The ParaView ArcticViewer is able to load several type of datasets, but this guide will focus on the most basic one (image based) and will explain the requirements so you can create your own dataset. 7 | 8 | # Dataset structure 9 | 10 | The data must be a set of images (JPG, PNG...) defined by a N dimensional parameter set. 11 | 12 | For example a movie is considered a 1D dataset as the only variable is time. But imagine you have a set of pictures/images that are dependent on a point of view and the time, then you have a 2D one. By now, you should be able to understand the concept. 13 | 14 | So assuming you have a large set of images, they need to have a structure and/or convention on their naming relative to those parameters. 15 | 16 | In order to make those images available to ParaView ArcticViewer, you will need to create a dataset descriptor that will formalize that convention so it can be understood by the application. 
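
As a concrete illustration, a small Node script along the following lines could emit the kind of descriptor that is shown in full just below; the image names and time values used here are hypothetical examples.

```js
// Hypothetical example: write an info.json descriptor for images named
// sky_0.jpg ... sky_5.jpg that only vary over time.
var fs = require('fs');

var descriptor = {
  type: ['tonic-query-data-model'],
  arguments_order: ['time'],
  arguments: {
    time: { values: ['0', '1', '2', '3', '4', '5'] },
  },
  data: [
    { name: 'image', type: 'blob', mimeType: 'image/jpg', pattern: 'sky_{time}.jpg' },
  ],
};

fs.writeFileSync('info.json', JSON.stringify(descriptor, null, 2));
```
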
The application expects a file named __info.json__ at the root of the tree structure (if any) with a content similar to the following one: 17 | 18 | ```js 19 | { 20 | "type": [ "tonic-query-data-model" ], 21 | "arguments_order": ["time"], 22 | "arguments": { 23 | "time": { 24 | "values": [ "0", "1", "2", "3", "4", "5" ] 25 | } 26 | }, 27 | "data": [ 28 | { 29 | "name": "image", 30 | "type": "blob", 31 | "mimeType": "image/jpg", 32 | "pattern": "sky_{time}.jpg" 33 | } 34 | ] 35 | } 36 | ``` 37 | 38 | The data descriptor has 4 main sections explained below. 39 | 40 | ## type 41 | 42 | This expresses the type of the dataset and what kind of processing could be needed for rendering the data. In our case, we don't need any specific processing, just the fact that we will rely on a "tonic-query-data-model" to handle the data. 43 | 44 | ## arguments_order 45 | 46 | This list of String is used by the graphical user interface to create a set of controls over the specified parameters and specify in which order they should appear. 47 | 48 | ## arguments 49 | 50 | This section defines each dimension that you want to specify with a name and a set of possible values. 51 | 52 | This can be extended by the following set of optional properties: 53 | 54 | - __label__: Specify a different name than the one used as the key. 55 | - __loop__: Specify if you want that parameter to loop either in __modulo__ or __reverse__ mode. 56 | - __ui__: Specify the type of UI that should be used such as a __slider__ or as a __list__ (the default value). 57 | - __bind__: Specify what type of event you want to bind this parameter to. 58 | 59 | __bind__ 60 | 61 | You can bind a parameter to a mouse action as follows: 62 | 63 | ```js 64 | "mouse" : { 65 | "drag" : { // Type of the mouse event 66 | "modifier": 0, // Which modifier NONE | ALT | SHIFT | META | CTR 67 | "coordinate": 0, // Which coordinate [x,y]. 0 for x and 1 for y. 68 | "step": 10, // How much drag on x or y is needed to switch to the next value of the parameter 69 | "orientation": 1 // Which way to go based on the delta. (1 or -1) 70 | }, 71 | } 72 | ``` 73 | 74 | ## data 75 | 76 | This section lists the set of data that should be retrieved for a given set of parameters. For the Image Based Format, we just need a single entry with a name __"image"__ and the appropriate pattern representing the path of the image relative to the __info.json__ file and where each {xxx} will be replaced by the currently active dimension value. The pattern can include the __"/"__ character to denote a sub-directory. 77 | -------------------------------------------------------------------------------- /docs/content/docs/ProbeFormat.md: -------------------------------------------------------------------------------- 1 | title: Probe 2 | --- 3 | 4 | # Introduction 5 | 6 | The ParaView ArcticViewer is able to load several type of datasets, but this guide will focus on the probe one and will explain the requirements for it so you can create your own datasets. 7 | 8 | # Dataset structure 9 | 10 | ParaView ArcticViewer expects the data to be accompanied by a dataset descriptor that formalizes the data convention so it can be understood by the application. 
The application expects a file named __index.json__ at the root of the tree structure (if any) with a content similar to the following one: 11 | 12 | ```js 13 | { 14 | "type": [ "tonic-query-data-model", "in-situ-data-prober" ], 15 | "arguments_order": ["time"], 16 | "arguments": { 17 | "time": { 18 | "loop": "reverse", 19 | "ui": "slider", 20 | "values": [ "0", "1", "2", "3", "4" ] 21 | } 22 | }, 23 | "InSituDataProber": { 24 | "dimensions": [ 500, 250, 30 ], 25 | "fields": [ "temperature", "salinity" ], 26 | "ranges": { 27 | "salinity": [0, 38], 28 | "temperature": [-5, 30] 29 | }, 30 | "slices": [ "slice_0", "slice_1", "slice_2" ], 31 | "spacing": [ 1.0, 1.0, 4.0 ], 32 | "sprite_size": 10 33 | }, 34 | "data": [ 35 | { 36 | "name": "slice_0", 37 | "type": "blob", 38 | "mimeType": "image/png", 39 | "pattern": "{time}/{field}_0.png" 40 | },{ 41 | "name": "slice_1", 42 | "type": "blob", 43 | "mimeType": "image/png", 44 | "pattern": "{time}/{field}_1.png" 45 | },{ 46 | "name": "slice_2", 47 | "type": "blob", 48 | "mimeType": "image/png", 49 | "pattern": "{time}/{field}_2.png" 50 | } 51 | ] 52 | } 53 | ``` 54 | 55 | In the above meta description, we find the volume size, the set of fields, the data range, the spacing, and how many layers are available in a sprite. 56 | 57 | # Image Sprite 58 | 59 | Image Sprite with raw scalar field encoded 60 | -------------------------------------------------------------------------------- /docs/content/docs/composite-sprite.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/docs/composite-sprite.jpg -------------------------------------------------------------------------------- /docs/content/docs/contributing.md: -------------------------------------------------------------------------------- 1 | title: Contributing 2 | --- 3 | 4 | We welcome you to join the development of ParaView ArcticViewer. This document will help you through the process. 5 | 6 | ## Before You Start 7 | 8 | Please follow the coding style: 9 | 10 | - Follow [Airbnb JavaScript Style Guide](https://github.com/airbnb/javascript). 11 | - Use soft-tabs with a two space indent. 12 | - Don't put commas first. 13 | 14 | ## Workflow 15 | 16 | 1. Fork [kitware/arctic-viewer](https://github.com/kitware/arctic-viewer) into your own Github account. 17 | 2. Clone the forked repository to your computer and install dependencies. 18 | 19 | {% code %} 20 | $ git clone https://github.com//arctic-viewer.git 21 | $ cd arctic-viewer 22 | $ npm install 23 | $ npm install -g commitizen 24 | {% endcode %} 25 | 26 | 3. Create a feature branch. 27 | 28 | {% code %} 29 | $ git checkout -b new_feature 30 | {% endcode %} 31 | 32 | 4. Start hacking. 33 | 5. Use Commitizen for commit message 34 | 35 | {% code %} 36 | $ git add 37 | $ git cz 38 | {% endcode %} 39 | 40 | 6. Push the branch: 41 | 42 | {% code %} 43 | $ git push origin new_feature 44 | {% endcode %} 45 | 46 | 6. Create a pull request and describe the change. 47 | 48 | ## Updating Documentation 49 | 50 | The ParaView ArcticViewer documentation is part of the code repository. 51 | 52 | ## Reporting Issues 53 | 54 | When you encounter some problems when using ParaView ArcticViewer, you may be able to find a solution in [Troubleshooting](troubleshooting.html) or ask a question on our [issues list](https://github.com/kitware/arctic-viewer/issues) or [mailing list](http://www.paraview.org/mailman/listinfo/paraview). 
If you can't find the answer, please report the issue on GitHub. 55 | -------------------------------------------------------------------------------- /docs/content/docs/electron.md: -------------------------------------------------------------------------------- 1 | title: Desktop application 2 | --- 3 | 4 | ArcticViewer can be built and distributed as a desktop application by relying on the [Electron](http://electron.atom.io/) infrastructure. 5 | 6 | We currently don't provide links to download pre-built packaged versions, but you should be able to build them yourself. 7 | 8 | ## Create an application bundle yourself 9 | 10 | ArcticViewer provides you with the infrastructure to create a redistributable application of ArcticViewer itself. 11 | 12 | For that, you just need to follow the appropriate instructions below for your operating system. 13 | 14 | ### Windows 15 | 16 | ```sh 17 | $ git clone https://github.com/Kitware/arctic-viewer.git 18 | $ cd arctic-viewer/electron 19 | $ npm run build:win 20 | ``` 21 | 22 | Your bundle should be available in the `./bin` directory. 23 | 24 | ### MacOS 25 | 26 | ```sh 27 | $ git clone https://github.com/Kitware/arctic-viewer.git 28 | $ cd arctic-viewer/electron 29 | $ npm run build:mac 30 | ``` 31 | 32 | Your bundle should be available in the `./bin` directory. 33 | 34 | ### Linux 35 | 36 | ```sh 37 | $ git clone https://github.com/Kitware/arctic-viewer.git 38 | $ cd arctic-viewer/electron 39 | $ npm run build:lin 40 | ``` 41 | 42 | Your bundle should be available in the `./bin` directory. 43 | -------------------------------------------------------------------------------- /docs/content/docs/index.md: -------------------------------------------------------------------------------- 1 | title: Documentation 2 | --- 3 | 4 | # Introduction 5 | 6 | The ParaView ArcticViewer application lets you create a local web server which will then serve a dedicated web application and your data to any browser that can connect to it. 7 | 8 | The ParaView ArcticViewer is an example application based on [Cinema](https://datascience.lanl.gov/Cinema.html) and intended to highlight some of the capabilities of [ParaViewWeb](https://kitware.github.io/paraviewweb) and [vtk.js](https://kitware.github.io/vtk-js). While it was developed to support a growing set of data types, it will remain simply an example application. Some of the types of datasets it can currently handle include: 9 | 10 | - Image based dataset. 11 | - 3D probed dataset with an encoded scalar field, which allows the use of a user-defined LookupTable. 12 | - Image based composite dataset with a JSON structure providing the pixel ordering for each layer. 13 | - Floating point sorted composite dataset enabling dynamic scene recomposition with transparency across layers, as well as editable color mapping. 14 | - Dynamic 3D mesh whose animation can be replayed. 15 | - CSV table based dataset which uses Plotly for rendering various types of charts. 16 | - Time based floating point images which enable time analysis and comparison over various regions. 17 | - ... 18 | 19 | # Installation 20 | 21 | ParaView ArcticViewer is simple to install as long as you have a working NodeJS environment. To install it globally on your system, you just need to run the following command line: 22 | 23 | ```sh 24 | $ npm install -g arctic-viewer 25 | ``` 26 | 27 | # Command line tool 28 | 29 | ParaView ArcticViewer can be used as a Web server to serve both the application and your data.
Alternatively, it can be used to download remote datasets. The following sections explain how to use it and describe the available options. 30 | 31 | ## Serve a local dataset 32 | 33 | For this configuration, ParaView ArcticViewer will act as a local web server and will need to know which directory you would like to serve. Then optional configuration can be provided such as which network port should be used and whether you want the application to start your default web browser on the application page. 34 | 35 | The following command lines illustrate various usage scenarios: 36 | 37 | ```sh 38 | ## This will start the server on port 3000 and will open your browser automatically 39 | $ ArcticViewer -d ./path/to/your/dataset/directory 40 | 41 | ## This will start the server on port 3000 and wait for connections on http://localhost:3000 42 | $ ArcticViewer -d ./path/to/your/dataset/directory -s 43 | 44 | ## This will start the server on port 1234 so you can connect to http://localhost:1234 45 | $ ArcticViewer -d ./path/to/your/dataset/directory -s -p 1234 46 | ``` 47 | 48 | ## Serve a remote dataset 49 | 50 | Serving a remote dataset is similar to serving a local dataset except that you need to provide the full http URL to location where the data is hosted. For example: 51 | 52 | ```sh 53 | $ ArcticViewer -d http://tonic.kitware.com/data/head-ct 54 | ``` 55 | 56 | ## Download a set of sample datasets 57 | 58 | ArcticViewer also lets you download some data so you can interact with it on your own machine. For that you will need to run the application in the directory into which you want to download your data. 59 | 60 | Here is an example of how to do that: 61 | 62 | ```sh 63 | $ mkdir ArcticData && cd $_ 64 | $ ArcticViewer --download-sample-data 65 | 66 | | Available datasets for download (path: /home/projects/ArcticData) 67 | | (1) 40.0 MB - diskout-composite 68 | | (2) 94.2 MB - ensemble 69 | | (3) 13.7 MB - head_ct_3_features 70 | | (4) 13.1 MB - head_ct_4_features 71 | | (5) 50.8 MB - hydra-image-fluid-velocity 72 | | (6) 162.3 MB - mpas-composite-earth 73 | | (7) 37.5 MB - mpas-flat-earth-prober 74 | | (8) 552.5 MB - mpas-hd-500-7t 75 | | 76 | | Press Enter to quit or the dataset number to download: 1 77 | | Press Enter to quit or the dataset number to download: 3 78 | | Press Enter to quit or the dataset number to download: 79 | | 80 | | => You will be able to try ArcticViewer with these commands: 81 | | 82 | | $ ArcticViewer -d /home/projects/ArcticData/head_ct_3_features 83 | | $ ArcticViewer -d /home/projects/ArcticData/diskout-composite 84 | | 85 | | Thank you for trying this out... 86 | 87 | ``` 88 | 89 | As you can see, once you choose a couple of datasets by typing in the associated number and hitting `Enter`, the data is downloaded to the current directory, and you are given the command lines to run in order to interact with them. 90 | 91 | ## Download a remote dataset 92 | 93 | This allows the application to act as a regular viewer and download each resource locally. 
94 | 95 | Here is an example of how to run it: 96 | 97 | ```sh 98 | $ mkdir head-ct && cd $_ 99 | $ ArcticViewer --download http://tonic.kitware.com/data/head-ct 100 | ``` 101 | -------------------------------------------------------------------------------- /docs/content/docs/probe-sprite.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/docs/probe-sprite.png -------------------------------------------------------------------------------- /docs/content/docs/tonic-volume-data-format.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/docs/tonic-volume-data-format.png -------------------------------------------------------------------------------- /docs/content/docs/tools.md: -------------------------------------------------------------------------------- 1 | title: Tools 2 | --- 3 | 4 | ParaView ArcticViewer is meant to be an Open Source application for interactive data exploration. 5 | 6 | The following guide explains our software process and the tools used to build and develop this framework. 7 | 8 | ## Software process 9 | 10 | We rely on Semantic-release to manage our change log, tagging and publishing to NPM via Travis. 11 | 12 | In order to maintain that process, each commit message should follow a specific format. To ensure that formatting, we use Commitizen, which can be triggered via the following command line. Additional information can be found [here](https://gist.github.com/stephenparish/9941e89d80e2bc58a153). 13 | 14 | $ git cz 15 | 16 | Then a set of questions will be presented to you like the following ones: 17 | 18 | $ git cz 19 | 20 | Line 1 will be cropped at 100 characters. All other lines will be wrapped 21 | after 100 characters. 22 | 23 | ? Select the type of change that you're committing: (Use arrow keys) 24 | feat: A new feature 25 | fix: A bug fix 26 | docs: Documentation only changes 27 | ❯ style: Changes that do not affect the meaning of the code 28 | (white-space, formatting, missing semi-colons, etc) 29 | refactor: A code change that neither fixes a bug or adds a feature 30 | perf: A code change that improves performance 31 | (Move up and down to reveal more choices) 32 | 33 | ? Denote the scope of this change ($location, $browser, $compile, etc.): 34 | ESLint 35 | 36 | ? Write a short, imperative tense description of the change: 37 | Update code formatting to comply with our ESLint specification 38 | 39 | ? Provide a longer description of the change: 40 | 41 | ? List any breaking changes or issues closed by this change: 42 | 43 | Will generate the following commit message: 44 | 45 | commit 1a31ecfcc2f6f4283e51187a24ce0e9d9c17ae54 46 | Author: Sebastien Jourdain 47 | Date: Mon Dec 21 09:29:21 2015 -0700 48 | 49 | style(ESLint): Update code formatting to comply with our ESLint specification 50 | 51 | 52 | [Full convention](https://gist.github.com/stephenparish/9941e89d80e2bc58a153) 53 | 54 | ## Code editing 55 | 56 | We use [Sublime Text 3](http://www.sublimetext.com) with the following set of plugins. To install plugins, you will first have to install [Package Control](https://packagecontrol.io/installation).
57 | 58 | Then installing new plugin should start with: ```Ctrl/Cmd + Shift + p``` Install 59 | 60 | ### Git + GitGutter 61 | 62 | With GitGutter, you can see which lines have been added, deleted or modified in the gutter. 63 | 64 | ### Babel 65 | 66 | This plugin adds proper syntax highlighting to your ES6/2015 and React JSX code. 67 | 68 | ### JsFormat 69 | 70 | Once installed, to use JSFormat, go to your JS file and hit Ctrl + Alt + f on Windows/Linux or Ctrl + ⌥ + f on Mac. Alternatively, use the context menu. 71 | 72 | ### Sublime-Linter + SublimeLinter-eslint 73 | 74 | [More information available here](https://github.com/roadhump/SublimeLinter-eslint). 75 | 76 | $ npm install -g eslint 77 | 78 | ### EditorConfig 79 | 80 | [More information available here](https://github.com/sindresorhus/editorconfig-sublime#readme) 81 | -------------------------------------------------------------------------------- /docs/content/docs/troubleshooting.md: -------------------------------------------------------------------------------- 1 | title: Troubleshooting 2 | --- 3 | 4 | If you experience problems using ParaView ArcticViewer, try doing a search on our [issues list](https://github.com/kitware/arctic-viewer/issues) or our [mailing list](http://www.paraview.org/mailman/listinfo/paraview). 5 | -------------------------------------------------------------------------------- /docs/content/icon/favicon-160x160.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/icon/favicon-160x160.png -------------------------------------------------------------------------------- /docs/content/icon/favicon-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/icon/favicon-16x16.png -------------------------------------------------------------------------------- /docs/content/icon/favicon-196x196.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/icon/favicon-196x196.png -------------------------------------------------------------------------------- /docs/content/icon/favicon-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/icon/favicon-32x32.png -------------------------------------------------------------------------------- /docs/content/icon/favicon-96x96.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/icon/favicon-96x96.png -------------------------------------------------------------------------------- /docs/content/index.jade: -------------------------------------------------------------------------------- 1 | layout: index 2 | description: Interactive artifact data visualizer 3 | subtitle: ParaView ArcticViewer, The ultimate data viewer 4 | cmd: npm install -g arctic-viewer 5 | comments: false 6 | --- 7 | 8 | ul#intro-feature-list 9 | li.intro-feature-wrap 10 | .intro-feature 11 | .intro-feature-icon 12 | i.fa.fa-child 13 | h3.intro-feature-title 14 | 
a(href="https://www.npmjs.com/package/arctic-viewer").link Releases 15 | img(style="padding-left: 25px",src="https://badge.fury.io/js/arctic-viewer.svg") 16 | p.intro-feature-desc 17 | | The ParaView ArcticViewer is an example application based on 18 | | Cinema and 19 | | intended to show off what you can do with 20 | | ParaViewWeb and 21 | | vtk.js. While it was 22 | | developed to support a growing set of data types, it's still just an 23 | | example. 24 | 25 | li.intro-feature-wrap 26 | .intro-feature 27 | .intro-feature-icon 28 | i.fa.fa-terminal 29 | h3.intro-feature-title Command line tool 30 | p.intro-feature-desc 31 | | The ParaView ArcticViewer is a command line tool that can work anywhere 32 | | NodeJS or JavaScript is available and lets you serve and or 33 | | explore large datasets in an intuitive manner. 34 | 35 | li.intro-feature-wrap 36 | .intro-feature 37 | .intro-feature-icon 38 | i.fa.fa-mobile 39 | h3.intro-feature-title Mobile Friendly 40 | p.intro-feature-desc 41 | | Mobile clients can connect to a ParaView ArcticViewer server or 42 | | download the native iOS application on the App Store. The latter 43 | | approach wraps the JavaScript code of this project into a native 44 | | Swift application for a better mobile experience. 45 | 46 | li.intro-feature-wrap 47 | .intro-feature 48 | .intro-feature-icon 49 | i.fa.fa-life-ring 50 | h3.intro-feature-title 51 | a(href="http://www.vtk.org/services/").link Support and Services 52 | p.intro-feature-desc 53 | | Like what we've done here? Kitware offers advanced software R&D 54 | | solutions and services. Find out how we can help you with your next project. 55 | 56 | center 57 | img(style="border-radius: 2px;border: solid 1px;margin: 0.5%", src="/arctic-viewer/screenshots/ArcticViewer-ListDataSets.jpg", width="15%") 58 | img(style="border-radius: 2px;border: solid 1px;margin: 0.5%", src="/arctic-viewer/screenshots/ArcticViewer-JetPropulsion.jpg", width="15%") 59 | img(style="border-radius: 2px;border: solid 1px;margin: 0.5%", src="/arctic-viewer/screenshots/ArcticViewer-Medical.jpg", width="15%") 60 | img(style="border-radius: 2px;border: solid 1px;margin: 0.5%", src="/arctic-viewer/screenshots/ArcticViewer-Earth.jpg", width="15%") 61 | img(style="border-radius: 2px;border: solid 1px;margin: 0.5%", src="/arctic-viewer/screenshots/ArcticViewer-EarthMagicLens.jpg", width="15%") 62 | img(style="border-radius: 2px;border: solid 1px;margin: 0.5%", src="/arctic-viewer/screenshots/ArcticViewer-Prober.jpg", width="15%") 63 | -------------------------------------------------------------------------------- /docs/content/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/logo.png -------------------------------------------------------------------------------- /docs/content/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | ArcticViewerlogo 5 | Created with Sketch. 
6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /docs/content/screenshots/ArcticViewer-Earth.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/screenshots/ArcticViewer-Earth.jpg -------------------------------------------------------------------------------- /docs/content/screenshots/ArcticViewer-EarthMagicLens.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/screenshots/ArcticViewer-EarthMagicLens.jpg -------------------------------------------------------------------------------- /docs/content/screenshots/ArcticViewer-JetPropulsion.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/screenshots/ArcticViewer-JetPropulsion.jpg -------------------------------------------------------------------------------- /docs/content/screenshots/ArcticViewer-ListDataSets.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/screenshots/ArcticViewer-ListDataSets.jpg -------------------------------------------------------------------------------- /docs/content/screenshots/ArcticViewer-Medical.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/screenshots/ArcticViewer-Medical.jpg -------------------------------------------------------------------------------- /docs/content/screenshots/ArcticViewer-Prober.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/screenshots/ArcticViewer-Prober.jpg -------------------------------------------------------------------------------- /docs/content/screenshots/ArcticViewer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/docs/content/screenshots/ArcticViewer.png -------------------------------------------------------------------------------- /docs/data/menu.yml: -------------------------------------------------------------------------------- 1 | docs: /docs/ 2 | -------------------------------------------------------------------------------- /docs/tpl/__en__: -------------------------------------------------------------------------------- 1 | menu: 2 | docs: Docs 3 | api: API 4 | examples: Examples 5 | news: News 6 | search: Search 7 | 8 | index: 9 | get_started: Get started 10 | 11 | page: 12 | contents: Contents 13 | back_to_top: Back to Top 14 | improve: Improve this doc 15 | prev: Prev 16 | next: Next 17 | last_updated: "Last updated: %s" 18 | 19 | sidebar: 20 | docs: 21 | getting_started: Getting Started 22 | overview: Overview 23 | miscellaneous: Miscellaneous 24 | tools: Tools 25 | troubleshooting: Troubleshooting 26 | contributing: Contributing 27 | formats: Formats 28 | 
imageBased: Image based 29 | composite: Composite 30 | depthSorted: Sorted Composite 31 | probe: Probe 32 | ensemble: Ensemble 33 | available: Available 34 | ios: iOS 35 | desktop: Desktop 36 | -------------------------------------------------------------------------------- /docs/tpl/__sidebar__: -------------------------------------------------------------------------------- 1 | docs: 2 | getting_started: 3 | overview: index.html 4 | miscellaneous: 5 | tools: tools.html 6 | troubleshooting: troubleshooting.html 7 | contributing: contributing.html 8 | formats: 9 | imageBased: ImageBasedFormat.html 10 | composite: CompositeFormat.html 11 | depthSorted: DepthSortedCompositeFormat.html 12 | probe: ProbeFormat.html 13 | ensemble: EnsembleFormat.html 14 | available: 15 | ios: http://kitware.github.io/arctic-viewer-ios 16 | desktop: electron.html 17 | -------------------------------------------------------------------------------- /electron/README.md: -------------------------------------------------------------------------------- 1 | # ArcticViewer Desktop 2 | 3 | An [Electron](https://github.com/electron/electron) wrapper for ArcticViewer. 4 | 5 | ## Getting Started 6 | 7 | In this directory run 8 | 9 | ```sh 10 | $ npm install # installs electron and other dependencies 11 | $ npm start # starts electron app 12 | ``` 13 | 14 | You'll be presented a blank window prompting you to open a dataset. Use the hotkey _Ctrl / ⌘ + O_ to open a dataset folder, _File > Open..._ will present the same window to open a dataset. 15 | 16 | ## Bundling 17 | 18 | Depending on your platform, you can bundle ArcticViewer as a native application with one of the following: 19 | 20 | ```sh 21 | $ npm run build:mac # bundle for macOS 22 | $ npm run build:lin # bundle for Linux 23 | $ npm run build:win # bundle for Windows 24 | ``` 25 | 26 | The output from the build process will be placed in the folder `./bin/` 27 | -------------------------------------------------------------------------------- /electron/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "arctic-viewer-desktop", 3 | "productName": "Arctic Viewer", 4 | "description": "Load and explore in-situ/batch data products", 5 | "homepage": "https://kitware.github.io/arctic-viewer", 6 | "license": "BSD-3-Clause", 7 | "version": "0.0.1", 8 | "main": "src/index.js", 9 | "dependencies": { 10 | "arctic-viewer": "latest", 11 | "electron": "2.0.0", 12 | "shelljs": "0.7.4", 13 | "about-window": "1.4.0" 14 | }, 15 | "devDependencies": { 16 | "electron-packager": "8.1.0" 17 | }, 18 | "scripts": { 19 | "start": "electron .", 20 | "build:all": "electron-packager . --all --icon ./src/icon.icns --out ./bin/ --overwrite", 21 | "build:mac": "electron-packager . --darwin --icon ./src/icon.icns --out ./bin/ --overwrite", 22 | "build:win": "electron-packager . --win32 --out ./bin/ --overwrite", 23 | "build:lin": "electron-packager . 
--linux --out ./bin/ --overwrite" 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /electron/src/aboutPage.js: -------------------------------------------------------------------------------- 1 | const openAboutWindow = require('about-window').default; 2 | const join = require('path').join; 3 | 4 | module.exports = { 5 | label: 'About Arctic Viewer', 6 | click: () => openAboutWindow({ 7 | icon_path: join(__dirname, 'icon.png'), 8 | package_json_dir: join(__dirname, '../'), 9 | }), 10 | }; 11 | -------------------------------------------------------------------------------- /electron/src/icon.icns: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/electron/src/icon.icns -------------------------------------------------------------------------------- /electron/src/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/electron/src/icon.png -------------------------------------------------------------------------------- /electron/src/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 18 | Arctic Viewer 19 | 20 | 21 |

No data open, press Ctrl / ⌘ + O to open a dataset.

22 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /electron/src/index.js: -------------------------------------------------------------------------------- 1 | /* global exec */ 2 | require('shelljs/global'); 3 | 4 | const electron = require('electron'); 5 | const avServer = require('arctic-viewer/bin/server'); 6 | const aboutPage = require('./aboutPage'); 7 | 8 | const { app, dialog, Menu, shell } = electron; 9 | const BrowserWindow = electron.BrowserWindow; 10 | 11 | // Keep a global reference of the window object, if you don't, the window will 12 | // be closed automatically when the JavaScript object is garbage collected. 13 | let mainWindow; 14 | let server = null; 15 | 16 | function createWindow() { 17 | // Create the browser window. 18 | mainWindow = new BrowserWindow({ width: 800, height: 600, icon: `${__dirname}/icon.png` }); 19 | 20 | // and load the index.html of the app. 21 | mainWindow.loadURL(`file://${__dirname}/index.html`); 22 | 23 | // Open the DevTools. 24 | // mainWindow.webContents.openDevTools() 25 | 26 | // Emitted when the window is closed. 27 | mainWindow.on('closed', () => { 28 | // Dereference the window object, usually you would store windows 29 | // in an array if your app supports multi windows, this is the time 30 | // when you should delete the corresponding element. 31 | mainWindow = null; 32 | }); 33 | } 34 | 35 | function openFile(dataPath) { 36 | if (mainWindow === null) { 37 | createWindow(); 38 | } 39 | 40 | if (!dataPath) { 41 | return; 42 | } 43 | console.log(dataPath); 44 | 45 | if (!server) { 46 | server = avServer(dataPath[0], { clientConfiguration: { MagicLens: false } }); 47 | server.listen(3000); 48 | server.on('error', (err) => { 49 | dialog.showErrorBox('Error starting arctic viewer server', err); 50 | }); 51 | 52 | mainWindow.loadURL('http://localhost:3000'); 53 | } else { 54 | server.updateDataPath(dataPath[0], () => { 55 | mainWindow.webContents.reloadIgnoringCache(); 56 | }); 57 | } 58 | createMenu(); 59 | } 60 | 61 | function toggleMagicLens() { 62 | server.updateClientConfiguration({ MagicLens: !server.getClientConfiguration().MagicLens }); 63 | mainWindow.reload(); 64 | createMenu(); 65 | } 66 | 67 | // create the application menu, 68 | // this is called initially on 'ready' and when there is a change in the server or window 69 | function createMenu() { 70 | const menuTemplate = [ 71 | { 72 | label: 'File', 73 | submenu: [ 74 | { 75 | label: 'Open...', 76 | accelerator: 'CmdOrCtrl+O', 77 | click() { dialog.showOpenDialog(mainWindow, { title: 'Open Dataset', properties: ['openDirectory'] }, openFile); }, 78 | }, 79 | ], 80 | }, 81 | { 82 | label: 'View', 83 | submenu: [ 84 | { 85 | label: 'Refresh', 86 | accelerator: 'CmdOrCtrl+R', 87 | enabled: mainWindow !== null && server !== null, 88 | click() { mainWindow.webContents.reloadIgnoringCache(); }, 89 | }, 90 | { 91 | label: `${server && server.getClientConfiguration().MagicLens ? 
'Disable' : 'Enable'} MagicLens`, 92 | enabled: mainWindow !== null && server !== null, 93 | click() { toggleMagicLens(); }, 94 | }, 95 | { 96 | label: 'Open in Browser', 97 | enabled: mainWindow !== null && server !== null, 98 | click() { shell.openExternal('http://localhost:3000'); }, 99 | }, 100 | // singleView; 101 | // recording; 102 | // development; 103 | ], 104 | }, 105 | { 106 | role: 'help', 107 | submenu: [ 108 | { 109 | label: 'Learn More', 110 | click() { shell.openExternal('https://kitware.github.io/arctic-viewer'); }, 111 | }, 112 | ], 113 | }, 114 | ]; 115 | 116 | if (process.platform === 'darwin') { 117 | const name = app.getName(); 118 | menuTemplate.unshift({ 119 | label: name, 120 | submenu: [ 121 | aboutPage, 122 | { type: 'separator' }, 123 | { role: 'services', submenu: [] }, 124 | { type: 'separator' }, 125 | { role: 'hide' }, 126 | { role: 'hideothers' }, 127 | { role: 'unhide' }, 128 | { type: 'separator' }, 129 | { role: 'quit' }, 130 | ], 131 | }); 132 | } else { 133 | menuTemplate[menuTemplate.length - 1].submenu.push(aboutPage); 134 | } 135 | 136 | const menu = Menu.buildFromTemplate(menuTemplate); 137 | Menu.setApplicationMenu(menu); 138 | } 139 | 140 | // This method will be called when Electron has finished 141 | // initialization and is ready to create browser windows. 142 | // Some APIs can only be used after this event occurs. 143 | app.on('ready', () => { 144 | createMenu(); 145 | createWindow(); 146 | }); 147 | 148 | // Quit when all windows are closed. 149 | app.on('window-all-closed', () => { 150 | if (server) { 151 | server = null; 152 | createMenu(); 153 | } 154 | 155 | // On OS X it is common for applications and their menu bar 156 | // to stay active until the user quits explicitly with Cmd + Q 157 | if (process.platform !== 'darwin') { 158 | app.quit(); 159 | } 160 | }); 161 | 162 | app.on('activate', () => { 163 | // On OS X it's common to re-create a window in the app when the 164 | // dock icon is clicked and there are no other windows open. 165 | if (mainWindow === null) { 166 | createWindow(); 167 | } 168 | }); 169 | -------------------------------------------------------------------------------- /electron/src/renderer.js: -------------------------------------------------------------------------------- 1 | // This file is required by the index.html file and will 2 | // be executed in the renderer process for that window. 3 | // All of the Node.js APIs are available in this process. 
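The Electron main process above also shows how the `arctic-viewer/bin/server` module can be driven programmatically. The following is a minimal sketch of embedding that server outside Electron; it only reuses the calls already visible in `electron/src/index.js` (`avServer(dataPath, options)`, `listen`, `on('error')`), and the `./my-dataset` path is a placeholder, not a documented entry point.

```js
// Minimal sketch: embedding the arctic-viewer server in a plain Node script.
// Only calls already made in electron/src/index.js are used here;
// './my-dataset' is a placeholder dataset directory.
const avServer = require('arctic-viewer/bin/server');

const server = avServer('./my-dataset', {
  clientConfiguration: { MagicLens: false },
});

server.on('error', (err) => {
  console.error('arctic-viewer server error:', err);
});

// Serve the viewer and the dataset on http://localhost:3000
server.listen(3000);
```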
4 | -------------------------------------------------------------------------------- /lib/arctic-viewer.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable import/prefer-default-export */ 2 | 3 | // CSS loading ---------------------------------------------------------------- 4 | 5 | import 'font-awesome/css/font-awesome.css'; 6 | import 'normalize.css'; 7 | 8 | import vtkURLExtract from 'vtk.js/Sources/Common/Core/URLExtract'; 9 | 10 | // Local import --------------------------------------------------------------- 11 | 12 | import * as Factory from './factory'; 13 | 14 | // Dependencies injections ---------------------------------------------------- 15 | 16 | const iOS = /iPad|iPhone|iPod/.test(window.navigator.platform); 17 | 18 | // Add class to body if iOS device -------------------------------------------- 19 | 20 | if (iOS) { 21 | document.querySelector('body').classList.add('is-ios-device'); 22 | } 23 | 24 | // Expose viewer factory method ----------------------------------------------- 25 | 26 | export function load(url, container) { 27 | Factory.createViewer(url, (viewer) => { 28 | if (!viewer) { 29 | /* eslint-disable no-alert */ 30 | alert('The metadata format seems to be unsupported.'); 31 | /* eslint-enable no-alert */ 32 | return; 33 | } 34 | 35 | Factory.createUI(viewer, container); 36 | }); 37 | } 38 | 39 | export function start(container) { 40 | const userParams = vtkURLExtract.extractURLParameters(); 41 | const url = userParams.data || '/data/index.json'; 42 | load(`${window.location.protocol}//${window.location.host}${url}`, container); 43 | } 44 | 45 | export const { updateConfig } = Factory; 46 | -------------------------------------------------------------------------------- /lib/types/CDF.js: -------------------------------------------------------------------------------- 1 | import contains from 'mout/src/array/contains'; 2 | import ChartBuilder from 'paraviewweb/src/Rendering/Chart/CDFChartBuilder'; 3 | 4 | export default function build({ basepath, viewer, dataType }) { 5 | // Can we handle the data 6 | if (!contains(dataType, 'cdf')) { 7 | return false; 8 | } 9 | 10 | viewer.ui = 'ChartViewer'; 11 | viewer.allowMagicLens = false; 12 | viewer.chartBuilder = new ChartBuilder(viewer.queryDataModel); 13 | 14 | viewer.queryDataModel.fetchData(); 15 | 16 | return true; 17 | } 18 | -------------------------------------------------------------------------------- /lib/types/Chart.js: -------------------------------------------------------------------------------- 1 | import contains from 'mout/src/array/contains'; 2 | import ChartBuilder from 'paraviewweb/src/Rendering/Chart/PlotlyChartBuilder'; 3 | 4 | export default function build({ basepath, viewer, dataType }) { 5 | // Can we handle the data 6 | if (!contains(dataType, 'chart')) { 7 | return false; 8 | } 9 | 10 | viewer.ui = 'ChartViewer'; 11 | viewer.allowMagicLens = false; 12 | viewer.chartBuilder = new ChartBuilder(viewer.queryDataModel); 13 | 14 | viewer.queryDataModel.fetchData(); 15 | 16 | return true; 17 | } 18 | -------------------------------------------------------------------------------- /lib/types/CompositeImageQueryDataModel.js: -------------------------------------------------------------------------------- 1 | import CollapsibleElement from 'paraviewweb/src/React/Widgets/CollapsibleWidget'; 2 | import CompositeControl from 'paraviewweb/src/React/Widgets/CompositePipelineWidget'; 3 | import contains from 'mout/src/array/contains'; 4 | import 
PipelineModel from 'paraviewweb/src/Common/State/PipelineState'; 5 | import QueryDataModelImageBuilder from 'paraviewweb/src/Rendering/Image/QueryDataModelImageBuilder'; 6 | import React from 'react'; 7 | 8 | export default function build({ viewer, dataType }) { 9 | // Can we handle the data 10 | if (!contains(dataType, 'composite-pipeline-image')) { 11 | return false; 12 | } 13 | 14 | viewer.pipelineModel = new PipelineModel(viewer.queryDataModel.originalData); 15 | viewer.imageBuilder = new QueryDataModelImageBuilder(viewer.queryDataModel); 16 | 17 | viewer.pipelineModel.onChange((pipeline) => { 18 | viewer.queryDataModel.setValue('pipeline', pipeline); 19 | viewer.imageBuilder.update(); 20 | }); 21 | viewer.queryDataModel.setValue( 22 | 'pipeline', 23 | viewer.pipelineModel.getPipelineQuery() 24 | ); 25 | 26 | viewer.menuAddOn = [ 27 | 28 | 29 | , 30 | ]; 31 | 32 | viewer.imageBuilder.update(); 33 | 34 | return true; 35 | } 36 | -------------------------------------------------------------------------------- /lib/types/CompositePipeline.js: -------------------------------------------------------------------------------- 1 | import PipelineModel from 'paraviewweb/src/Common/State/PipelineState'; 2 | import CompositeImageBuilder from 'paraviewweb/src/Rendering/Image/CompositeImageBuilder'; 3 | import contains from 'mout/src/array/contains'; 4 | 5 | export default function build({ viewer, dataType }) { 6 | // Can we handle the data 7 | if (!contains(dataType, 'composite-pipeline')) { 8 | return false; 9 | } 10 | 11 | viewer.pipelineModel = new PipelineModel(viewer.queryDataModel.originalData); 12 | viewer.imageBuilder = new CompositeImageBuilder( 13 | viewer.queryDataModel, 14 | viewer.pipelineModel 15 | ); 16 | viewer.imageBuilder.update(); 17 | 18 | return true; 19 | } 20 | -------------------------------------------------------------------------------- /lib/types/DataProber.js: -------------------------------------------------------------------------------- 1 | import DataProberImageBuilder from 'paraviewweb/src/Rendering/Image/DataProberImageBuilder'; 2 | import BinaryDataProberImageBuilder from 'paraviewweb/src/Rendering/Image/BinaryDataProberImageBuilder'; 3 | import LineChartPainter from 'paraviewweb/src/Rendering/Painter/LineChartPainter'; 4 | import equals from 'mout/src/array/equals'; 5 | import contains from 'mout/src/array/contains'; 6 | 7 | export default function build({ viewer, dataType, callback }) { 8 | let ProberImageBuilder = DataProberImageBuilder; 9 | let dimensions = null; 10 | 11 | // Can we handle the data 12 | if ( 13 | !contains(dataType, 'in-situ-data-prober') && 14 | !(contains(dataType, 'data-prober') && contains(dataType, 'binary')) 15 | ) { 16 | return false; 17 | } 18 | 19 | // Handle the image and binary format 20 | if (contains(dataType, 'data-prober') && contains(dataType, 'binary')) { 21 | ProberImageBuilder = BinaryDataProberImageBuilder; 22 | dimensions = viewer.queryDataModel.originalData.DataProber.dimensions; 23 | } else { 24 | dimensions = viewer.queryDataModel.originalData.InSituDataProber.dimensions; 25 | } 26 | 27 | // Create 2 viewer choice 28 | const singleView = { 29 | name: 'Single View', 30 | imageBuilder: new ProberImageBuilder( 31 | viewer.queryDataModel, 32 | viewer.config.lookupTableManager 33 | ), 34 | queryDataModel: viewer.queryDataModel, 35 | ui: 'ProbeViewerWidget', 36 | allowMagicLens: false, 37 | destroy: () => { 38 | this.imageBuilder.destroy(); 39 | }, 40 | }; 41 | 42 | const multiView = { 43 | name: 'Multi View', 44 | 
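// Populated below with one image builder per render method, plus three line-chart painters (keyed by name).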
renderers: {}, 45 | queryDataModel: viewer.queryDataModel, 46 | ui: 'MultiViewerWidget', 47 | allowMagicLens: false, 48 | destroy: () => { 49 | while (this.renderers.length) { 50 | this.renderers.pop().destroy(); 51 | } 52 | }, 53 | }; 54 | 55 | // Configure single view 56 | singleView.imageBuilder.setProbeLineNotification(true); 57 | singleView.imageBuilder.update(); 58 | 59 | if (!viewer.config.ensemble) { 60 | // Configure multi view 61 | const imageBuilders = [ 62 | new ProberImageBuilder( 63 | viewer.queryDataModel, 64 | viewer.config.lookupTableManager 65 | ), 66 | new ProberImageBuilder( 67 | viewer.queryDataModel, 68 | viewer.config.lookupTableManager 69 | ), 70 | new ProberImageBuilder( 71 | viewer.queryDataModel, 72 | viewer.config.lookupTableManager 73 | ), 74 | ]; 75 | 76 | const chartPainters = [ 77 | new LineChartPainter('X: {x}'), 78 | new LineChartPainter('Y: {x}'), 79 | new LineChartPainter('Z: {x}'), 80 | ]; 81 | 82 | const updateProbeLocation = (data, envelope) => { 83 | imageBuilders.forEach((builder) => { 84 | if (!equals(data, builder.getProbe())) { 85 | builder.setProbe(data[0], data[1], data[2]); 86 | } 87 | }); 88 | 89 | // Update charts 90 | chartPainters[0].setMarkerLocation(data[0] / (dimensions[0] - 1)); 91 | chartPainters[1].setMarkerLocation(data[1] / (dimensions[1] - 1)); 92 | chartPainters[2].setMarkerLocation(data[2] / (dimensions[2] - 1)); 93 | }; 94 | 95 | const updateCrosshairVisibility = (data, envelope) => { 96 | imageBuilders.forEach((builder) => { 97 | builder.setCrossHairEnable(data); 98 | }); 99 | 100 | // Update charts 101 | chartPainters[0].enableMarker(data); 102 | chartPainters[1].enableMarker(data); 103 | chartPainters[2].enableMarker(data); 104 | }; 105 | 106 | const updateChartPainters = (data, envelope) => { 107 | if (data.x.fields[0].data.length) { 108 | chartPainters[0].updateData(data.x); 109 | } 110 | if (data.y.fields[0].data.length) { 111 | chartPainters[1].updateData(data.y); 112 | } 113 | if (data.z.fields[0].data.length) { 114 | chartPainters[2].updateData(data.z); 115 | } 116 | }; 117 | 118 | // Initialize the Image builders 119 | const methods = [].concat(singleView.imageBuilder.getRenderMethods()); 120 | imageBuilders.forEach((builder) => { 121 | const name = methods.shift(); 122 | builder.setRenderMethod(name); 123 | builder.setRenderMethodImutable(); 124 | builder.setProbeLineNotification(true); 125 | builder.onProbeChange(updateProbeLocation); 126 | builder.onCrosshairVisibilityChange(updateCrosshairVisibility); 127 | builder.onProbeLineReady(updateChartPainters); 128 | builder.update(); 129 | multiView.renderers[name] = { name, builder }; 130 | }); 131 | 132 | // Initialize the Chart painters 133 | const names = ['Line Chart X', 'Line Chart Y', 'Line Chart Z']; 134 | chartPainters.forEach((painter) => { 135 | const name = names.shift(); 136 | multiView.renderers[name] = { name, painter }; 137 | }); 138 | 139 | // Show the selector UI 140 | viewer.list = [singleView, multiView]; 141 | viewer.ui = 'ViewerSelector'; 142 | viewer.allowMagicLens = false; 143 | viewer.onChange = (idx, list) => { 144 | callback(viewer.list[idx]); 145 | }; 146 | } else { 147 | viewer.imageBuilder = singleView.imageBuilder; 148 | viewer.ui = singleView.ui; 149 | } 150 | 151 | return true; 152 | } 153 | -------------------------------------------------------------------------------- /lib/types/DepthComposite.js: -------------------------------------------------------------------------------- 1 | import PipelineModel from 
'paraviewweb/src/Common/State/PipelineState'; 2 | import DepthCompositeImageBuilder from 'paraviewweb/src/Rendering/Image/DepthCompositeImageBuilder'; 3 | import contains from 'mout/src/array/contains'; 4 | 5 | export default function build({ viewer, dataType }) { 6 | // Can we handle the data 7 | if ( 8 | !contains(dataType, 'webgl-composite') && 9 | !contains(dataType, 'depth-composite') 10 | ) { 11 | return false; 12 | } 13 | 14 | viewer.pipelineModel = new PipelineModel(viewer.queryDataModel.originalData); 15 | viewer.imageBuilder = new DepthCompositeImageBuilder( 16 | viewer.queryDataModel, 17 | viewer.pipelineModel, 18 | viewer.config.lookupTableManager 19 | ); 20 | viewer.imageBuilder.update(); 21 | 22 | return true; 23 | } 24 | -------------------------------------------------------------------------------- /lib/types/FloatImage.js: -------------------------------------------------------------------------------- 1 | import FloatImageImageBuilder from 'paraviewweb/src/Rendering/Image/FloatDataImageBuilder'; 2 | import LineChartPainter from 'paraviewweb/src/Rendering/Painter/LineChartPainter'; 3 | import contains from 'mout/src/array/contains'; 4 | 5 | export default function build({ viewer, dataType }) { 6 | // Can we handle the data 7 | if (!contains(dataType, 'float-image')) { 8 | return false; 9 | } 10 | 11 | const timeSize = viewer.queryDataModel.getSize('time'); 12 | if ( 13 | viewer.queryDataModel.originalData.FloatImage.layers.length === 1 && 14 | timeSize && 15 | timeSize > 1 16 | ) { 17 | return false; 18 | } 19 | 20 | // No Magic Lens for us 21 | viewer.allowMagicLens = false; 22 | 23 | // Create ImageBuilder + Line Chart for time probe 24 | const imageBuilder = new FloatImageImageBuilder( 25 | viewer.queryDataModel, 26 | viewer.config.lookupTableManager 27 | ); 28 | 29 | imageBuilder.update(); 30 | 31 | // If time available provide a chart painter 32 | if ( 33 | timeSize && 34 | timeSize > 1 && 35 | !contains(dataType, 'single-view') && 36 | !viewer.config.SingleView 37 | ) { 38 | let timeArray = null; 39 | const chartPainter = new LineChartPainter(''); 40 | viewer.renderers = { 41 | '3D View': { 42 | builder: imageBuilder, 43 | name: '3D View', 44 | }, 45 | 'Time data': { 46 | painter: chartPainter, 47 | name: 'Time data', 48 | }, 49 | }; 50 | viewer.ui = 'MultiViewerWidget'; 51 | viewer.layout = '2x1'; 52 | 53 | // Link chartPainter with image builder 54 | imageBuilder.onTimeDataReady((data, envelope) => { 55 | if (data.fields.length) { 56 | timeArray = data.fields[0].data; 57 | chartPainter.setTitle(`Field: ${data.fields[0].name}`); 58 | chartPainter.updateData(data); 59 | } 60 | }); 61 | // Link time change with painter mark 62 | viewer.queryDataModel.onStateChange((data, envelope) => { 63 | const timeIdx = viewer.queryDataModel.getIndex('time'); 64 | imageBuilder.getTimeProbe().value = timeArray ? 
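// timeArray is filled by the onTimeDataReady handler above; until it arrives, show an empty probe value.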
timeArray[timeIdx] : ''; 65 | chartPainter.setMarkerLocation(timeIdx / timeSize); 66 | }); 67 | } else { 68 | // Single view 69 | viewer.imageBuilder = imageBuilder; 70 | } 71 | 72 | return true; 73 | } 74 | -------------------------------------------------------------------------------- /lib/types/Geometry.js: -------------------------------------------------------------------------------- 1 | import CollapsibleElement from 'paraviewweb/src/React/Widgets/CollapsibleWidget'; 2 | import CompositeControl from 'paraviewweb/src/React/Widgets/CompositePipelineWidget'; 3 | import CompositePipelineModel from 'paraviewweb/src/Common/State/PipelineState'; 4 | import contains from 'mout/src/array/contains'; 5 | import GeometryBuilder from 'paraviewweb/src/Rendering/Geometry/ThreeGeometryBuilder'; 6 | import GeometryDataModel from 'paraviewweb/src/IO/Core/GeometryDataModel'; 7 | import LookupTableManagerWidget from 'paraviewweb/src/React/CollapsibleControls/LookupTableManagerControl'; 8 | import QueryDataModelWithExplorationWidget from 'paraviewweb/src/React/CollapsibleControls/QueryDataModelControl'; 9 | import React from 'react'; 10 | 11 | export default function build({ basepath, viewer, dataType }) { 12 | // Can we handle the data 13 | if (!contains(dataType, 'geometry')) { 14 | return false; 15 | } 16 | 17 | const pipelineModel = new CompositePipelineModel( 18 | viewer.queryDataModel.originalData 19 | ); 20 | const geometryDataModel = new GeometryDataModel(basepath); 21 | const lutMgr = viewer.config.lookupTableManager; 22 | 23 | viewer.ui = 'GeometryViewer'; 24 | viewer.allowMagicLens = false; 25 | viewer.geometryBuilder = new GeometryBuilder( 26 | lutMgr, 27 | geometryDataModel, 28 | pipelineModel, 29 | viewer.queryDataModel 30 | ); 31 | viewer.menuAddOn = [ 32 | , 37 | 38 | 39 | , 40 | , 45 | ]; 46 | 47 | return true; 48 | } 49 | -------------------------------------------------------------------------------- /lib/types/Histogram2D.js: -------------------------------------------------------------------------------- 1 | import Histogram2DImageBuilder from 'paraviewweb/src/Rendering/Image/Histogram2DImageBuilder'; 2 | import contains from 'mout/src/array/contains'; 3 | 4 | export default function build({ viewer, dataType }) { 5 | // Can we handle the data 6 | if (!contains(dataType, 'histogram2D')) { 7 | return false; 8 | } 9 | 10 | viewer.imageBuilder = new Histogram2DImageBuilder(viewer.queryDataModel); 11 | viewer.imageBuilder.update(); 12 | 13 | return true; 14 | } 15 | -------------------------------------------------------------------------------- /lib/types/Histogram2DPlotly.js: -------------------------------------------------------------------------------- 1 | import contains from 'mout/src/array/contains'; 2 | import ChartBuilder from 'paraviewweb/src/Rendering/Chart/Histogram2DPlotlyChartBuilder'; 3 | 4 | export default function build({ basepath, viewer, dataType }) { 5 | // Can we handle the data 6 | if (!contains(dataType, 'histogram2D') || !contains(dataType, 'plotly')) { 7 | return false; 8 | } 9 | 10 | viewer.ui = 'ChartViewer'; 11 | viewer.allowMagicLens = false; 12 | viewer.chartBuilder = new ChartBuilder(viewer.queryDataModel); 13 | 14 | viewer.queryDataModel.fetchData(); 15 | 16 | return true; 17 | } 18 | -------------------------------------------------------------------------------- /lib/types/ImageQueryDataModel.js: -------------------------------------------------------------------------------- 1 | import QueryDataModelImageBuilder from 
'paraviewweb/src/Rendering/Image/QueryDataModelImageBuilder'; 2 | import contains from 'mout/src/array/contains'; 3 | 4 | export default function build({ viewer, dataType }) { 5 | // Can we handle the data 6 | if (!contains(dataType, 'tonic-query-data-model') || dataType.length > 1) { 7 | return false; 8 | } 9 | 10 | viewer.imageBuilder = new QueryDataModelImageBuilder(viewer.queryDataModel); 11 | viewer.imageBuilder.update(); 12 | 13 | return true; 14 | } 15 | -------------------------------------------------------------------------------- /lib/types/SortedComposite.js: -------------------------------------------------------------------------------- 1 | import PipelineModel from 'paraviewweb/src/Common/State/PipelineState'; 2 | import MultiColorBySortedImageBuilder from 'paraviewweb/src/Rendering/Image/MultiColorBySortedCompositeImageBuilder'; 3 | import SortedCompositeImageBuilder from 'paraviewweb/src/Rendering/Image/SortedCompositeImageBuilder'; 4 | import contains from 'mout/src/array/contains'; 5 | 6 | export default function build({ viewer, dataType }) { 7 | // Can we handle the data 8 | if (!contains(dataType, 'sorted-composite')) { 9 | return false; 10 | } 11 | 12 | if (contains(dataType, 'multi-color-by')) { 13 | viewer.pipelineModel = new PipelineModel( 14 | viewer.queryDataModel.originalData, 15 | true 16 | ); 17 | viewer.imageBuilder = new MultiColorBySortedImageBuilder( 18 | viewer.queryDataModel, 19 | viewer.config.lookupTableManager, 20 | viewer.pipelineModel 21 | ); 22 | viewer.imageBuilder.update(); 23 | } else { 24 | viewer.imageBuilder = new SortedCompositeImageBuilder( 25 | viewer.queryDataModel, 26 | viewer.config.lookupTableManager 27 | ); 28 | viewer.imageBuilder.update(); 29 | } 30 | 31 | return true; 32 | } 33 | -------------------------------------------------------------------------------- /lib/types/VTKGeometry.js: -------------------------------------------------------------------------------- 1 | import CollapsibleElement from 'paraviewweb/src/React/Widgets/CollapsibleWidget'; 2 | import CompositeControl from 'paraviewweb/src/React/Widgets/CompositePipelineWidget'; 3 | import CompositePipelineModel from 'paraviewweb/src/Common/State/PipelineState'; 4 | import contains from 'mout/src/array/contains'; 5 | import GeometryBuilder from 'paraviewweb/src/Rendering/Geometry/VTKGeometryBuilder'; 6 | import GeometryDataModel from 'paraviewweb/src/IO/Core/VTKGeometryDataModel'; 7 | import LookupTableManagerWidget from 'paraviewweb/src/React/CollapsibleControls/LookupTableManagerControl'; 8 | import QueryDataModelWithExplorationWidget from 'paraviewweb/src/React/CollapsibleControls/QueryDataModelControl'; 9 | import React from 'react'; 10 | 11 | export default function build({ basepath, viewer, dataType }) { 12 | // Can we handle the data 13 | if (!contains(dataType, 'vtk-geometry')) { 14 | return false; 15 | } 16 | 17 | const pipelineModel = new CompositePipelineModel( 18 | viewer.queryDataModel.originalData 19 | ); 20 | const geometryDataModel = new GeometryDataModel(basepath); 21 | const lutMgr = viewer.config.lookupTableManager; 22 | 23 | geometryDataModel.setDataManager(viewer.queryDataModel.getDataManager()); 24 | 25 | viewer.ui = 'GeometryViewer'; 26 | viewer.allowMagicLens = false; 27 | viewer.geometryBuilder = new GeometryBuilder( 28 | lutMgr, 29 | geometryDataModel, 30 | pipelineModel, 31 | viewer.queryDataModel 32 | ); 33 | viewer.menuAddOn = [ 34 | , 39 | 40 | 41 | , 42 | , 47 | ]; 48 | 49 | return true; 50 | } 51 | 
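The modules in `lib/types` above all follow the same contract: a default-exported `build({ basepath, viewer, dataType })` function that returns `false` when it cannot handle the declared `type` tags, and otherwise configures the `viewer` (image or geometry builder, `ui`, `allowMagicLens`, optional `menuAddOn`) and returns `true`. As a rough sketch only, here is what an additional handler could look like; the `'my-custom-type'` tag and the choice of builder are purely illustrative, and a real handler would still need to be wired into `lib/types/index.js` (not shown here).

```js
import contains from 'mout/src/array/contains';
// Reusing a builder already imported above; any builder exposing update() fits this pattern.
import QueryDataModelImageBuilder from 'paraviewweb/src/Rendering/Image/QueryDataModelImageBuilder';

export default function build({ basepath, viewer, dataType }) {
  // Bail out unless the dataset descriptor lists our (hypothetical) type tag
  if (!contains(dataType, 'my-custom-type')) {
    return false;
  }

  // Configure the viewer the same way the handlers above do
  viewer.allowMagicLens = false;
  viewer.imageBuilder = new QueryDataModelImageBuilder(viewer.queryDataModel);
  viewer.imageBuilder.update();

  return true;
}
```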
-------------------------------------------------------------------------------- /lib/types/VTKVolume.js: -------------------------------------------------------------------------------- 1 | import contains from 'mout/src/array/contains'; 2 | import VolumeBuilder from 'paraviewweb/src/Rendering/Geometry/VTKVolumeBuilder'; 3 | import VTKImageDataModel from 'paraviewweb/src/IO/Core/VTKImageDataModel'; 4 | import LookupTableManagerWidget from 'paraviewweb/src/React/CollapsibleControls/LookupTableManagerControl'; 5 | import QueryDataModelWithExplorationWidget from 'paraviewweb/src/React/CollapsibleControls/QueryDataModelControl'; 6 | import PieceWiseFunctionEditorWidget from 'paraviewweb/src/React/Widgets/PieceWiseFunctionEditorWidget'; 7 | import CollapsibleWidget from 'paraviewweb/src/React/Widgets/CollapsibleWidget'; 8 | import { debounce } from 'paraviewweb/src/Common/Misc/Debounce'; 9 | import React from 'react'; 10 | import PropTypes from 'prop-types'; 11 | 12 | export class PiecewiseWidget extends React.Component { 13 | constructor(props) { 14 | super(props); 15 | 16 | this.state = { 17 | points: [{ x: 0, y: 0 }, { x: 1, y: 1 }], 18 | }; 19 | 20 | // Bind methods 21 | this.onChange = this.onChange.bind(this); 22 | this.updateVolumeBuilder = debounce( 23 | this.updateVolumeBuilder.bind(this), 24 | 250 25 | ); 26 | 27 | // Handle initialization 28 | const volumeBuilder = this.props.volumeBuilder; 29 | this.subscription = volumeBuilder.onImageReady(() => { 30 | const metadata = volumeBuilder.queryDataModel.originalData.metadata; 31 | if (metadata && metadata.piecewise) { 32 | const points = []; 33 | const dataRange = volumeBuilder.getDataRange() || [0, 255]; 34 | const scale = dataRange[1] - dataRange[0]; 35 | metadata.piecewise.forEach((node) => { 36 | points.push({ 37 | x: (node[0] - dataRange[0]) / scale, 38 | y: node[1], 39 | }); 40 | }); 41 | this.setState({ points }, this.updateVolumeBuilder); 42 | } 43 | }); 44 | } 45 | 46 | componentWillUnmount() { 47 | this.subscription.unsubscribe(); 48 | } 49 | 50 | onChange(points) { 51 | this.setState({ points }, this.updateVolumeBuilder); 52 | } 53 | 54 | updateVolumeBuilder() { 55 | const points = this.state.points; 56 | const volumeBuilder = this.props.volumeBuilder; 57 | if (volumeBuilder && volumeBuilder.getDataRange()) { 58 | const dataRange = volumeBuilder.getDataRange() || [0, 255]; 59 | const pw = volumeBuilder.getPiecewiseFunction(); 60 | const scale = dataRange[1] - dataRange[0]; 61 | pw.removeAllPoints(); 62 | points.forEach(({ x, y }) => { 63 | pw.addPoint(dataRange[0] + x * scale, y); 64 | }); 65 | volumeBuilder.render(); 66 | } 67 | } 68 | 69 | render() { 70 | const dataRange = this.props.volumeBuilder.getDataRange() || [0, 255]; 71 | return ( 72 | { 74 | this.pwf = c; 75 | }} 76 | points={this.state.points} 77 | rangeMin={dataRange[0]} 78 | rangeMax={dataRange[1]} 79 | onChange={this.onChange} 80 | height={150} 81 | width={262} 82 | visible 83 | /> 84 | ); 85 | } 86 | } 87 | 88 | PiecewiseWidget.propTypes = { 89 | volumeBuilder: PropTypes.object.isRequired, 90 | }; 91 | 92 | export default function build({ basepath, viewer, dataType }) { 93 | // Can we handle the data 94 | if (!contains(dataType, 'vtk-volume')) { 95 | return false; 96 | } 97 | 98 | const imageyDataModel = new VTKImageDataModel(basepath); 99 | imageyDataModel.setDataManager(viewer.queryDataModel.getDataManager()); 100 | 101 | const lutMgr = viewer.config.lookupTableManager; 102 | const volumeBuilder = new VolumeBuilder( 103 | lutMgr, 104 | imageyDataModel, 105 | 
viewer.queryDataModel 106 | ); 107 | 108 | volumeBuilder.onImageReady(() => { 109 | const dataRange = volumeBuilder.getDataRange(); 110 | volumeBuilder.getLookupTable().setScalarRange(...dataRange); 111 | volumeBuilder.updateColoring(); 112 | }); 113 | 114 | imageyDataModel.setDataManager(viewer.queryDataModel.getDataManager()); 115 | imageyDataModel.setFetchGzip( 116 | !!viewer.queryDataModel.originalData.metadata.fetchGzip 117 | ); 118 | 119 | viewer.ui = 'GeometryViewer'; 120 | viewer.allowMagicLens = false; 121 | viewer.geometryBuilder = volumeBuilder; 122 | viewer.menuAddOn = [ 123 | , 128 | 129 | 133 | , 134 | , 139 | ]; 140 | 141 | return true; 142 | } 143 | -------------------------------------------------------------------------------- /lib/types/VTKVolumeSLIC.js: -------------------------------------------------------------------------------- 1 | import contains from 'mout/src/array/contains'; 2 | import VolumeBuilder from 'paraviewweb/src/Rendering/Geometry/VTKVolumeBuilder'; 3 | import VTKSLICDataModel from 'paraviewweb/src/IO/Core/VTKSLICDataModel'; 4 | import LookupTableManagerWidget from 'paraviewweb/src/React/CollapsibleControls/LookupTableManagerControl'; 5 | import QueryDataModelWithExplorationWidget from 'paraviewweb/src/React/CollapsibleControls/QueryDataModelControl'; 6 | import CollapsibleWidget from 'paraviewweb/src/React/Widgets/CollapsibleWidget'; 7 | import React from 'react'; 8 | import { PiecewiseWidget } from './VTKVolume'; 9 | 10 | export default function build({ basepath, viewer, dataType }) { 11 | // Can we handle the data 12 | if (!contains(dataType, 'vtk-slic-volume')) { 13 | return false; 14 | } 15 | 16 | const imageDataModel = new VTKSLICDataModel(); 17 | 18 | const lutMgr = viewer.config.lookupTableManager; 19 | const volumeBuilder = new VolumeBuilder( 20 | lutMgr, 21 | imageDataModel, 22 | viewer.queryDataModel 23 | ); 24 | 25 | volumeBuilder.onImageReady(() => { 26 | const dataRange = volumeBuilder.getDataRange(); 27 | volumeBuilder.getLookupTable().setScalarRange(...dataRange); 28 | volumeBuilder.updateColoring(); 29 | }); 30 | 31 | viewer.ui = 'GeometryViewer'; 32 | viewer.allowMagicLens = false; 33 | viewer.geometryBuilder = volumeBuilder; 34 | viewer.menuAddOn = [ 35 | , 40 | 41 | 45 | , 46 | , 51 | ]; 52 | 53 | return true; 54 | } 55 | -------------------------------------------------------------------------------- /lib/types/index.js: -------------------------------------------------------------------------------- 1 | import QueryDataModel from 'paraviewweb/src/IO/Core/QueryDataModel'; 2 | import LookupTableManager from 'paraviewweb/src/Common/Core/LookupTableManager'; 3 | import contains from 'mout/src/array/contains'; 4 | 5 | import 'paraviewweb/src/React/CollapsibleControls/CollapsibleControlFactory/QueryDataModelWidget'; 6 | 7 | import ViewerA from './Chart'; 8 | import ViewerB from './CompositeImageQueryDataModel'; 9 | import ViewerC from './CompositePipeline'; 10 | import ViewerD from './DataProber'; 11 | import ViewerE from './DepthComposite'; 12 | import ViewerF from './FloatImage'; 13 | import ViewerG from './Geometry'; 14 | import ViewerH from './ImageQueryDataModel'; 15 | import ViewerI from './SortedComposite'; 16 | import ViewerJ from './TimeFloatImage'; 17 | import ViewerK from './Histogram2D'; 18 | import ViewerL from './Histogram2DPlotly'; 19 | import ViewerM from './VTKGeometry'; 20 | import ViewerN from './VTKVolume'; 21 | import ViewerO from './VTKVolumeSLIC'; 22 | import ViewerP from './VTKVolumeSLICDebug'; 23 | import 
ViewerQ from './CDF'; 24 | 25 | const dataViewers = [ 26 | ViewerA, 27 | ViewerB, 28 | ViewerC, 29 | ViewerD, 30 | ViewerE, 31 | ViewerF, 32 | ViewerG, 33 | ViewerH, 34 | ViewerI, 35 | ViewerJ, 36 | ViewerK, 37 | ViewerL, 38 | ViewerM, 39 | ViewerN, 40 | ViewerO, 41 | ViewerP, 42 | ViewerQ, 43 | ]; 44 | 45 | const lookupTableManager = new LookupTableManager(); 46 | 47 | export default function build(basepath, data, config, callback) { 48 | let foundViewer = false; 49 | let viewerCount = dataViewers.length; 50 | 51 | const dataType = data.type; 52 | const viewer = { 53 | ui: 'GenericViewer', 54 | config, 55 | allowMagicLens: true, 56 | }; 57 | 58 | // Initializer shared variables 59 | config.lookupTableManager = lookupTableManager; 60 | 61 | // Update background if available 62 | if (data && data.metadata && data.metadata.backgroundColor) { 63 | viewer.bgColor = data.metadata.backgroundColor; 64 | } 65 | 66 | // Update QueryDataModel if needed 67 | if (contains(dataType, 'tonic-query-data-model')) { 68 | viewer.queryDataModel = 69 | config.queryDataModel || new QueryDataModel(data, basepath); 70 | } 71 | 72 | // Find the right viewer and build it 73 | const args = { basepath, data, callback, viewer, dataType }; 74 | while (viewerCount && !foundViewer) { 75 | viewerCount -= 1; 76 | foundViewer = dataViewers[viewerCount](args); 77 | } 78 | 79 | setImmediate(() => callback(viewer)); 80 | return foundViewer; 81 | } 82 | -------------------------------------------------------------------------------- /lib/viewer/DataSetView.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import PropTypes from 'prop-types'; 3 | 4 | export default class DataSetViewer extends React.Component { 5 | constructor(props) { 6 | super(props); 7 | 8 | this.openDataSet = this.openDataSet.bind(this); 9 | } 10 | 11 | openDataSet() { 12 | ArcticViewer.load( 13 | `http://${window.location.host}${this.props.base}${this.props.item.path}`, 14 | document.querySelector('.react-content') 15 | ); // eslint-disable-line 16 | } 17 | 18 | render() { 19 | return ( 20 |
21 |       {/* Markup reconstructed from the DataSetView__* classes in style.css; attribute values are assumptions. */}
22 |       <div
23 |         className="DataSetView__thumbnail"
24 |         onClick={this.openDataSet}
25 |         style={{
26 |           backgroundImage: `url(${this.props.base}${this.props.item.path}thumbnail.png)`,
27 |         }}
28 |       >
29 |         <i className="fa fa-cube" />
30 |       </div>
31 |       <div className="DataSetView__titleBar">
32 |         <strong>{this.props.item.name}</strong>
33 |         <span className="DataSetView__size">{this.props.item.size}</span>
34 |       </div>
35 |       <div className="DataSetView__description">
36 |         {this.props.item.description}
37 |       </div>
38 |     </div>
39 | ); 40 | } 41 | } 42 | 43 | DataSetViewer.propTypes = { 44 | base: PropTypes.string, 45 | item: PropTypes.object, 46 | }; 47 | 48 | DataSetViewer.defaultProps = { 49 | base: '/data/', 50 | item: 'index.json', 51 | }; 52 | -------------------------------------------------------------------------------- /lib/viewer/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Kitware/arctic-viewer/1813d5b702a729bce2defaabf165289ef04566ee/lib/viewer/icon.png -------------------------------------------------------------------------------- /lib/viewer/index.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import PropTypes from 'prop-types'; 3 | 4 | import DataSetView from './DataSetView'; 5 | import icon from './icon.png'; 6 | 7 | // Load CSS 8 | import './style.css'; 9 | 10 | export default function ArcticViewer(props) { 11 | const base = props.basePath; 12 | return ( 13 |
{/* Markup reconstructed from the ArcticViewer__* classes in style.css; attribute values are assumptions. */}
14 |     <div className="ArcticViewer__bar">
15 |       <img src={props.icon} alt="ArcticViewer" />
16 |       <strong>{props.title}</strong>
17 |     </div>
18 |     <div className="ArcticViewer__content">
19 |       {props.list.map((item, i) => (
20 |         <DataSetView key={i} base={base} item={item} />
21 |       ))}
22 |     </div>
23 |   </div>
24 | ); 25 | } 26 | 27 | ArcticViewer.propTypes = { 28 | basePath: PropTypes.string, 29 | icon: PropTypes.string, 30 | list: PropTypes.array, 31 | title: PropTypes.string, 32 | }; 33 | 34 | ArcticViewer.defaultProps = { 35 | title: 'Arctic Viewer', 36 | icon, 37 | list: [], 38 | basePath: '/', 39 | }; 40 | -------------------------------------------------------------------------------- /lib/viewer/style.css: -------------------------------------------------------------------------------- 1 | .ArcticViewer { 2 | 3 | } 4 | /* outline: 60606F | text: 003B4E | lightBlue: 72C2EC */ 5 | .ArcticViewer__bar { 6 | position: relative; 7 | width: 100%; 8 | background-color: #003B4E; 9 | color: #78CDF3; 10 | padding-bottom: 5px; 11 | border-bottom: 1px solid #003B4E; 12 | } 13 | 14 | .ArcticViewer__bar > img { 15 | height: 30px; 16 | position: relative; 17 | top: 5px; 18 | margin-left: 10px; 19 | } 20 | 21 | .ArcticViewer__bar > strong { 22 | font-size: 25px; 23 | } 24 | 25 | .ArcticViewer__content { 26 | position: relative; 27 | width: 100vw; 28 | height: calc(100vh - 30px); 29 | } 30 | 31 | .DataSetView { 32 | padding: 20px; 33 | display: block; 34 | clear: both; 35 | } 36 | 37 | .DataSetView__thumbnail { 38 | float: left; 39 | position: relative; 40 | width: 250px; 41 | height: 250px; 42 | border: 2px solid #888888; 43 | border-radius: 5px; 44 | background-position: 50% 50%; 45 | background-size: 250px; 46 | background-repeat: no-repeat; 47 | margin-right: 25px; 48 | box-shadow: 5px 5px 5px #888888; 49 | cursor: pointer; 50 | text-align: center; 51 | } 52 | .DataSetView__thumbnail > i { 53 | font-size: 250px; 54 | } 55 | 56 | .DataSetView__titleBar { 57 | font-size: 18px; 58 | } 59 | 60 | .DataSetView__titleBar > strong { 61 | font-size: 25px; 62 | } 63 | 64 | .DataSetView__description { 65 | padding: 25px 25px 0 0; 66 | height: 200px; 67 | overflow: auto; 68 | } 69 | 70 | .DataSetView__type { 71 | float: right; 72 | } 73 | 74 | .DataSetView__size { 75 | float: right; 76 | } 77 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "arctic-viewer", 3 | "description": "Standalone application useful to load and explore in-situ/batch data products.", 4 | "license": "BSD-3-Clause", 5 | "version": "0.0.0-semantically-release", 6 | "main": "./lib/arctic-viewer.js", 7 | "repository": { 8 | "type": "git", 9 | "url": "https://github.com/Kitware/arctic-viewer.git" 10 | }, 11 | "dependencies": { 12 | "body-parser": "1.15.2", 13 | "commander": "2.13.0", 14 | "connect-gzip-static": "1.0.0", 15 | "du": "0.1.0", 16 | "express": "4.14.0", 17 | "http-proxy": "1.15.1", 18 | "open": "0.0.5", 19 | "progress": "1.1.8", 20 | "shelljs": "0.7.8", 21 | "tarball-extract": "0.0.6" 22 | }, 23 | "devDependencies": { 24 | "normalize.css": "7.0.0", 25 | "font-awesome": "4.6.3", 26 | "hammerjs": "2.0.8", 27 | "ify-loader": "^1.1.0", 28 | "kw-doc": "1.1.1", 29 | "kw-web-suite": "6.1.0", 30 | "monologue.js": "0.3.5", 31 | "mout": "1.1.0", 32 | "paraviewweb": "3.1.7", 33 | "plotly.js": "1.33.1", 34 | "react": "16.2.0", 35 | "react-dom": "16.2.0", 36 | "vtk.js": "6.4.24" 37 | }, 38 | "scripts": { 39 | "validate": "prettier --config ./prettier.config.js --list-different \"lib/**/*.js\"", 40 | "reformat": "prettier --config ./prettier.config.js --write \"lib/**/*.js\"", 41 | "reformat-only": "prettier --single-quote --trailing-comma es5 --print-width 80 --arrow-parens always --write", 42 | 
"update:plotly": "cp ./node_modules/plotly.js/dist/plotly.min.js ./dist", 43 | "build": "webpack --progress --colors --mode development", 44 | "build:debug": "webpack --display-modules --mode development", 45 | "build:release": "webpack --progress --colors --mode production", 46 | "build:bundle": "./bin/bundle.js", 47 | "doc": "kw-doc -c ./docs/config.js", 48 | "doc:www": "kw-doc -c ./docs/config.js -s", 49 | "doc:publish": "kw-doc -c ./docs/config.js -p", 50 | "commit": "git cz", 51 | "semantic-release": "semantic-release" 52 | }, 53 | "bin": { 54 | "ArcticViewer": "./bin/arctic-viewer-cli.js", 55 | "ArcticViewer2Cinema": "./bin/query-data-model-2-cinema-cli.js", 56 | "Cinema2ArcticViewer": "./bin/cinema-2-query-data-model-cli.js" 57 | }, 58 | "preferGlobal": true, 59 | "config": { 60 | "commitizen": { 61 | "path": "node_modules/cz-conventional-changelog" 62 | } 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /prettier.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | printWidth: 80, 3 | singleQuote: true, 4 | trailingComma: 'es5', 5 | arrowParens: 'always', 6 | }; 7 | -------------------------------------------------------------------------------- /scripts/examples/paraview/mpas/earth-contours-sorted-composite.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | outputDir = '/Users/seb/Desktop/mpas-contours-sorted-composite' 6 | inputFile = '/Volumes/Kitware/Data/DataExploration/Data/MPAS/data/xyz_n_primal/X_Y_Z_NLAYER-primal_%d_0.vtu' 7 | earthCore = '/Volumes/Kitware/Data/vtk/mpas/earth/earth-ok.vtk' 8 | 9 | # outputDir = '/Volumes/KSW-Data/tonic/mpas-contours-sorted-composite/' 10 | # inputFile = '/Volumes/KSW-Data/Data/DataExploration/Data/MPAS/data/xyz_n_primal/X_Y_Z_NLAYER-primal_%d_0.vtu' 11 | # earthCore = '/Volumes/KSW-Data/Data/vtk/mpas/earth/earth-ok.vtk' 12 | 13 | phi = range(0, 360, 30) 14 | theta = range(-60, 61, 30) 15 | time = range(50, 5151, 50) 16 | 17 | dataRanges = { 18 | 'bottomDepth': [-9753, 5984], 19 | 'salinity': [24.8574, 37.4595], 20 | 'temperature': [-1.64296, 28.6918] 21 | } 22 | 23 | sections = { 24 | 'LookupTables': { 25 | "bottomDepth": { "preset": "earth"}, 26 | "temperature": { "preset": "ocean", "range": [5, 30]}, 27 | "salinity" : { "preset": "yellow2brown", "range": [34, 38]} 28 | } 29 | } 30 | 31 | # ----------------------------------------------------------------------------- 32 | 33 | from paraview import simple 34 | from paraview.web.dataset_builder import * 35 | 36 | # ----------------------------------------------------------------------------- 37 | # Pipeline creation 38 | # ----------------------------------------------------------------------------- 39 | 40 | core = simple.OpenDataFile(earthCore) 41 | coreSurface = simple.ExtractSurface(Input=core) 42 | coreWithNormals = simple.GenerateSurfaceNormals(Input=coreSurface) 43 | 44 | reader = simple.OpenDataFile(inputFile % time[0]) 45 | reader.CellArrayStatus = ['temperature', 'salinity'] 46 | 47 | dataCleanUp = simple.Threshold(Input = reader, Scalars = ['CELLS', 'temperature'], ThresholdRange = [-1000.0, 50.0]) 48 | dataToPoints = simple.CellDatatoPointData(Input = dataCleanUp) 49 | 50 | sceneDescription = { 51 | 'size': [500, 500], 52 | 'light': 
[ 'intensity', 'normal' ], 53 | 'camera': { 54 | 'CameraViewUp': [0.0, 0.0, 1.0], 55 | 'CameraPosition': [107823.5, -28000000, -44044.25], 56 | 'CameraFocalPoint': [107823.5, -7766.0, -44044.25] 57 | }, 58 | 'scene': [ 59 | { 60 | 'name': 'Earth', 61 | 'source': coreWithNormals, 62 | 'colors': { 63 | 'bottomDepth': {'location': 'POINT_DATA', 'range': dataRanges['bottomDepth'] } 64 | } 65 | },{ 66 | 'parent': 'Temperatures', 67 | 'name': '5C', 68 | 'source': simple.Contour( 69 | Input = dataToPoints, 70 | PointMergeMethod = "Uniform Binning", 71 | ContourBy = 'temperature', 72 | Isosurfaces = [5.0]), 73 | 'colors': { 74 | 'temperature': {'constant': 5.0 }, 75 | 'salinity': {'location': 'POINT_DATA', 'range': dataRanges['salinity'] } 76 | } 77 | },{ 78 | 'parent': 'Temperatures', 79 | 'name': '10C', 80 | 'source': simple.Contour( 81 | Input = dataToPoints, 82 | PointMergeMethod = "Uniform Binning", 83 | ContourBy = 'temperature', 84 | Isosurfaces = [10.0]), 85 | 'colors': { 86 | 'temperature': {'constant': 10.0 }, 87 | 'salinity': {'location': 'POINT_DATA', 'range': dataRanges['salinity'] } 88 | } 89 | },{ 90 | 'parent': 'Temperatures', 91 | 'name': '15C', 92 | 'source': simple.Contour( 93 | Input = dataToPoints, 94 | PointMergeMethod = "Uniform Binning", 95 | ContourBy = 'temperature', 96 | Isosurfaces = [15.0]), 97 | 'colors': { 98 | 'temperature': {'constant': 15.0 }, 99 | 'salinity': {'location': 'POINT_DATA', 'range': dataRanges['salinity'] } 100 | } 101 | },{ 102 | 'parent': 'Temperatures', 103 | 'name': '20C', 104 | 'source': simple.Contour( 105 | Input = dataToPoints, 106 | PointMergeMethod = "Uniform Binning", 107 | ContourBy = 'temperature', 108 | Isosurfaces = [20.0]), 109 | 'colors': { 110 | 'temperature': {'constant': 20.0 }, 111 | 'salinity': {'location': 'POINT_DATA', 'range': dataRanges['salinity'] } 112 | } 113 | },{ 114 | 'parent': 'Temperatures', 115 | 'name': '25C', 116 | 'source': simple.Contour( 117 | Input = dataToPoints, 118 | PointMergeMethod = "Uniform Binning", 119 | ContourBy = 'temperature', 120 | Isosurfaces = [25.0]), 121 | 'colors': { 122 | 'temperature': {'constant': 25.0 }, 123 | 'salinity': {'location': 'POINT_DATA', 'range': dataRanges['salinity'] } 124 | } 125 | } 126 | ] 127 | } 128 | 129 | # ----------------------------------------------------------------------------- 130 | # Data Generation 131 | # ----------------------------------------------------------------------------- 132 | 133 | # Create Image Builder 134 | dsb = CompositeDataSetBuilder(outputDir, sceneDescription, {'type': 'spherical', 'phi': phi, 'theta': theta}, sections=sections) 135 | 136 | # Add time information 137 | dsb.getDataHandler().registerArgument(priority=1, name='time', values=time, ui='slider', loop='modulo') 138 | 139 | dsb.start() 140 | for t in dsb.getDataHandler().time: 141 | reader.FileName = inputFile % t 142 | dsb.writeData() 143 | dsb.stop() 144 | 145 | -------------------------------------------------------------------------------- /scripts/examples/paraview/mpas/raw-probe-flat-earth.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | dataset_destination_path = '/Users/seb/Desktop/mpas_flat_earth_prober' 6 | source_filename = '/Volumes/Kitware/Data/DataExploration/Data/MPAS/data/flat_n_primal/LON_LAT_NLAYER-primal_%d_0.vtu' 7 | # 
dataset_destination_path = '/Volumes/KSW-Data/mpas_flat_earth_prober-raw' 8 | # source_filename = '/Volumes/KSW-Data/Data/DataExploration/Data/MPAS/data/flat_n_primal/LON_LAT_NLAYER-primal_%d_0.vtu' 9 | 10 | all_time_serie = range(50, 5151, 50) 11 | quick_time_serie = range(100, 5151, 200) 12 | single_time_serie = [ 50 ] 13 | 14 | time_serie = quick_time_serie 15 | 16 | sampling_arrays = ['temperature', 'salinity'] 17 | sampling_size = [ 500, 250, 30 ] 18 | sampling_bounds = [ -3.2, 3.2, 19 | -1.3, 1.5, 20 | -3.0, 0.0 ] 21 | 22 | # ----------------------------------------------------------------------------- 23 | 24 | from paraview import simple 25 | from paraview.web.dataset_builder import * 26 | 27 | # ----------------------------------------------------------------------------- 28 | # Pipeline creation 29 | # ----------------------------------------------------------------------------- 30 | 31 | reader = simple.XMLUnstructuredGridReader(FileName = source_filename % time_serie[0], CellArrayStatus = sampling_arrays) 32 | dataCleanUp = simple.Threshold(Input = reader, Scalars = ['CELLS', 'temperature'], ThresholdRange = [-1000.0, 50.0]) 33 | 34 | # ----------------------------------------------------------------------------- 35 | # Data Generation 36 | # ----------------------------------------------------------------------------- 37 | 38 | dpdsb = DataProberDataSetBuilder(dataCleanUp, dataset_destination_path, sampling_size, sampling_arrays, sampling_bounds) 39 | 40 | # Add time information 41 | dpdsb.getDataHandler().registerArgument(priority=1, name='time', values=time_serie, ui='slider', loop='modulo') 42 | 43 | # Explore dataset 44 | dpdsb.start() 45 | for time in dpdsb.getDataHandler().time: 46 | reader.FileName = source_filename % time 47 | dpdsb.writeData() 48 | dpdsb.stop() 49 | -------------------------------------------------------------------------------- /scripts/examples/paraview/samples/FloatImage-diskout.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | outputDir = '/Users/seb/Desktop/FloatImage-diskout/' 6 | inputFile = '/Users/seb/Downloads/ParaViewData-3.10.1/Data/disk_out_ref.ex2' 7 | 8 | # ----------------------------------------------------------------------------- 9 | 10 | from paraview import simple 11 | from paraview.web.dataset_builder import * 12 | 13 | # ----------------------------------------------------------------------------- 14 | # Pipeline creation 15 | # ----------------------------------------------------------------------------- 16 | 17 | variables = ['Temp', 'V', 'Pres', 'AsH3', 'GaMe3', 'CH4', 'H2'] 18 | 19 | reader = simple.OpenDataFile(inputFile) 20 | reader.PointVariables = variables 21 | 22 | clip = simple.Clip(Input=reader, Crinkleclip=1) 23 | clip.ClipType.Normal = [0.0, 1.0, 0.0] 24 | 25 | contourA = simple.Contour( Input = reader, 26 | PointMergeMethod = "Uniform Binning", 27 | ContourBy = 'AsH3', 28 | Isosurfaces = [0.1], 29 | ComputeScalars = 1) 30 | 31 | contourB = simple.Contour( Input = reader, 32 | PointMergeMethod = "Uniform Binning", 33 | ContourBy = 'AsH3', 34 | Isosurfaces = [0.14], 35 | ComputeScalars = 1) 36 | 37 | # ----------------------------------------------------------------------------- 38 | # Data To Export 39 | # ----------------------------------------------------------------------------- 
40 | 41 | layerFields = { 42 | 'clip': variables, 43 | 'contour_0.1': variables, 44 | 'contour_0.14': variables 45 | } 46 | 47 | layerMesh = { 48 | 'clip': True, 49 | 'contour_0.1': True, 50 | 'contour_0.14': True, 51 | } 52 | 53 | layerSource = { 54 | 'clip': clip, 55 | 'contour_0.1': contourA, 56 | 'contour_0.14': contourB 57 | } 58 | 59 | layerList = ['clip', 'contour_0.1', 'contour_0.14'] 60 | 61 | # ----------------------------------------------------------------------------- 62 | # Data Generation 63 | # ----------------------------------------------------------------------------- 64 | db = LayerDataSetBuilder(reader, outputDir, {'type': 'spherical', 'phi': range(-10, 11, 10), 'theta': range(-10, 11, 10)}, [400,630]) 65 | 66 | # Setup view with camera position 67 | view = db.getView() 68 | view.CenterOfRotation = [0.0, 2.875, 0.08] 69 | view.CameraViewUp = [0.0, 0.0, 1.0] 70 | view.CameraFocalPoint = [0.0, 2.875, 0.08] 71 | view.CameraPosition = [0.0, -43.317, 0.08] 72 | 73 | db.start() 74 | 75 | for layerName in layerList: 76 | # Capture each field of each layer 77 | for field in layerFields[layerName]: 78 | db.setActiveLayer(layerName, field, layerMesh[layerName], layerSource[layerName]) 79 | db.writeLayerData() 80 | 81 | db.stop() 82 | -------------------------------------------------------------------------------- /scripts/examples/paraview/samples/Geometry-can.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | outputDir = '/Users/seb/Desktop/Geometry-can/' 6 | inputFile = '/Users/seb/Downloads/ParaViewData-3.10.1/Data/can.ex2' 7 | 8 | # ----------------------------------------------------------------------------- 9 | 10 | from paraview import simple 11 | from paraview.web.dataset_builder import * 12 | 13 | # ----------------------------------------------------------------------------- 14 | # Pipeline creation 15 | # ----------------------------------------------------------------------------- 16 | 17 | can = simple.OpenDataFile(inputFile) 18 | can.ElementVariables = ['EQPS'] 19 | can.PointVariables = ['DISPL', 'VEL', 'ACCL'] 20 | can.GlobalVariables = ['KE', 'XMOM', 'YMOM', 'ZMOM', 'NSTEPS', 'TMSTEP'] 21 | can.ElementBlocks = ['Unnamed block ID: 1 Type: HEX', 'Unnamed block ID: 2 Type: HEX'] 22 | 23 | anim = simple.GetAnimationScene() 24 | anim.UpdateAnimationUsingDataTimeSteps() 25 | 26 | timeValues = anim.TimeKeeper.TimestepValues 27 | 28 | sceneDescription = { 29 | 'scene': [ 30 | { 31 | 'name': 'Can', 32 | 'source': can, 33 | 'colors': { 34 | 'DISPL': {'location': 'POINT_DATA' }, 35 | 'VEL': {'location': 'POINT_DATA' }, 36 | 'ACCL': {'location': 'POINT_DATA' } 37 | } 38 | } 39 | ] 40 | } 41 | 42 | # ----------------------------------------------------------------------------- 43 | # Data Generation 44 | # ----------------------------------------------------------------------------- 45 | 46 | # Create Image Builder 47 | dsb = GeometryDataSetBuilder(outputDir, sceneDescription) 48 | 49 | # Add time information 50 | dsb.getDataHandler().registerArgument(priority=1, name='time', values=timeValues, ui='slider', loop='modulo') 51 | 52 | dsb.start() 53 | for time in dsb.getDataHandler().time: 54 | anim.TimeKeeper.Time = time 55 | dsb.writeData(time) 56 | dsb.stop() 57 | -------------------------------------------------------------------------------- 
/scripts/examples/paraview/samples/Geometry-diskout.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | outputDir = '/Users/seb/Desktop/Geometry-diskout/' 6 | inputFile = '/Users/seb/Downloads/ParaViewData-3.10.1/Data/disk_out_ref.ex2' 7 | 8 | # ----------------------------------------------------------------------------- 9 | 10 | from paraview import simple 11 | from paraview.web.dataset_builder import * 12 | 13 | # ----------------------------------------------------------------------------- 14 | # Pipeline creation 15 | # ----------------------------------------------------------------------------- 16 | 17 | reader = simple.OpenDataFile(inputFile) 18 | reader.PointVariables = ['Temp', 'V', 'Pres', 'AsH3', 'GaMe3', 'CH4', 'H2'] 19 | 20 | clip = simple.Clip(Input=reader) 21 | clip.ClipType.Normal = [0.0, 1.0, 0.0] 22 | 23 | streamLines = simple.StreamTracer( 24 | Input = reader, 25 | SeedType="High Resolution Line Source", 26 | Vectors = ['POINTS', 'V'], 27 | MaximumStreamlineLength = 20.16) 28 | streamLines.SeedType.Point2 = [5.75, 5.75, 10.15999984741211] 29 | streamLines.SeedType.Point1 = [-5.75, -5.75, -10.0] 30 | streamTubes = simple.Tube(Input=streamLines, Radius = 0.2) 31 | 32 | sections = { 33 | "LookupTables": { 34 | "AsH3": { 35 | "range": [ 36 | 0.0804768, 37 | 0.184839 38 | ], 39 | "preset": "wildflower" 40 | }, 41 | "Pres": { 42 | "range": [ 43 | 0.00678552, 44 | 0.0288185 45 | ], 46 | "preset": "cool" 47 | }, 48 | "Temp": { 49 | "range": [ 50 | 293.15, 51 | 913.15 52 | ], 53 | "preset": "spectralflip" 54 | } 55 | } 56 | } 57 | 58 | sceneDescription = { 59 | 'scene': [ 60 | { 61 | 'name': 'Stream lines', 62 | 'source': streamTubes, 63 | 'colors': { 64 | 'Pres': {'location': 'POINT_DATA' }, 65 | 'Temp': {'location': 'POINT_DATA' } 66 | } 67 | },{ 68 | 'name': 'Clip', 69 | 'source': clip, 70 | 'colors': { 71 | 'Pres': {'location': 'POINT_DATA' }, 72 | 'Temp': {'location': 'POINT_DATA' } 73 | } 74 | },{ 75 | 'parent': 'Contours', 76 | 'name': 'AsH3 0.1', 77 | 'source': simple.Contour( 78 | Input = reader, 79 | PointMergeMethod = "Uniform Binning", 80 | ContourBy = 'AsH3', 81 | Isosurfaces = [0.1], 82 | ComputeScalars = 1), 83 | 'colors': { 84 | 'AsH3': {'constant': 0.1 }, 85 | 'Pres': {'location': 'POINT_DATA' }, 86 | 'Temp': {'location': 'POINT_DATA' } 87 | } 88 | },{ 89 | 'parent': 'Contours', 90 | 'name': 'AsH3 0.14', 91 | 'source': simple.Contour( 92 | Input = reader, 93 | PointMergeMethod = "Uniform Binning", 94 | ContourBy = 'AsH3', 95 | Isosurfaces = [0.14], 96 | ComputeScalars = 1), 97 | 'colors': { 98 | 'AsH3': {'constant': 0.14 }, 99 | 'Pres': {'location': 'POINT_DATA' }, 100 | 'Temp': {'location': 'POINT_DATA' } 101 | } 102 | },{ 103 | 'parent': 'Contours', 104 | 'name': 'AsH3 0.18', 105 | 'source': simple.Contour( 106 | Input = reader, 107 | PointMergeMethod = "Uniform Binning", 108 | ContourBy = 'AsH3', 109 | Isosurfaces = [0.18], 110 | ComputeScalars = 1), 111 | 'colors': { 112 | 'AsH3': {'constant': 0.18 }, 113 | 'Pres': {'location': 'POINT_DATA' }, 114 | 'Temp': {'location': 'POINT_DATA' } 115 | } 116 | } 117 | ] 118 | } 119 | 120 | # ----------------------------------------------------------------------------- 121 | # Data Generation 122 | # ----------------------------------------------------------------------------- 123 | 124 | # 
Create Image Builder 125 | dsb = GeometryDataSetBuilder(outputDir, sceneDescription, sections=sections) 126 | 127 | dsb.start() 128 | dsb.writeData() 129 | dsb.stop() 130 | 131 | -------------------------------------------------------------------------------- /scripts/examples/paraview/samples/Geometry-multicontour-diskout.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | outputDir = '/Users/seb/Desktop/Geometry-diskout-multi-contour/' 6 | inputFile = '/Users/seb/Downloads/ParaViewData-3.10.1/Data/disk_out_ref.ex2' 7 | 8 | # ----------------------------------------------------------------------------- 9 | 10 | from paraview import simple 11 | from paraview.web.dataset_builder import * 12 | 13 | # ----------------------------------------------------------------------------- 14 | # Pipeline creation 15 | # ----------------------------------------------------------------------------- 16 | 17 | reader = simple.OpenDataFile(inputFile) 18 | reader.PointVariables = ['Temp', 'V', 'Pres', 'AsH3', 'GaMe3', 'CH4', 'H2'] 19 | 20 | clip = simple.Clip(Input=reader) 21 | clip.ClipType.Normal = [0.0, 1.0, 0.0] 22 | 23 | streamLines = simple.StreamTracer( 24 | Input = reader, 25 | SeedType="High Resolution Line Source", 26 | Vectors = ['POINTS', 'V'], 27 | MaximumStreamlineLength = 20.16) 28 | streamLines.SeedType.Point2 = [5.75, 5.75, 10.15999984741211] 29 | streamLines.SeedType.Point1 = [-5.75, -5.75, -10.0] 30 | streamTubes = simple.Tube(Input=streamLines, Radius = 0.2) 31 | 32 | contourFilter = simple.Contour( Input = reader, 33 | PointMergeMethod = "Uniform Binning", 34 | ContourBy = 'AsH3', 35 | Isosurfaces = [0.1], 36 | ComputeScalars = 1) 37 | 38 | contourValues = [ 0.09 + float(x)*0.01 for x in range(9) ] 39 | 40 | sections = { 41 | "LookupTables": { 42 | "AsH3": { 43 | "range": [ 44 | 0.0804768, 45 | 0.184839 46 | ], 47 | "preset": "wildflower" 48 | }, 49 | "Pres": { 50 | "range": [ 51 | 0.00678552, 52 | 0.0288185 53 | ], 54 | "preset": "cool" 55 | }, 56 | "Temp": { 57 | "range": [ 58 | 293.15, 59 | 913.15 60 | ], 61 | "preset": "spectralflip" 62 | } 63 | } 64 | } 65 | 66 | sceneDescription = { 67 | 'scene': [ 68 | { 69 | 'name': 'Stream lines', 70 | 'source': streamTubes, 71 | 'colors': { 72 | 'Pres': {'location': 'POINT_DATA' }, 73 | 'Temp': {'location': 'POINT_DATA' } 74 | } 75 | },{ 76 | 'name': 'Clip', 77 | 'source': clip, 78 | 'colors': { 79 | 'Pres': {'location': 'POINT_DATA' }, 80 | 'Temp': {'location': 'POINT_DATA' } 81 | } 82 | },{ 83 | 'name': 'Contour AsH3', 84 | 'source': contourFilter, 85 | 'colors': { 86 | 'AsH3': {'location': 'POINT_DATA' }, 87 | 'Pres': {'location': 'POINT_DATA' }, 88 | 'Temp': {'location': 'POINT_DATA' } 89 | } 90 | } 91 | ] 92 | } 93 | 94 | # ----------------------------------------------------------------------------- 95 | # Data Generation 96 | # ----------------------------------------------------------------------------- 97 | 98 | # Create Image Builder 99 | dsb = GeometryDataSetBuilder(outputDir, sceneDescription, sections=sections) 100 | 101 | 102 | dsb.getDataHandler().registerArgument(priority=1, name='contour', values=contourValues, ui='slider', loop='modulo') 103 | dsb.getDataHandler().registerArgument(priority=2, name='clip', values=range(5), ui='slider', loop='modulo') 104 | 105 | dsb.start() 106 | for v in 
dsb.getDataHandler().clip: 107 | for c in dsb.getDataHandler().contour: 108 | clip.ClipType.Origin = [0.0, float(2*v) - 5.0, 0.0] 109 | contourFilter.Isosurfaces = [ c ] 110 | dsb.writeData() 111 | dsb.stop() 112 | 113 | -------------------------------------------------------------------------------- /scripts/examples/paraview/samples/MPI-sphere.py: -------------------------------------------------------------------------------- 1 | from paraview import simple 2 | 3 | from paraview.web.dataset_builder import * 4 | 5 | dataset_destination_path = '/Users/seb/Desktop/mpi-sphere' 6 | 7 | sphere = simple.Sphere() 8 | 9 | rep = simple.Show() 10 | simple.ColorBy(rep, ('POINTS', 'vtkProcessId')) 11 | 12 | view = simple.Render() 13 | view.ResetCamera() 14 | 15 | rep.RescaleTransferFunctionToDataRange(True) 16 | 17 | phi = range(0,360,10) 18 | theta = range(-60, 61, 10) 19 | dh = ImageDataSetBuilder(dataset_destination_path, 'image/jpg', {'type': 'spherical', 'phi': phi, 'theta': theta}) 20 | 21 | dh.start(view) 22 | dh.writeImages() 23 | dh.stop() 24 | -------------------------------------------------------------------------------- /scripts/examples/paraview/samples/VTKGeometry-can.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | outputDir = '/Users/seb/Desktop/Geometry-can/' 6 | inputFile = '/Users/seb/Downloads/data/can.ex2' 7 | 8 | # ----------------------------------------------------------------------------- 9 | 10 | from paraview import simple 11 | from paraview.web.dataset_builder import * 12 | 13 | # ----------------------------------------------------------------------------- 14 | # Pipeline creation 15 | # ----------------------------------------------------------------------------- 16 | 17 | can = simple.OpenDataFile(inputFile) 18 | can.ElementVariables = ['EQPS'] 19 | can.PointVariables = ['DISPL', 'VEL', 'ACCL'] 20 | can.GlobalVariables = ['KE', 'XMOM', 'YMOM', 'ZMOM', 'NSTEPS', 'TMSTEP'] 21 | can.ElementBlocks = ['Unnamed block ID: 1 Type: HEX', 'Unnamed block ID: 2 Type: HEX'] 22 | 23 | anim = simple.GetAnimationScene() 24 | anim.UpdateAnimationUsingDataTimeSteps() 25 | 26 | timeValues = anim.TimeKeeper.TimestepValues 27 | 28 | sceneDescription = { 29 | 'scene': [ 30 | { 31 | 'name': 'Can', 32 | 'source': can, 33 | 'colors': { 34 | 'DISPL': {'location': 'POINT_DATA' }, 35 | 'VEL': {'location': 'POINT_DATA' }, 36 | 'ACCL': {'location': 'POINT_DATA' }, 37 | 'time': {'location': 'POINT_DATA', 'constant': 5 } 38 | } 39 | } 40 | ] 41 | } 42 | 43 | # ----------------------------------------------------------------------------- 44 | # Data Generation 45 | # ----------------------------------------------------------------------------- 46 | 47 | # Create Image Builder 48 | dsb = VTKGeometryDataSetBuilder(outputDir, sceneDescription) 49 | 50 | # Add time information 51 | dsb.getDataHandler().registerArgument(priority=1, name='time', values=timeValues, ui='slider', loop='modulo') 52 | 53 | dsb.start() 54 | for time in dsb.getDataHandler().time: 55 | anim.TimeKeeper.Time = time 56 | sceneDescription['scene'][0]['colors']['time']['constant'] = time; 57 | dsb.writeData(time) 58 | dsb.stop() 59 | -------------------------------------------------------------------------------- /scripts/examples/paraview/samples/camera-cylindrical.py: 
-------------------------------------------------------------------------------- 1 | from paraview.simple import * 2 | 3 | from vtk.web.query_data_model import * 4 | from paraview.web import camera as pv 5 | 6 | dataset_destination_path = '/tmp/cylinder' 7 | 8 | # Initial ParaView scene setup 9 | Cylinder(Resolution = 30, Height = 10.0, Center = (1,2,3)) 10 | rep = Show() 11 | view = Render() 12 | 13 | ResetCamera() 14 | view.CenterOfRotation = view.CameraFocalPoint 15 | 16 | ColorBy(rep, ('POINTS', 'Normals')) 17 | normalsLUT = GetColorTransferFunction('Normals') 18 | normalsLUT.VectorMode = 'Component' 19 | normalsLUT.VectorComponent = 0 20 | 21 | Render() 22 | 23 | # Choose data location 24 | dh = DataHandler(dataset_destination_path) 25 | camera = pv.create_cylindrical_camera(view, dh, range(0, 360, 30), range(-5, 5, 1)) 26 | 27 | # Create data 28 | dh.registerData(name='image', type='blob', mimeType='image/png', fileName='.png') 29 | 30 | # Loop over data 31 | for pos in camera: 32 | pv.update_camera(view, pos) 33 | WriteImage(dh.getDataAbsoluteFilePath('image')) 34 | 35 | # Write metadata 36 | dh.writeDataDescriptor() 37 | -------------------------------------------------------------------------------- /scripts/examples/paraview/samples/camera-cylindrical_v2.py: -------------------------------------------------------------------------------- 1 | from paraview.simple import * 2 | from paraview.web.dataset_builder import * 3 | 4 | dataset_destination_path = '/tmp/cylinder_v2' 5 | 6 | # Initial ParaView scene setup 7 | Cylinder(Resolution = 30, Height = 10.0, Center = (1,2,3)) 8 | rep = Show() 9 | view = Render() 10 | 11 | view.UseGradientBackground = 1 12 | view.Background = [0.6818646524757763, 0.7232318608377203, 0.9213092240787365] 13 | view.Background2 = [0.16470588235294117, 0.5490196078431373, 0.23529411764705882] 14 | 15 | ResetCamera() 16 | view.CenterOfRotation = view.CameraFocalPoint 17 | 18 | ColorBy(rep, ('POINTS', 'Normals')) 19 | normalsLUT = GetColorTransferFunction('Normals') 20 | normalsLUT.VectorMode = 'Component' 21 | normalsLUT.VectorComponent = 0 22 | 23 | Render() 24 | 25 | # Create Tonic Dataset 26 | dsb = ImageDataSetBuilder(dataset_destination_path, 'image/png', {'type': 'cylindrical', 'phi': range(0, 360, 30), 'translation': range(-5, 5, 1)}, {'author': 'Sebastien Jourdain'}) 27 | dsb.start(view) 28 | dsb.writeImages() 29 | dsb.stop() 30 | -------------------------------------------------------------------------------- /scripts/examples/paraview/samples/camera-spherical.py: -------------------------------------------------------------------------------- 1 | from paraview.simple import * 2 | 3 | from vtk.web.query_data_model import * 4 | from paraview.web import camera as pv 5 | 6 | dataset_destination_path = '/tmp/spherical' 7 | 8 | # Initial ParaView scene setup 9 | Cone() 10 | Show() 11 | view = Render() 12 | 13 | # Choose data location 14 | dh = DataHandler(dataset_destination_path) 15 | camera = pv.create_spherical_camera(view, dh, range(0, 360, 30), range(-60, 61, 30)) 16 | 17 | # Create data 18 | dh.registerData(name='image', type='blob', mimeType='image/png', fileName='.png') 19 | 20 | # Loop over data 21 | for pos in camera: 22 | pv.update_camera(view, pos) 23 | WriteImage(dh.getDataAbsoluteFilePath('image')) 24 | 25 | # Write metadata 26 | dh.writeDataDescriptor() 27 | -------------------------------------------------------------------------------- /scripts/examples/paraview/samples/composite-diskout.py: 
-------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | outputDir = '/Users/seb/Desktop/diskout-composite-with-normal/' 6 | inputFile = '/Users/seb/Downloads/ParaViewData-3.10.1/Data/disk_out_ref.ex2' 7 | 8 | phi = range(0, 360, 30) 9 | theta = range(-60, 61, 30) 10 | 11 | AsH3_range = [0.0804768, 0.184839] 12 | Temp_range = [293.15, 913.15] 13 | Pres_range = [0.00678552, 0.0288185] 14 | V_range = [0.0, 1.0] 15 | Vorticity_range = [0.0, 1.0] 16 | 17 | # ----------------------------------------------------------------------------- 18 | 19 | from paraview import simple 20 | from paraview.web.dataset_builder import * 21 | 22 | # ----------------------------------------------------------------------------- 23 | # Pipeline creation 24 | # ----------------------------------------------------------------------------- 25 | 26 | reader = simple.OpenDataFile(inputFile) 27 | reader.PointVariables = ['Temp', 'V', 'Pres', 'AsH3', 'GaMe3', 'CH4', 'H2'] 28 | 29 | clip = simple.Clip(Input=reader) 30 | clip.ClipType.Normal = [0.0, 1.0, 0.0] 31 | clipSurface = simple.ExtractSurface(Input=clip) 32 | clipWithNormals = simple.GenerateSurfaceNormals(Input=clipSurface) 33 | 34 | streamLines = simple.StreamTracer( 35 | Input = reader, 36 | SeedType="High Resolution Line Source", 37 | Vectors = ['POINTS', 'V'], 38 | MaximumStreamlineLength = 20.16) 39 | streamLines.SeedType.Point2 = [5.75, 5.75, 10.15999984741211] 40 | streamLines.SeedType.Point1 = [-5.75, -5.75, -10.0] 41 | streamTubes = simple.Tube(Input=streamLines, Radius = 0.2) 42 | streamSurface = simple.ExtractSurface(Input=streamTubes) 43 | streamWithNormals = simple.GenerateSurfaceNormals(Input=streamSurface) 44 | 45 | sceneDescription = { 46 | 'size': [500, 500], 47 | 'light': [ 'intensity', 'normal' ], # 'normal' 48 | 'camera': { 49 | 'CameraViewUp': [0.0, 0.0, 1.0], 50 | 'CameraPosition': [0.0, -58.47, 0.07], 51 | 'CameraFocalPoint': [0.0, 0.0, 0.07] 52 | }, 53 | 'scene': [ 54 | { 55 | 'name': 'Stream lines', 56 | 'source': streamWithNormals, 57 | 'colors': { 58 | 'Pres': {'location': 'POINT_DATA', 'range': Pres_range }, 59 | 'Temp': {'location': 'POINT_DATA', 'range': Temp_range } 60 | } 61 | },{ 62 | 'name': 'Clip', 63 | 'source': clipWithNormals, 64 | 'colors': { 65 | 'Pres': {'location': 'POINT_DATA', 'range': Pres_range }, 66 | 'Temp': {'location': 'POINT_DATA', 'range': Temp_range } 67 | } 68 | },{ 69 | 'parent': 'Contours', 70 | 'name': 'AsH3 0.1', 71 | 'source': simple.Contour( 72 | Input = reader, 73 | PointMergeMethod = "Uniform Binning", 74 | ContourBy = 'AsH3', 75 | Isosurfaces = [0.1], 76 | ComputeScalars = 1), 77 | 'colors': { 78 | 'AsH3': {'constant': 0.1 }, 79 | 'Pres': {'location': 'POINT_DATA', 'range': Pres_range }, 80 | 'Temp': {'location': 'POINT_DATA', 'range': Temp_range } 81 | } 82 | },{ 83 | 'parent': 'Contours', 84 | 'name': 'AsH3 0.14', 85 | 'source': simple.Contour( 86 | Input = reader, 87 | PointMergeMethod = "Uniform Binning", 88 | ContourBy = 'AsH3', 89 | Isosurfaces = [0.14], 90 | ComputeScalars = 1), 91 | 'colors': { 92 | 'AsH3': {'constant': 0.14 }, 93 | 'Pres': {'location': 'POINT_DATA', 'range': Pres_range }, 94 | 'Temp': {'location': 'POINT_DATA', 'range': Temp_range } 95 | } 96 | },{ 97 | 'parent': 'Contours', 98 | 'name': 'AsH3 0.18', 99 | 'source': simple.Contour( 100 | Input = reader, 101 | 
PointMergeMethod = "Uniform Binning", 102 | ContourBy = 'AsH3', 103 | Isosurfaces = [0.18], 104 | ComputeScalars = 1), 105 | 'colors': { 106 | 'AsH3': {'constant': 0.18 }, 107 | 'Pres': {'location': 'POINT_DATA', 'range': Pres_range }, 108 | 'Temp': {'location': 'POINT_DATA', 'range': Temp_range } 109 | } 110 | } 111 | ] 112 | } 113 | 114 | # ----------------------------------------------------------------------------- 115 | # Data Generation 116 | # ----------------------------------------------------------------------------- 117 | 118 | # Create Image Builder 119 | dsb = CompositeDataSetBuilder(outputDir, sceneDescription, {'type': 'spherical', 'phi': phi, 'theta': theta}) 120 | 121 | dsb.start() 122 | dsb.writeData() 123 | dsb.stop() 124 | 125 | -------------------------------------------------------------------------------- /scripts/examples/paraview/samples/composite-wavelet.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | outputDir = '/Users/seb/Desktop/float-image/' 5 | # ----------------------------------------------------------------------------- 6 | 7 | from paraview import simple 8 | from paraview.web.dataset_builder import * 9 | 10 | # ----------------------------------------------------------------------------- 11 | # VTK Pipeline creation 12 | # ----------------------------------------------------------------------------- 13 | 14 | wavelet = simple.Wavelet() 15 | calc = simple.Calculator() 16 | calc.Function = 'coordsX' 17 | calc.ResultArrayName = 'x' 18 | contour = simple.Contour( 19 | PointMergeMethod="Uniform Binning", 20 | ComputeScalars = 1, 21 | ComputeNormals = 1, 22 | Isosurfaces = 157.09, 23 | ContourBy = ['POINTS', 'RTData']) 24 | clip = simple.Clip() 25 | clip.ClipType.Normal = [0.0, 0.0, -1.0] 26 | 27 | # ----------------------------------------------------------------------------- 28 | # Data To Export 29 | # ----------------------------------------------------------------------------- 30 | 31 | layerMesh = { 32 | 'core1': False, 33 | 'core2': True, 34 | 'core3': True, 35 | 'core4': True, 36 | 'core5': True 37 | } 38 | 39 | fields = ['RTData', 'x'] 40 | cores = ['core1', 'core2', 'core3', 'core4', 'core5'] 41 | isoValues = [ 77.26, 117.18, 157.09, 197.0, 236.92 ] 42 | 43 | 44 | # ----------------------------------------------------------------------------- 45 | # Data Generation 46 | # ----------------------------------------------------------------------------- 47 | db = LayerDataSetBuilder(clip, outputDir, {'type': 'spherical', 'phi': range(-10, 11, 10), 'theta': range(-10, 11, 10)}, [400,400]) 48 | 49 | # Setup view with camera position 50 | view = db.getView() 51 | simple.Show(wavelet, view) 52 | simple.Render(view) 53 | simple.ResetCamera(view) 54 | simple.Hide(wavelet, view) 55 | 56 | db.start() 57 | 58 | layerIdx = 0 59 | for layer in cores: 60 | # Select only one layer 61 | contour.Isosurfaces = isoValues[layerIdx] 62 | 63 | # Capture each field of each layer 64 | for field in fields: 65 | db.setActiveLayer(layer, field, layerMesh[layer]) 66 | db.writeLayerData() 67 | 68 | # Move to the next layer 69 | layerIdx += 1 70 | 71 | db.stop() 72 | -------------------------------------------------------------------------------- /scripts/examples/paraview/samples/time-management.py: 
-------------------------------------------------------------------------------- 1 | from paraview.simple import * 2 | from paraview.web.dataset_builder import * 3 | 4 | # Can.ex2 file path 5 | fileToLoad = '/Users/seb/Downloads/ParaViewData-3.10.1/Data/can.ex2' 6 | dataset_destination_path = '/tmp/can' 7 | 8 | # Initial ParaView scene setup 9 | can = OpenDataFile(fileToLoad) 10 | can.ElementVariables = ['EQPS'] 11 | can.PointVariables = ['DISPL', 'VEL', 'ACCL'] 12 | can.GlobalVariables = ['KE', 'XMOM', 'YMOM', 'ZMOM', 'NSTEPS', 'TMSTEP'] 13 | can.ElementBlocks = ['Unnamed block ID: 1 Type: HEX', 'Unnamed block ID: 2 Type: HEX'] 14 | 15 | rep = Show() 16 | view = Render() 17 | 18 | anim = GetAnimationScene() 19 | anim.UpdateAnimationUsingDataTimeSteps() 20 | anim.GoToLast() 21 | 22 | ColorBy(rep, ('POINTS', 'DISPL')) 23 | rep.RescaleTransferFunctionToDataRange(True) 24 | 25 | timeValues = anim.TimeKeeper.TimestepValues 26 | 27 | view.CameraPosition = [-18.29191376466667, 21.185677224902403, -45.68993692892029] 28 | view.CameraFocalPoint = [-0.5119223594665527, 3.3483874797821045, -11.321756362915039] 29 | view.CameraViewUp = [0.29015080553622485, -0.779749133967588, -0.5548006832399148] 30 | 31 | view.ResetCamera() 32 | view.CenterOfRotation = view.CameraFocalPoint 33 | Render() 34 | 35 | # Create Tonic Dataset 36 | dsb = ImageDataSetBuilder(dataset_destination_path, 'image/jpg', {'type': 'spherical', 'phi': range(0, 360, 45), 'theta': range(-60, 61, 30)}) 37 | 38 | # Add time information 39 | dsb.getDataHandler().registerArgument(priority=1, name='time', values=timeValues, ui='slider', loop='modulo') 40 | 41 | # Explore dataset 42 | dsb.start(view) 43 | for time in dsb.getDataHandler().time: 44 | anim.TimeKeeper.Time = time 45 | dsb.writeImages() 46 | dsb.stop() 47 | -------------------------------------------------------------------------------- /scripts/examples/paraview/tests/extract-scalar.py: -------------------------------------------------------------------------------- 1 | from paraview import simple 2 | from paraview.web.data_writer import * 3 | 4 | ds = simple.Wavelet() 5 | 6 | dataRenderer = ScalarRenderer() 7 | 8 | view = dataRenderer.getView() 9 | view.ViewSize = [500, 500] 10 | 11 | simple.Show(ds, view) 12 | simple.ResetCamera(view) 13 | 14 | dataRenderer.writeArray('/Users/seb/Desktop/composite-rtdata', ds, 'RTData') 15 | -------------------------------------------------------------------------------- /scripts/examples/tests/data-handler.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | 3 | import vtk.web.query_data_model import * 4 | from vtk.web import camera 5 | 6 | dataset_destination_path = '/tmp/data_handler' 7 | 8 | # Choose data location 9 | dh = DataHandler(dataset_destination_path) 10 | camera = camera.SphericalCamera(dh, (0,0,0), (1,0,0), (0,0,1), range(0, 360, 30), range(-60, 61, 30)) 11 | 12 | # Provide metadata 13 | dh.addTypes('WebGL-Compositor', 'rgbd') 14 | dh.addMetaData('title', 'Test dataset'); 15 | dh.addMetaData('authors', ['Sebastien Jourdain', 'Patrick O\'Leary']); 16 | dh.addSection('Composite', { 'name': 'Seb', 'pipeline': { 'a': 1, 'b': 2}}) 17 | 18 | # Create arguments 19 | dh.registerArgument(priority=1, name='contour', values=range(5), ui='slider') 20 | dh.registerArgument(priority=2, name='time', values=range(0, 10), ui='slider') 21 | 22 | # Create data 23 | dh.registerData(name='image', type='blob', mimeType='image/png', fileName='.png') 24 | 25 | # Loop over data 26 | for time in dh.time: 27 | for contour in dh.contour: 28 | for pos in camera: 29 | print 'Time: %d | Contour: %d => %s' % (time, contour, dh.getDataAbsoluteFilePath('image')) 30 | print 'Phi %d - Theta %d - Position %s - ViewUp %s' % (pos['phi'], pos['theta'], str(pos['position']), str(pos['viewUp'])) 31 | 32 | # Write metadata 33 | dh.writeDataDescriptor() 34 | -------------------------------------------------------------------------------- /scripts/examples/tests/off-center-rotation.py: -------------------------------------------------------------------------------- 1 | from paraview.simple import * 2 | 3 | from vtk.web.query_data_model import * 4 | from paraview.web import camera as pv 5 | 6 | dataset_destination_path = '/Users/seb/spherical' 7 | 8 | # Initial ParaView scene setup 9 | Cone(Center=[2,4,8]) 10 | Show() 11 | view = Render() 12 | view.CameraFocalPoint = [2,4,8] 13 | view.CameraPosition = [2,4,0] 14 | view.CenterOfRotation = [2,4,8] 15 | view.CameraViewUp = [0,1,0] 16 | view = Render() 17 | 18 | 19 | # Choose data location 20 | dh = DataHandler(dataset_destination_path) 21 | camera = pv.create_spherical_camera(view, dh, range(0, 360, 30), range(-60, 61, 30)) 22 | 23 | # Create data 24 | dh.registerData(name='image', type='blob', mimeType='image/png', fileName='.png') 25 | 26 | # Loop over data 27 | for pos in camera: 28 | pv.update_camera(view, pos) 29 | WriteImage(dh.getDataAbsoluteFilePath('image')) 30 | 31 | # Write metadata 32 | dh.writeDataDescriptor() 33 | -------------------------------------------------------------------------------- /scripts/examples/vtk/medical/head-ct-volume-step-func.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # Download data: 3 | # - Browser: 4 | # http://midas3.kitware.com/midas/folder/10409 => VisibleMale/vm_head_frozenct.mha 5 | # - Terminal 6 | # curl "http://midas3.kitware.com/midas/download?folders=&items=235235" -o vm_head_frozenct.mha 7 | # ----------------------------------------------------------------------------- 8 | 9 | from vtk import * 10 | 11 | from vtk.web.query_data_model import * 12 | from vtk.web.dataset_builder import * 13 | 14 | # ----------------------------------------------------------------------------- 15 | # User configuration 16 | # ----------------------------------------------------------------------------- 17 | 18 | dataset_destination_path = '/Users/seb/Desktop/head_ct_4_features' 19 | file_path = '/Users/seb/Downloads/vm_head_frozenct.mha' 20 | 21 | 
field = 'MetaImage' 22 | fieldRange = [0.0, 4095.0] 23 | features = [ 24 | (100, 800), # Fluid 450 => 0.10 25 | (900, 1250), # Skin 1075 => 0.26 26 | (1400, 2525), # Skull 1962.5 => 0.47 27 | (2525, 4000) # Teeth 3262.5 => 0.79 28 | ] 29 | 30 | sections = { 31 | 'LookupTables': { 32 | "VolumeScalar": { 33 | "controlpoints": [ 34 | {"x": 0.00, "r": 0.5, "g": 0.5, "b": 0.5}, # Fluid 35 | {"x": 0.11, "r": 0.5, "g": 0.5, "b": 0.5}, # Fluid 36 | {"x": 0.12, "r": 1.0, "g": 0.8, "b": 0.4}, # Skin 37 | {"x": 0.27, "r": 1.0, "g": 0.8, "b": 0.4}, # Skin 38 | {"x": 0.28, "r": 1.0, "g": 1.0, "b": 1.0}, # Skull 39 | {"x": 0.48, "r": 1.0, "g": 1.0, "b": 1.0}, # Skull 40 | {"x": 0.49, "r": 1.0, "g": 0.8, "b": 0.6}, # Teeth 41 | {"x": 1.00, "r": 1.0, "g": 0.8, "b": 0.6} # Teeth 42 | ], 43 | "discrete" : True 44 | } 45 | } 46 | } 47 | 48 | # ----------------------------------------------------------------------------- 49 | # VTK Helper methods 50 | # ----------------------------------------------------------------------------- 51 | 52 | def updatePieceWiseAsStep(pwf, dataRange, start, end): 53 | scalarOpacity.RemoveAllPoints() 54 | 55 | scalarOpacity.AddPoint(dataRange[0], 0.0) 56 | scalarOpacity.AddPoint(start-1, 0.0) 57 | scalarOpacity.AddPoint(start, 1.0) 58 | scalarOpacity.AddPoint(end, 1.0) 59 | scalarOpacity.AddPoint(end+1, 0.0) 60 | scalarOpacity.AddPoint(dataRange[1], 0.0) 61 | 62 | # ----------------------------------------------------------------------------- 63 | # VTK Pipeline creation 64 | # ----------------------------------------------------------------------------- 65 | 66 | reader = vtkMetaImageReader() 67 | reader.SetFileName(file_path) 68 | 69 | mapper = vtkGPUVolumeRayCastMapper() 70 | mapper.SetInputConnection(reader.GetOutputPort()) 71 | mapper.RenderToImageOn() 72 | 73 | colorFunction = vtkColorTransferFunction() 74 | colorFunction.AddRGBPoint(fieldRange[0], 1.0, 1.0, 1.0) 75 | colorFunction.AddRGBPoint(fieldRange[1], 1.0, 1.0, 1.0) 76 | 77 | scalarOpacity = vtkPiecewiseFunction() 78 | 79 | volumeProperty = vtkVolumeProperty() 80 | volumeProperty.ShadeOn() 81 | volumeProperty.SetInterpolationType(VTK_LINEAR_INTERPOLATION) 82 | volumeProperty.SetColor(colorFunction) 83 | volumeProperty.SetScalarOpacity(scalarOpacity) 84 | 85 | volume = vtkVolume() 86 | volume.SetMapper(mapper) 87 | volume.SetProperty(volumeProperty) 88 | 89 | window = vtkRenderWindow() 90 | window.SetSize(512, 512) 91 | 92 | renderer = vtkRenderer() 93 | window.AddRenderer(renderer) 94 | 95 | renderer.AddVolume(volume) 96 | renderer.ResetCamera() 97 | window.Render() 98 | 99 | # Camera setting 100 | camera = { 101 | 'position': [-0.264, -890.168, -135.0], 102 | 'focalPoint': [-0.264, -30.264, -135.0], 103 | 'viewUp': [0,0,1] 104 | } 105 | update_camera(renderer, camera) 106 | 107 | # ----------------------------------------------------------------------------- 108 | # Data Generation 109 | # ----------------------------------------------------------------------------- 110 | 111 | # Create Image Builder 112 | phi = range(0, 360, 30) 113 | theta = range(-60, 61, 30) 114 | vcdsb = SortedCompositeDataSetBuilder(dataset_destination_path, {'type': 'spherical', 'phi': phi, 'theta': theta}, sections=sections) 115 | 116 | idx = 0 117 | vcdsb.start(window, renderer) 118 | for feature in features: 119 | idx += 1 120 | updatePieceWiseAsStep(scalarOpacity, fieldRange, feature[0], feature[1]) 121 | 122 | # Capture layer 123 | vcdsb.activateLayer(field, (feature[0] + feature[1])/2) 124 | 125 | # Write data 126 | 
vcdsb.writeData(mapper) 127 | 128 | vcdsb.stop() 129 | 130 | 131 | -------------------------------------------------------------------------------- /scripts/examples/vtk/medical/head-ct-volume.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # Download data: 3 | # - Browser: 4 | # http://midas3.kitware.com/midas/folder/10409 => VisibleMale/vm_head_frozenct.mha 5 | # - Terminal 6 | # curl "http://midas3.kitware.com/midas/download?folders=&items=235235" -o vm_head_frozenct.mha 7 | # ----------------------------------------------------------------------------- 8 | 9 | from vtk import * 10 | 11 | from vtk.web.query_data_model import * 12 | from vtk.web.dataset_builder import * 13 | 14 | # ----------------------------------------------------------------------------- 15 | # User configuration 16 | # ----------------------------------------------------------------------------- 17 | 18 | dataset_destination_path = '/Users/seb/Desktop/vm_head_frozenct_vi_%s_%s_%s' 19 | file_path = '/Users/seb/Downloads/vm_head_frozenct.mha' 20 | 21 | field = 'MetaImage' 22 | fieldRange = [0.0, 4095.0] 23 | nbSteps = 4 24 | 25 | # ----------------------------------------------------------------------------- 26 | # VTK Helper methods 27 | # ----------------------------------------------------------------------------- 28 | 29 | def updatePieceWise(pwf, dataRange, center, halfSpread): 30 | scalarOpacity.RemoveAllPoints() 31 | if (center - halfSpread) <= dataRange[0]: 32 | scalarOpacity.AddPoint(dataRange[0], 0.0) 33 | scalarOpacity.AddPoint(center, 1.0) 34 | else: 35 | scalarOpacity.AddPoint(dataRange[0], 0.0) 36 | scalarOpacity.AddPoint(center - halfSpread, 0.0) 37 | scalarOpacity.AddPoint(center, 1.0) 38 | 39 | if (center + halfSpread) >= dataRange[1]: 40 | scalarOpacity.AddPoint(dataRange[1], 0.0) 41 | else: 42 | scalarOpacity.AddPoint(center + halfSpread, 0.0) 43 | scalarOpacity.AddPoint(dataRange[1], 0.0) 44 | 45 | # ----------------------------------------------------------------------------- 46 | # VTK Pipeline creation 47 | # ----------------------------------------------------------------------------- 48 | 49 | reader = vtkMetaImageReader() 50 | reader.SetFileName(file_path) 51 | 52 | mapper = vtkGPUVolumeRayCastMapper() 53 | mapper.SetInputConnection(reader.GetOutputPort()) 54 | mapper.RenderToImageOn() 55 | 56 | colorFunction = vtkColorTransferFunction() 57 | colorFunction.AddRGBPoint(fieldRange[0], 1.0, 1.0, 1.0) 58 | colorFunction.AddRGBPoint(fieldRange[1], 1.0, 1.0, 1.0) 59 | 60 | halfSpread = (fieldRange[1] - fieldRange[0]) / float(2*nbSteps) 61 | centers = [ fieldRange[0] + halfSpread*float(2*i+1) for i in range(nbSteps)] 62 | 63 | scalarOpacity = vtkPiecewiseFunction() 64 | 65 | volumeProperty = vtkVolumeProperty() 66 | volumeProperty.ShadeOn() 67 | volumeProperty.SetInterpolationType(VTK_LINEAR_INTERPOLATION) 68 | volumeProperty.SetColor(colorFunction) 69 | volumeProperty.SetScalarOpacity(scalarOpacity) 70 | 71 | volume = vtkVolume() 72 | volume.SetMapper(mapper) 73 | volume.SetProperty(volumeProperty) 74 | 75 | window = vtkRenderWindow() 76 | window.SetSize(499, 400) 77 | 78 | renderer = vtkRenderer() 79 | window.AddRenderer(renderer) 80 | 81 | renderer.AddVolume(volume) 82 | renderer.ResetCamera() 83 | window.Render() 84 | 85 | # Camera setting 86 | camera = { 87 | 'position': [-0.264, -890.168, -135.0], 88 | 'focalPoint': [-0.264, -30.264, -135.0], 89 | 'viewUp': [0,0,1] 90 | 
} 91 | update_camera(renderer, camera) 92 | 93 | # ----------------------------------------------------------------------------- 94 | # Data Generation 95 | # ----------------------------------------------------------------------------- 96 | 97 | # Create Image Builder 98 | vcdsb = SortedCompositeDataSetBuilder(dataset_destination_path % (nbSteps, halfSpread, window.GetSize()[0]), {'type': 'spherical', 'phi': [0], 'theta': [0]}) 99 | 100 | idx = 0 101 | vcdsb.start(window, renderer) 102 | for center in centers: 103 | idx += 1 104 | updatePieceWise(scalarOpacity, fieldRange, center, halfSpread) 105 | 106 | # Capture layer 107 | vcdsb.activateLayer(field, center) 108 | 109 | # Write data 110 | vcdsb.writeData(mapper) 111 | 112 | vcdsb.stop() 113 | 114 | 115 | -------------------------------------------------------------------------------- /scripts/examples/vtk/medical/head-ct.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # Download data: 3 | # - Browser: 4 | # http://midas3.kitware.com/midas/folder/10409 => VisibleMale/vm_head_frozenct.mha 5 | # - Terminal 6 | # curl "http://midas3.kitware.com/midas/download?folders=&items=235235" -o vm_head_frozenct.mha 7 | # ----------------------------------------------------------------------------- 8 | 9 | from vtk import * 10 | 11 | from vtk.web.query_data_model import * 12 | from vtk.web.dataset_builder import * 13 | 14 | # ----------------------------------------------------------------------------- 15 | # User configuration 16 | # ----------------------------------------------------------------------------- 17 | 18 | dataset_destination_path = '/Users/seb/Desktop/head_ct' 19 | file_path = '/Users/seb/Downloads/vm_head_frozenct.mha' 20 | 21 | field = 'MetaImage' 22 | fieldRange = [0.0, 4096.0] 23 | nbSteps = 3 24 | 25 | features = [ { 'center': 200, 'halfSpread': 200 }, { 'center': 900, 'halfSpread': 200 }, { 'center': 2000, 'halfSpread': 900 }, ] 26 | 27 | # ----------------------------------------------------------------------------- 28 | # VTK Helper methods 29 | # ----------------------------------------------------------------------------- 30 | 31 | def updatePieceWise(pwf, dataRange, center, halfSpread): 32 | scalarOpacity.RemoveAllPoints() 33 | if (center - halfSpread) <= dataRange[0]: 34 | scalarOpacity.AddPoint(dataRange[0], 0.0) 35 | scalarOpacity.AddPoint(center, 1.0) 36 | else: 37 | scalarOpacity.AddPoint(dataRange[0], 0.0) 38 | scalarOpacity.AddPoint(center - halfSpread, 0.0) 39 | scalarOpacity.AddPoint(center, 1.0) 40 | 41 | if (center + halfSpread) >= dataRange[1]: 42 | scalarOpacity.AddPoint(dataRange[1], 0.0) 43 | else: 44 | scalarOpacity.AddPoint(center + halfSpread, 0.0) 45 | scalarOpacity.AddPoint(dataRange[1], 0.0) 46 | 47 | # ----------------------------------------------------------------------------- 48 | # VTK Pipeline creation 49 | # ----------------------------------------------------------------------------- 50 | 51 | reader = vtkMetaImageReader() 52 | reader.SetFileName(file_path) 53 | 54 | mapper = vtkGPUVolumeRayCastMapper() 55 | mapper.SetInputConnection(reader.GetOutputPort()) 56 | mapper.RenderToImageOn() 57 | 58 | colorFunction = vtkColorTransferFunction() 59 | colorFunction.AddRGBPoint(fieldRange[0], 1.0, 1.0, 1.0) 60 | colorFunction.AddRGBPoint(fieldRange[1], 1.0, 1.0, 1.0) 61 | 62 | halfSpread = (fieldRange[1] - fieldRange[0]) / float(2*nbSteps) 63 | centers = [ fieldRange[0] + 
halfSpread*float(2*i+1) for i in range(nbSteps)] 64 | 65 | scalarOpacity = vtkPiecewiseFunction() 66 | 67 | volumeProperty = vtkVolumeProperty() 68 | volumeProperty.ShadeOn() 69 | volumeProperty.SetInterpolationType(VTK_LINEAR_INTERPOLATION) 70 | volumeProperty.SetColor(colorFunction) 71 | volumeProperty.SetScalarOpacity(scalarOpacity) 72 | 73 | volume = vtkVolume() 74 | volume.SetMapper(mapper) 75 | volume.SetProperty(volumeProperty) 76 | 77 | window = vtkRenderWindow() 78 | window.SetSize(512, 512) 79 | 80 | renderer = vtkRenderer() 81 | window.AddRenderer(renderer) 82 | 83 | renderer.AddVolume(volume) 84 | renderer.ResetCamera() 85 | window.Render() 86 | 87 | # Camera setting 88 | camera = { 89 | 'position': [-0.264, -890.168, -135.0], 90 | 'focalPoint': [-0.264, -30.264, -135.0], 91 | 'viewUp': [0,0,1] 92 | } 93 | update_camera(renderer, camera) 94 | 95 | # ----------------------------------------------------------------------------- 96 | # Data Generation 97 | # ----------------------------------------------------------------------------- 98 | 99 | # Create Image Builder 100 | vcdsb = SortedCompositeDataSetBuilder(dataset_destination_path, {'type': 'spherical', 'phi': range(0, 360, 30), 'theta': range(-60, 61, 30)}) 101 | 102 | idx = 0 103 | vcdsb.start(window, renderer) 104 | for feature in features: 105 | idx += 1 106 | updatePieceWise(scalarOpacity, fieldRange, feature['center'], feature['halfSpread']) 107 | 108 | # Capture layer 109 | vcdsb.activateLayer(field, feature['center']) 110 | 111 | # Write data 112 | vcdsb.writeData(mapper) 113 | 114 | vcdsb.stop() 115 | 116 | 117 | -------------------------------------------------------------------------------- /scripts/examples/vtk/medical/head-mri.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # Download data: 3 | # - Browser: 4 | # http://midas3.kitware.com/midas/folder/10409 => VisibleMale/vm_head_mri.mha 5 | # - Terminal 6 | # curl "http://midas3.kitware.com/midas/download?folders=&items=235237" -o vm_head_mri.mha 7 | # ----------------------------------------------------------------------------- 8 | 9 | from vtk import * 10 | 11 | from vtk.web.query_data_model import * 12 | from vtk.web.dataset_builder import * 13 | 14 | # ----------------------------------------------------------------------------- 15 | # User configuration 16 | # ----------------------------------------------------------------------------- 17 | 18 | dataset_destination_path = '/Users/seb/Desktop/head_mri' 19 | file_path = '/Users/seb/Downloads/vm_head_mri.mha' 20 | 21 | field = 'MetaImage' 22 | fieldRange = [0.0, 1134.0] 23 | nbSteps = 25 24 | 25 | # ----------------------------------------------------------------------------- 26 | # VTK Helper methods 27 | # ----------------------------------------------------------------------------- 28 | 29 | def updatePieceWise(pwf, dataRange, center, halfSpread): 30 | scalarOpacity.RemoveAllPoints() 31 | if (center - halfSpread) <= dataRange[0]: 32 | scalarOpacity.AddPoint(dataRange[0], 0.0) 33 | scalarOpacity.AddPoint(center, 1.0) 34 | else: 35 | scalarOpacity.AddPoint(dataRange[0], 0.0) 36 | scalarOpacity.AddPoint(center - halfSpread, 0.0) 37 | scalarOpacity.AddPoint(center, 1.0) 38 | 39 | if (center + halfSpread) >= dataRange[1]: 40 | scalarOpacity.AddPoint(dataRange[1], 0.0) 41 | else: 42 | scalarOpacity.AddPoint(center + halfSpread, 0.0) 43 | scalarOpacity.AddPoint(dataRange[1], 0.0) 44 | 45 | # 
----------------------------------------------------------------------------- 46 | # VTK Pipeline creation 47 | # ----------------------------------------------------------------------------- 48 | 49 | reader = vtkMetaImageReader() 50 | reader.SetFileName(file_path) 51 | 52 | mapper = vtkGPUVolumeRayCastMapper() 53 | mapper.SetInputConnection(reader.GetOutputPort()) 54 | mapper.RenderToImageOn() 55 | 56 | colorFunction = vtkColorTransferFunction() 57 | colorFunction.AddRGBPoint(fieldRange[0], 1.0, 1.0, 1.0) 58 | colorFunction.AddRGBPoint(fieldRange[1], 1.0, 1.0, 1.0) 59 | 60 | halfSpread = (fieldRange[1] - fieldRange[0]) / float(2*nbSteps) 61 | centers = [ fieldRange[0] + halfSpread*float(2*i+1) for i in range(nbSteps)] 62 | 63 | scalarOpacity = vtkPiecewiseFunction() 64 | 65 | volumeProperty = vtkVolumeProperty() 66 | volumeProperty.ShadeOn() 67 | volumeProperty.SetInterpolationType(VTK_LINEAR_INTERPOLATION) 68 | volumeProperty.SetColor(colorFunction) 69 | volumeProperty.SetScalarOpacity(scalarOpacity) 70 | 71 | volume = vtkVolume() 72 | volume.SetMapper(mapper) 73 | volume.SetProperty(volumeProperty) 74 | 75 | window = vtkRenderWindow() 76 | window.SetSize(512, 512) 77 | 78 | renderer = vtkRenderer() 79 | window.AddRenderer(renderer) 80 | 81 | renderer.AddVolume(volume) 82 | renderer.ResetCamera() 83 | window.Render() 84 | 85 | # Camera setting 86 | camera = { 87 | 'position': [-0.508, -872.745, 5.1], 88 | 'focalPoint': [-0.508, -32.108, 5.1], 89 | 'viewUp': [0,0,1] 90 | } 91 | 92 | update_camera(renderer, camera) 93 | 94 | # ----------------------------------------------------------------------------- 95 | # Data Generation 96 | # ----------------------------------------------------------------------------- 97 | 98 | # Create Image Builder 99 | vcdsb = SortedCompositeDataSetBuilder(dataset_destination_path, {'type': 'spherical', 'phi': range(0, 360, 45), 'theta': [0]}) 100 | 101 | idx = 0 102 | vcdsb.start(window, renderer) 103 | for center in centers: 104 | idx += 1 105 | updatePieceWise(scalarOpacity, fieldRange, center, halfSpread) 106 | 107 | # Capture layer 108 | vcdsb.activateLayer(field, center) 109 | 110 | # Write data 111 | vcdsb.writeData(mapper) 112 | 113 | vcdsb.stop() 114 | 115 | 116 | -------------------------------------------------------------------------------- /scripts/examples/vtk/mpas/raw-probe-flat-earth.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | dataset_destination_path = '/Users/seb/Desktop/vtk_data_prober_mpas' 6 | 7 | # ----------------------------------------------------------------------------- 8 | 9 | from vtk import * 10 | from vtk.web.dataset_builder import * 11 | 12 | # ----------------------------------------------------------------------------- 13 | 14 | data_base_path = '/Volumes/Backup3TB/DataExploration/Data/MPAS/data/flat_n_primal/' 15 | 16 | flat_file_pattern = 'LON_LAT_NLAYER-primal_%d_0.vtu' 17 | flat_file_times = range(50, 101, 50) # range(50, 5151, 50) # range(50, 5901, 50) 18 | 19 | flat_arrays = ['temperature', 'salinity'] 20 | flat_sampling_size = [ 500, 250, 30 ] 21 | flat_sampling_bounds = [ -3.2, 3.2, 22 | -1.3, 1.5, 23 | -3.0, 0.0 ] 24 | 25 | # ----------------------------------------------------------------------------- 26 | # VTK Data 27 | # 
----------------------------------------------------------------------------- 28 | 29 | reader = vtkXMLUnstructuredGridReader() 30 | 31 | # ----------------------------------------------------------------------------- 32 | # Data Generation 33 | # ----------------------------------------------------------------------------- 34 | 35 | dpdsb = DataProberDataSetBuilder(dataset_destination_path, flat_sampling_size, flat_arrays, flat_sampling_bounds) 36 | dpdsb.setSourceToProbe(reader) 37 | 38 | # Add time information 39 | dpdsb.getDataHandler().registerArgument(priority=1, name='time', values=flat_file_times, ui='slider', loop='modulo') 40 | 41 | # Extract data 42 | dpdsb.start() 43 | for time in dpdsb.getDataHandler().time: 44 | fileName = data_base_path + (flat_file_pattern % time) 45 | reader.SetFileName(fileName) 46 | print 'processing', fileName 47 | dpdsb.writeData() 48 | dpdsb.stop() 49 | -------------------------------------------------------------------------------- /scripts/examples/vtk/samples/cone.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | dataset_destination_path = '/Users/seb/Desktop/vtk_cone' 6 | 7 | # ----------------------------------------------------------------------------- 8 | 9 | from vtk import * 10 | from vtk.web.dataset_builder import * 11 | 12 | # ----------------------------------------------------------------------------- 13 | # VTK Pipeline creation 14 | # ----------------------------------------------------------------------------- 15 | 16 | source = vtkConeSource() 17 | 18 | mapper = vtkDataSetMapper() 19 | mapper.SetInputConnection(source.GetOutputPort()) 20 | 21 | actor = vtkActor() 22 | actor.SetMapper(mapper) 23 | 24 | window = vtkRenderWindow() 25 | window.SetSize(500, 500) 26 | 27 | renderer = vtkRenderer() 28 | window.AddRenderer(renderer) 29 | 30 | renderer.AddActor(actor) 31 | renderer.SetBackground(0.5, 0.5, 0.6) 32 | 33 | camera = vtkCamera() 34 | renderer.SetActiveCamera(camera) 35 | 36 | window.Render() 37 | renderer.ResetCamera() 38 | window.Render() 39 | 40 | # ----------------------------------------------------------------------------- 41 | # Data Generation 42 | # ----------------------------------------------------------------------------- 43 | 44 | # Create Image Builder 45 | dsb = ImageDataSetBuilder(dataset_destination_path, 'image/jpg', {'type': 'spherical', 'phi': range(0, 360, 30), 'theta': range(-60, 61, 30)}) 46 | 47 | # Add resolution information 48 | dsb.getDataHandler().registerArgument(priority=1, name='resolution', values=range(10, 61, 10), ui='slider') 49 | 50 | # Loop over data and generate images 51 | dsb.start(window, renderer) 52 | for resolution in dsb.getDataHandler().resolution: 53 | source.SetResolution(resolution) 54 | dsb.writeImages() 55 | dsb.stop() 56 | -------------------------------------------------------------------------------- /scripts/examples/vtk/samples/diskout-volume.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | dataset_destination_path = '/Users/seb/Desktop/diskout_volume_%s' 6 | file_path = 
'/Users/seb/Downloads/ParaViewData-3.98.1/Data/disk_out_ref.ex2' 7 | 8 | field = 'AsH3' 9 | fieldRange = [0.0805, 0.185] 10 | nbSteps = 20 11 | 12 | # ----------------------------------------------------------------------------- 13 | 14 | from vtk import * 15 | from vtk.web.dataset_builder import * 16 | 17 | # ----------------------------------------------------------------------------- 18 | # VTK Helper methods 19 | # ----------------------------------------------------------------------------- 20 | 21 | def updatePieceWise(pwf, dataRange, center, halfSpread): 22 | scalarOpacity.RemoveAllPoints() 23 | if (center - halfSpread) <= dataRange[0]: 24 | scalarOpacity.AddPoint(dataRange[0], 0.0) 25 | scalarOpacity.AddPoint(center, 1.0) 26 | else: 27 | scalarOpacity.AddPoint(dataRange[0], 0.0) 28 | scalarOpacity.AddPoint(center - halfSpread, 0.0) 29 | scalarOpacity.AddPoint(center, 1.0) 30 | 31 | if (center + halfSpread) >= dataRange[1]: 32 | scalarOpacity.AddPoint(dataRange[1], 0.0) 33 | else: 34 | scalarOpacity.AddPoint(center + halfSpread, 0.0) 35 | scalarOpacity.AddPoint(dataRange[1], 0.0) 36 | 37 | # ----------------------------------------------------------------------------- 38 | # VTK Pipeline creation 39 | # ----------------------------------------------------------------------------- 40 | 41 | reader = vtkExodusIIReader() 42 | reader.SetFileName(file_path) 43 | reader.SetPointResultArrayStatus(field, 1) 44 | reader.SetElementBlockArrayStatus('Unnamed block ID: 1 Type: HEX8', 1) 45 | 46 | # disk_out_ref_ex2 = simple.ExodusIIReader( FileName=['/Users/seb/Downloads/ParaViewData-3.98.1/Data/disk_out_ref.ex2'] ) 47 | 48 | # disk_out_ref_ex2.FileRange = [0, 0] 49 | # disk_out_ref_ex2.XMLFileName = '/Users/seb/Downloads/ParaViewData-3.98.1/Data/artifact.dta' 50 | # disk_out_ref_ex2.FilePrefix = '/Users/seb/Downloads/ParaViewData-3.98.1/Data/disk_out_ref.ex2' 51 | # disk_out_ref_ex2.ModeShape = 0 52 | # disk_out_ref_ex2.FilePattern = '%s' 53 | 54 | # disk_out_ref_ex2.ElementBlocks = ['Unnamed block ID: 1 Type: HEX8'] 55 | # disk_out_ref_ex2.NodeSetArrayStatus = [] 56 | # disk_out_ref_ex2.SideSetArrayStatus = [] 57 | # disk_out_ref_ex2.PointVariables = ['Temp', 'V', 'Pres', 'AsH3', 'GaMe3', 'CH4', 'H2'] 58 | reader.Update() 59 | print reader.GetOutput() 60 | 61 | 62 | mapper = vtkGPUVolumeRayCastMapper() 63 | mapper.SetInputConnection(reader.GetOutputPort()) 64 | mapper.RenderToImageOn() 65 | 66 | colorFunction = vtkColorTransferFunction() 67 | colorFunction.AddRGBPoint(fieldRange[0], 1.0, 1.0, 1.0) 68 | colorFunction.AddRGBPoint(fieldRange[1], 1.0, 1.0, 1.0) 69 | 70 | halfSpread = (fieldRange[1] - fieldRange[0]) / float(2*nbSteps) 71 | centers = [ fieldRange[0] + halfSpread*float(2*i+1) for i in range(nbSteps)] 72 | 73 | scalarOpacity = vtkPiecewiseFunction() 74 | 75 | volumeProperty = vtkVolumeProperty() 76 | # volumeProperty.ShadeOn() 77 | volumeProperty.SetInterpolationType(VTK_LINEAR_INTERPOLATION) 78 | volumeProperty.SetColor(colorFunction) 79 | volumeProperty.SetScalarOpacity(scalarOpacity) 80 | 81 | volume = vtkVolume() 82 | volume.SetMapper(mapper) 83 | volume.SetProperty(volumeProperty) 84 | 85 | window = vtkRenderWindow() 86 | window.SetSize(500, 500) 87 | 88 | renderer = vtkRenderer() 89 | window.AddRenderer(renderer) 90 | 91 | renderer.AddVolume(volume) 92 | renderer.ResetCamera() 93 | window.Render() 94 | 95 | # ----------------------------------------------------------------------------- 96 | # Data Generation 97 | # 
----------------------------------------------------------------------------- 98 | 99 | # Create Image Builder 100 | vcdsb = SortedCompositeDataSetBuilder(dataset_destination_path % nbSteps, {'type': 'spherical', 'phi': range(0, 360, 30), 'theta': range(-60, 61, 15)}) 101 | 102 | idx = 0 103 | vcdsb.start(window, renderer) 104 | for center in centers: 105 | idx += 1 106 | updatePieceWise(scalarOpacity, fieldRange, center, halfSpread) 107 | 108 | # Capture layer 109 | vcdsb.activateLayer(field, center) 110 | 111 | # Write data 112 | vcdsb.writeData(mapper) 113 | 114 | vcdsb.stop() 115 | 116 | 117 | -------------------------------------------------------------------------------- /scripts/examples/vtk/samples/multi-spheres-volume.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | dataset_destination_path = '/Users/seb/Desktop/spheres_%s' 6 | vti_sphere_path = '/Users/seb/Downloads/spheres.vti' 7 | 8 | # ----------------------------------------------------------------------------- 9 | 10 | from vtk import * 11 | from vtk.web.dataset_builder import * 12 | 13 | # ----------------------------------------------------------------------------- 14 | # VTK Helper methods 15 | # ----------------------------------------------------------------------------- 16 | 17 | def updatePieceWise(pwf, dataRange, center, halfSpread): 18 | scalarOpacity.RemoveAllPoints() 19 | if (center - halfSpread) <= dataRange[0]: 20 | scalarOpacity.AddPoint(dataRange[0], 0.0) 21 | scalarOpacity.AddPoint(center, 1.0) 22 | else: 23 | scalarOpacity.AddPoint(dataRange[0], 0.0) 24 | scalarOpacity.AddPoint(center - halfSpread, 0.0) 25 | scalarOpacity.AddPoint(center, 1.0) 26 | 27 | if (center + halfSpread) >= dataRange[1]: 28 | scalarOpacity.AddPoint(dataRange[1], 0.0) 29 | else: 30 | scalarOpacity.AddPoint(center + halfSpread, 0.0) 31 | scalarOpacity.AddPoint(dataRange[1], 0.0) 32 | 33 | # ----------------------------------------------------------------------------- 34 | # VTK Pipeline creation 35 | # ----------------------------------------------------------------------------- 36 | 37 | reader = vtkXMLImageDataReader() 38 | reader.SetFileName(vti_sphere_path) 39 | # reader.SetPointArrayStatus('ImageFile', 1) 40 | # reader.Update() 41 | 42 | mapper = vtkGPUVolumeRayCastMapper() 43 | mapper.SetInputConnection(reader.GetOutputPort()) 44 | mapper.RenderToImageOn() 45 | 46 | colorFunction = vtkColorTransferFunction() 47 | colorFunction.AddRGBPoint(0.0, 1.0, 1.0, 1.0) 48 | colorFunction.AddRGBPoint(255.0, 1.0, 1.0, 1.0) 49 | 50 | dataRange = [0.0, 255.0] 51 | nbSteps = 10 52 | halfSpread = (dataRange[1] - dataRange[0]) / float(2*nbSteps) 53 | centers = [ dataRange[0] + halfSpread*float(2*i+1) for i in range(nbSteps)] 54 | 55 | scalarOpacity = vtkPiecewiseFunction() 56 | 57 | volumeProperty = vtkVolumeProperty() 58 | # volumeProperty.ShadeOn() 59 | volumeProperty.SetInterpolationType(VTK_LINEAR_INTERPOLATION) 60 | volumeProperty.SetColor(colorFunction) 61 | volumeProperty.SetScalarOpacity(scalarOpacity) 62 | 63 | volume = vtkVolume() 64 | volume.SetMapper(mapper) 65 | volume.SetProperty(volumeProperty) 66 | 67 | window = vtkRenderWindow() 68 | window.SetSize(500, 500) 69 | 70 | renderer = vtkRenderer() 71 | window.AddRenderer(renderer) 72 | 73 | renderer.AddVolume(volume) 74 | renderer.ResetCamera() 75 | 
window.Render() 76 | 77 | # ----------------------------------------------------------------------------- 78 | # Data Generation 79 | # ----------------------------------------------------------------------------- 80 | 81 | # Create Image Builder 82 | vcdsb = SortedCompositeDataSetBuilder(dataset_destination_path % nbSteps, {'type': 'spherical', 'phi': range(0, 360, 30), 'theta': range(-60, 61, 30)}) 83 | 84 | idx = 0 85 | vcdsb.start(window, renderer) 86 | for center in centers: 87 | idx += 1 88 | updatePieceWise(scalarOpacity, dataRange, center, halfSpread) 89 | 90 | # Capture layer 91 | vcdsb.activateLayer('ImageFile', center) 92 | 93 | # Write data 94 | vcdsb.writeData(mapper) 95 | 96 | vcdsb.stop() 97 | 98 | 99 | -------------------------------------------------------------------------------- /scripts/examples/vtk/samples/syntax-evolution-volume_v2.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | dataset_destination_path = '/Users/seb/Desktop/vtk_volume_v2' 6 | 7 | # ----------------------------------------------------------------------------- 8 | 9 | from vtk import * 10 | from vtk.web.dataset_builder import * 11 | 12 | # ----------------------------------------------------------------------------- 13 | # VTK Helper methods 14 | # ----------------------------------------------------------------------------- 15 | 16 | def updatePieceWise(pwf, dataRange, center, halfSpread): 17 | scalarOpacity.RemoveAllPoints() 18 | if (center - halfSpread) <= dataRange[0]: 19 | scalarOpacity.AddPoint(dataRange[0], 0.0) 20 | scalarOpacity.AddPoint(center, 1.0) 21 | else: 22 | scalarOpacity.AddPoint(dataRange[0], 0.0) 23 | scalarOpacity.AddPoint(center - halfSpread, 0.0) 24 | scalarOpacity.AddPoint(center, 1.0) 25 | 26 | if (center + halfSpread) >= dataRange[1]: 27 | scalarOpacity.AddPoint(dataRange[1], 0.0) 28 | else: 29 | scalarOpacity.AddPoint(center + halfSpread, 0.0) 30 | scalarOpacity.AddPoint(dataRange[1], 0.0) 31 | 32 | # ----------------------------------------------------------------------------- 33 | 34 | imageWriter = vtkPNGWriter() 35 | 36 | def writeDepthMap(imageData, path): 37 | width = imageData.GetDimensions()[0] 38 | height = imageData.GetDimensions()[1] 39 | nbTuples = width * height 40 | 41 | inputArray = imageData.GetPointData().GetArray(0) 42 | array = bytearray(nbTuples) 43 | 44 | for idx in range(inputArray.GetNumberOfTuples()): 45 | array[idx] = 255 - int(inputArray.GetValue(idx)) 46 | 47 | with open(path, 'wb') as f: 48 | f.write(array) 49 | 50 | def writeColorMap(imageData, path): 51 | imageWriter.SetInputData(imageData) 52 | imageWriter.SetFileName(path) 53 | imageWriter.Write() 54 | 55 | # ----------------------------------------------------------------------------- 56 | # VTK Pipeline creation 57 | # ----------------------------------------------------------------------------- 58 | 59 | source = vtkRTAnalyticSource() 60 | 61 | mapper = vtkGPUVolumeRayCastMapper() 62 | mapper.SetInputConnection(source.GetOutputPort()) 63 | mapper.RenderToImageOn() 64 | 65 | colorFunction = vtkColorTransferFunction() 66 | colorFunction.AddRGBPoint(37.35310363769531, 0.231373, 0.298039, 0.752941) 67 | colorFunction.AddRGBPoint(157.0909652709961, 0.865003, 0.865003, 0.865003) 68 | colorFunction.AddRGBPoint(276.8288269042969, 0.705882, 0.0156863, 0.14902) 69 | 
70 | dataRange = [37.3, 276.8] 71 | nbSteps = 10 72 | halfSpread = (dataRange[1] - dataRange[0]) / float(2*nbSteps) 73 | centers = [ dataRange[0] + halfSpread*float(2*i+1) for i in range(nbSteps)] 74 | 75 | scalarOpacity = vtkPiecewiseFunction() 76 | 77 | volumeProperty = vtkVolumeProperty() 78 | # volumeProperty.ShadeOn() 79 | volumeProperty.SetInterpolationType(VTK_LINEAR_INTERPOLATION) 80 | volumeProperty.SetColor(colorFunction) 81 | volumeProperty.SetScalarOpacity(scalarOpacity) 82 | 83 | volume = vtkVolume() 84 | volume.SetMapper(mapper) 85 | volume.SetProperty(volumeProperty) 86 | 87 | window = vtkRenderWindow() 88 | window.SetSize(500, 500) 89 | 90 | renderer = vtkRenderer() 91 | renderer.SetBackground(0.5, 0.5, 0.6) 92 | window.AddRenderer(renderer) 93 | 94 | renderer.AddVolume(volume) 95 | renderer.ResetCamera() 96 | window.Render() 97 | 98 | colorMap = vtkImageData() 99 | depthMap = vtkImageData() 100 | 101 | # ----------------------------------------------------------------------------- 102 | # Data Generation 103 | # ----------------------------------------------------------------------------- 104 | 105 | # Create Image Builder 106 | dsb = ImageDataSetBuilder(dataset_destination_path, 'image/png', {'type': 'spherical', 'phi': range(0, 360, 30), 'theta': range(-60, 61, 30)}) 107 | 108 | # Add PieceWise navigation 109 | dsb.getDataHandler().registerArgument(priority=1, name='pwf', label='Transfer function', values=centers, ui='slider') 110 | 111 | # Add Depth data 112 | dsb.getDataHandler().registerData(name='depth', type='array', fileName='_depth.uint8', metadata={ 'dimensions': window.GetSize() }) 113 | 114 | # Loop over data and generate images 115 | dsb.start(window, renderer) 116 | for center in dsb.getDataHandler().pwf: 117 | updatePieceWise(scalarOpacity, dataRange, center, halfSpread) 118 | for camera in dsb.getCamera(): 119 | dsb.updateCamera(camera) 120 | 121 | mapper.GetColorImage(colorMap) 122 | writeColorMap(colorMap, dsb.getDataHandler().getDataAbsoluteFilePath('image')) 123 | 124 | mapper.GetDepthImage(depthMap) 125 | writeDepthMap(depthMap, dsb.getDataHandler().getDataAbsoluteFilePath('depth')) 126 | dsb.stop() 127 | -------------------------------------------------------------------------------- /scripts/examples/vtk/samples/syntax-evolution-volume_v3.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | dataset_destination_path = '/Users/seb/Desktop/vtk_volume_v3' 6 | 7 | # ----------------------------------------------------------------------------- 8 | 9 | from vtk import * 10 | from vtk.web.dataset_builder import * 11 | 12 | # ----------------------------------------------------------------------------- 13 | # VTK Helper methods 14 | # ----------------------------------------------------------------------------- 15 | 16 | def updatePieceWise(pwf, dataRange, center, halfSpread): 17 | scalarOpacity.RemoveAllPoints() 18 | if (center - halfSpread) <= dataRange[0]: 19 | scalarOpacity.AddPoint(dataRange[0], 0.0) 20 | scalarOpacity.AddPoint(center, 1.0) 21 | else: 22 | scalarOpacity.AddPoint(dataRange[0], 0.0) 23 | scalarOpacity.AddPoint(center - halfSpread, 0.0) 24 | scalarOpacity.AddPoint(center, 1.0) 25 | 26 | if (center + halfSpread) >= dataRange[1]: 27 | scalarOpacity.AddPoint(dataRange[1], 0.0) 28 | else: 29 | 
scalarOpacity.AddPoint(center + halfSpread, 0.0) 30 | scalarOpacity.AddPoint(dataRange[1], 0.0) 31 | 32 | # ----------------------------------------------------------------------------- 33 | # VTK Pipeline creation 34 | # ----------------------------------------------------------------------------- 35 | 36 | source = vtkRTAnalyticSource() 37 | 38 | mapper = vtkGPUVolumeRayCastMapper() 39 | mapper.SetInputConnection(source.GetOutputPort()) 40 | mapper.RenderToImageOn() 41 | 42 | colorFunction = vtkColorTransferFunction() 43 | colorFunction.AddRGBPoint(37.35310363769531, 0.231373, 0.298039, 0.752941) 44 | colorFunction.AddRGBPoint(157.0909652709961, 0.865003, 0.865003, 0.865003) 45 | colorFunction.AddRGBPoint(276.8288269042969, 0.705882, 0.0156863, 0.14902) 46 | 47 | dataRange = [37.3, 276.8] 48 | nbSteps = 5 49 | halfSpread = (dataRange[1] - dataRange[0]) / float(2*nbSteps) 50 | centers = [ dataRange[0] + halfSpread*float(2*i+1) for i in range(nbSteps)] 51 | 52 | scalarOpacity = vtkPiecewiseFunction() 53 | 54 | volumeProperty = vtkVolumeProperty() 55 | # volumeProperty.ShadeOn() 56 | volumeProperty.SetInterpolationType(VTK_LINEAR_INTERPOLATION) 57 | volumeProperty.SetColor(colorFunction) 58 | volumeProperty.SetScalarOpacity(scalarOpacity) 59 | 60 | volume = vtkVolume() 61 | volume.SetMapper(mapper) 62 | volume.SetProperty(volumeProperty) 63 | 64 | window = vtkRenderWindow() 65 | window.SetSize(500, 500) 66 | 67 | renderer = vtkRenderer() 68 | renderer.SetBackground(0.5, 0.5, 0.6) 69 | window.AddRenderer(renderer) 70 | 71 | renderer.AddVolume(volume) 72 | renderer.ResetCamera() 73 | window.Render() 74 | 75 | # ----------------------------------------------------------------------------- 76 | # Data Generation 77 | # ----------------------------------------------------------------------------- 78 | 79 | # Create Image Builder 80 | vcdsb = VolumeCompositeDataSetBuilder(dataset_destination_path, 'image/png', {'type': 'spherical', 'phi': [0, 90], 'theta': [0]}) 81 | 82 | idx = 0 83 | vcdsb.start(window, renderer) 84 | for center in centers: 85 | idx += 1 86 | updatePieceWise(scalarOpacity, dataRange, center, halfSpread) 87 | 88 | # Capture layer 89 | vcdsb.activateLayer('Volumes', 'volume_%d' % idx, 'RTData') 90 | 91 | # Write data 92 | vcdsb.writeData(mapper) 93 | 94 | vcdsb.stop() 95 | 96 | 97 | 98 | -------------------------------------------------------------------------------- /scripts/examples/vtk/samples/syntax-evolution-volume_v4.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | dataset_destination_path = '/Users/seb/Desktop/wavelet_%s' 6 | 7 | # ----------------------------------------------------------------------------- 8 | 9 | from vtk import * 10 | from vtk.web.dataset_builder import * 11 | 12 | # ----------------------------------------------------------------------------- 13 | # VTK Helper methods 14 | # ----------------------------------------------------------------------------- 15 | 16 | def updatePieceWise(pwf, dataRange, center, halfSpread): 17 | scalarOpacity.RemoveAllPoints() 18 | if (center - halfSpread) <= dataRange[0]: 19 | scalarOpacity.AddPoint(dataRange[0], 0.0) 20 | scalarOpacity.AddPoint(center, 1.0) 21 | else: 22 | scalarOpacity.AddPoint(dataRange[0], 0.0) 23 | scalarOpacity.AddPoint(center - halfSpread, 0.0) 24 | 
scalarOpacity.AddPoint(center, 1.0) 25 | 26 | if (center + halfSpread) >= dataRange[1]: 27 | scalarOpacity.AddPoint(dataRange[1], 0.0) 28 | else: 29 | scalarOpacity.AddPoint(center + halfSpread, 0.0) 30 | scalarOpacity.AddPoint(dataRange[1], 0.0) 31 | 32 | # ----------------------------------------------------------------------------- 33 | # VTK Pipeline creation 34 | # ----------------------------------------------------------------------------- 35 | 36 | source = vtkRTAnalyticSource() 37 | 38 | mapper = vtkGPUVolumeRayCastMapper() 39 | mapper.SetInputConnection(source.GetOutputPort()) 40 | mapper.RenderToImageOn() 41 | 42 | colorFunction = vtkColorTransferFunction() 43 | colorFunction.AddRGBPoint(37.35310363769531, 1.0, 1.0, 1.0) 44 | colorFunction.AddRGBPoint(276.8288269042969, 1.0, 1.0, 1.0) 45 | 46 | dataRange = [37.3, 276.8] 47 | nbSteps = 5 48 | halfSpread = (dataRange[1] - dataRange[0]) / float(2*nbSteps) 49 | centers = [ dataRange[0] + halfSpread*float(2*i+1) for i in range(nbSteps)] 50 | 51 | scalarOpacity = vtkPiecewiseFunction() 52 | 53 | volumeProperty = vtkVolumeProperty() 54 | # volumeProperty.ShadeOn() 55 | volumeProperty.SetInterpolationType(VTK_LINEAR_INTERPOLATION) 56 | volumeProperty.SetColor(colorFunction) 57 | volumeProperty.SetScalarOpacity(scalarOpacity) 58 | 59 | volume = vtkVolume() 60 | volume.SetMapper(mapper) 61 | volume.SetProperty(volumeProperty) 62 | 63 | window = vtkRenderWindow() 64 | window.SetSize(500, 500) 65 | 66 | renderer = vtkRenderer() 67 | renderer.SetBackground(0.5, 0.5, 0.6) 68 | window.AddRenderer(renderer) 69 | 70 | renderer.AddVolume(volume) 71 | renderer.ResetCamera() 72 | window.Render() 73 | 74 | # ----------------------------------------------------------------------------- 75 | # Data Generation 76 | # ----------------------------------------------------------------------------- 77 | 78 | # Create Image Builder 79 | vcdsb = SortedCompositeDataSetBuilder(dataset_destination_path % nbSteps, {'type': 'spherical', 'phi': range(0, 360, 30), 'theta': range(-60, 61, 60)}) 80 | 81 | idx = 0 82 | vcdsb.start(window, renderer) 83 | for center in centers: 84 | idx += 1 85 | updatePieceWise(scalarOpacity, dataRange, center, halfSpread) 86 | 87 | # Capture layer 88 | vcdsb.activateLayer('RTData', center) 89 | 90 | # Write data 91 | vcdsb.writeData(mapper) 92 | 93 | vcdsb.stop() 94 | 95 | 96 | 97 | -------------------------------------------------------------------------------- /scripts/examples/vtk/samples/wavelet-raw-data-prober.py: -------------------------------------------------------------------------------- 1 | 2 | # ----------------------------------------------------------------------------- 3 | # User configuration 4 | # ----------------------------------------------------------------------------- 5 | 6 | dataset_destination_path = '/Users/seb/Desktop/vtk_data_prober' 7 | 8 | # ----------------------------------------------------------------------------- 9 | 10 | from vtk import * 11 | from vtk.web.dataset_builder import * 12 | 13 | # ----------------------------------------------------------------------------- 14 | # VTK Pipeline creation 15 | # ----------------------------------------------------------------------------- 16 | 17 | source = vtkRTAnalyticSource() 18 | 19 | # ----------------------------------------------------------------------------- 20 | # Data Generation 21 | # ----------------------------------------------------------------------------- 22 | 23 | dpdsb = DataProberDataSetBuilder(dataset_destination_path, [20,20,20], 
['RTData']) 24 | dpdsb.setSourceToProbe(source) 25 | 26 | dpdsb.start() 27 | dpdsb.writeData() 28 | dpdsb.stop() 29 | -------------------------------------------------------------------------------- /scripts/examples/vtk/tests/convert-stack-to-sorted.py: -------------------------------------------------------------------------------- 1 | # ----------------------------------------------------------------------------- 2 | # User configuration 3 | # ----------------------------------------------------------------------------- 4 | 5 | convert_dir = [ '/Users/seb/Desktop/vtk_volume_v3/0_90', '/Users/seb/Desktop/vtk_volume_v3/0_0'] 6 | 7 | # ----------------------------------------------------------------------------- 8 | 9 | from vtk.web.dataset_builder import * 10 | 11 | converter = ConvertVolumeStackToSortedStack(500, 500) 12 | for d in convert_dir: 13 | converter.convert(d) 14 | 15 | -------------------------------------------------------------------------------- /site.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "output" : "docs/www", 3 | "src" : "lib", 4 | "api" : "docs/api", 5 | "doc" : "docs/guides", 6 | "news" : "docs/news", 7 | "theme" : "tonic", 8 | "icon" : "docs/images/ArcticViewer.png", 9 | "gitdir" : "../../.git/modules/docs/www", 10 | "ctx" : { 11 | "title": "ArcticViewer", 12 | "vision": "The cool Interactive Data viewer", 13 | "description": "Command line tool that let you explore interactively your cold data using your web browser.", 14 | "details": "", 15 | "license": "BSD 3 Clause Open Source", 16 | "company": "Kitware, Inc.", 17 | "companyURL": "http://www.kitware.com", 18 | "baseurl" : "/arctic-viewer", 19 | "url" : "", 20 | "repository": "Kitware/arctic-viewer", 21 | "timezone": "America/Denver", 22 | "project": "arctic-viewer", 23 | "noApi" : "no mdoc link", 24 | "noTravis": "no testables set", 25 | "status": [ 26 | {"subject": "api_stability", "status": "fair", "color": "green"}, 27 | {"subject": "feature_complete", "status": "★★☆☆☆", "color": "green"} 28 | ] 29 | } 30 | }; 31 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const autoprefixer = require('autoprefixer'); 3 | 4 | const entry = path.join(__dirname, './lib/arctic-viewer.js'); 5 | const outputPath = path.join(__dirname, './dist'); 6 | const eslintrcPath = path.join(__dirname, '.eslintrc.js'); 7 | 8 | const plugins = []; 9 | 10 | module.exports = { 11 | plugins, 12 | entry, 13 | output: { 14 | path: outputPath, 15 | filename: 'viewer.js', 16 | libraryTarget: 'umd', 17 | }, 18 | module: { 19 | rules: [ 20 | { test: entry, loader: 'expose-loader?ArcticViewer' }, 21 | { 22 | test: /\.worker\.js$/, 23 | include: /vtk\.js/, 24 | use: [ 25 | { 26 | loader: 'worker-loader', 27 | options: { inline: true, fallback: false }, 28 | }, 29 | ], 30 | }, 31 | { 32 | test: /\.css$/, 33 | exclude: /\.module\.css$/, 34 | use: [ 35 | 'style-loader', 36 | 'css-loader', 37 | { 38 | loader: 'postcss-loader', 39 | options: { 40 | plugins: () => [autoprefixer('last 2 version', 'ie >= 10')], 41 | }, 42 | }, 43 | ], 44 | }, 45 | { 46 | test: /\.css$/, 47 | include: /\.module\.css$/, 48 | use: [ 49 | { loader: 'style-loader' }, 50 | { 51 | loader: 'css-loader', 52 | options: { 53 | localIdentName: '[name]-[local]_[sha512:hash:base64:5]', 54 | modules: true, 55 | }, 56 | }, 57 | { 58 | loader: 
'postcss-loader', 59 | options: { 60 | plugins: () => [autoprefixer('last 2 version', 'ie >= 10')], 61 | }, 62 | }, 63 | ], 64 | }, 65 | { 66 | test: /\.mcss$/, 67 | use: [ 68 | { loader: 'style-loader' }, 69 | { 70 | loader: 'css-loader', 71 | options: { 72 | localIdentName: '[name]-[local]_[sha512:hash:base64:5]', 73 | modules: true, 74 | }, 75 | }, 76 | { 77 | loader: 'postcss-loader', 78 | options: { 79 | plugins: () => [autoprefixer('last 2 version', 'ie >= 10')], 80 | }, 81 | }, 82 | ], 83 | }, 84 | { 85 | test: /\.svg$/, 86 | loader: 'svg-sprite-loader?runtimeCompat=true', 87 | exclude: /fonts/, 88 | }, 89 | { 90 | test: /\.woff(2)?(\?v=[0-9]\.[0-9]\.[0-9])?$/, 91 | loader: 'url-loader?limit=60000&mimetype=application/font-woff', 92 | }, 93 | { 94 | test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/, 95 | loader: 'url-loader?limit=60000', 96 | include: /fonts/, 97 | }, 98 | { 99 | test: /\.(png|jpg)$/, 100 | loader: 'url-loader?limit=8192', 101 | }, 102 | { 103 | test: /\.c$/i, 104 | loader: 'shader-loader', 105 | }, 106 | { 107 | test: /\.html$/, 108 | loader: 'html-loader', 109 | }, 110 | { 111 | test: /\.isvg$/, 112 | loader: 'html-loader?attrs=false', 113 | }, 114 | { 115 | test: /\.js$/, 116 | include: /paraviewweb/, 117 | loader: 'babel-loader?presets[]=env,presets[]=react,babelrc=false', 118 | }, 119 | { 120 | test: /\.js$/, 121 | include: /vtk.js/, 122 | loader: 'babel-loader?presets[]=env,presets[]=react,babelrc=false', 123 | }, 124 | { 125 | test: /\.js$/, 126 | include: /wslink/, 127 | loader: 'babel-loader?presets[]=env,babelrc=false', 128 | }, 129 | { 130 | test: /\.glsl$/, 131 | loader: 'shader-loader', 132 | }, 133 | { 134 | test: /\.js$/, 135 | exclude: /node_modules/, 136 | loader: 'babel-loader?presets[]=env,presets[]=react', 137 | }, 138 | ].concat({ 139 | test: /\.js$/, 140 | loader: 'eslint-loader', 141 | exclude: /node_modules/, 142 | enforce: 'pre', 143 | options: { configFile: eslintrcPath }, 144 | }), 145 | }, 146 | externals: { 147 | three: 'THREE', 148 | 'plotly.js': 'Plotly', 149 | }, 150 | resolve: { 151 | alias: { 152 | PVWStyle: path.resolve('./node_modules/paraviewweb/style'), 153 | }, 154 | }, 155 | }; 156 | --------------------------------------------------------------------------------
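
Note on the VTK volume examples above: each script re-declares the same updatePieceWise helper, and every copy ignores its pwf argument and mutates the module-level scalarOpacity instead. The sketch below is not a file from this repository; it is an illustrative consolidation (the snake_case names update_piecewise, data_range, half_spread are mine) that places the exact same opacity points as the in-file copies but writes through the argument it receives, so a single helper could be shared across the scripts.

from vtk import vtkPiecewiseFunction

def update_piecewise(pwf, data_range, center, half_spread):
    # Build a "tent" of opacity: 1.0 at `center`, falling to 0.0 at
    # center +/- half_spread, clamped to the ends of `data_range`.
    pwf.RemoveAllPoints()
    pwf.AddPoint(data_range[0], 0.0)
    if (center - half_spread) > data_range[0]:
        pwf.AddPoint(center - half_spread, 0.0)
    pwf.AddPoint(center, 1.0)
    if (center + half_spread) < data_range[1]:
        pwf.AddPoint(center + half_spread, 0.0)
    pwf.AddPoint(data_range[1], 0.0)

# Usage mirroring the sweep done in the example scripts: evenly spaced
# opacity windows across the scalar range, one window per captured layer.
data_range = [0.0, 255.0]
nb_steps = 10
half_spread = (data_range[1] - data_range[0]) / float(2 * nb_steps)
centers = [data_range[0] + half_spread * float(2 * i + 1) for i in range(nb_steps)]

scalar_opacity = vtkPiecewiseFunction()
for center in centers:
    update_piecewise(scalar_opacity, data_range, center, half_spread)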