├── LICENCE.txt ├── .gitmodules ├── tools ├── pointIdType.h ├── labelContour.py ├── trimTransform.py ├── CMakeLists.txt ├── grid.py ├── transformPoints.py ├── checkerBoard.py ├── DummyVolumeGenerator.cxx ├── MeshTransform.cxx ├── PointsTransform.cxx ├── AverageVolumes.cxx ├── flip_tool.py ├── transformIO.py ├── CheckDiffeomorphism.cxx ├── register.py ├── register2volumes.py ├── VolumeTransform.cxx └── transformIO.h ├── package.json ├── registration ├── image.cxx ├── CMakeLists.txt ├── image.h ├── point.h ├── stats.h ├── stats.cxx ├── imageGroup.h └── frog.cxx ├── match ├── CMakeLists.txt └── match.cpp ├── params.sh ├── CMakeLists.txt ├── js ├── bin │ └── LSRegistration.js ├── pointsViewer.js ├── lib │ ├── TextSpriteHelper.js │ ├── LSRegistration.js │ ├── FROG.js │ ├── heap.js │ └── laplaceSolver.js ├── pairwiseRigidRegistration.js └── stochasticRigidRegistration.js ├── .github └── workflows │ └── ci.yml ├── checkDiffeomorphism.sh ├── transform.sh ├── run.sh ├── Readme.md ├── frog.json └── FROG.py /LICENCE.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/valette/FROG/HEAD/LICENCE.txt -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "vtkOpenSURF3D"] 2 | path = vtkOpenSURF3D 3 | url = https://github.com/valette/vtkOpenSURF3D 4 | -------------------------------------------------------------------------------- /tools/pointIdType.h: -------------------------------------------------------------------------------- 1 | 2 | #ifdef INT_PTIDS 3 | typedef unsigned int pointIdType; 4 | #else 5 | typedef unsigned short pointIdType; 6 | #endif -------------------------------------------------------------------------------- /tools/labelContour.py: -------------------------------------------------------------------------------- 1 | import SimpleITK as sitk 2 | 3 | import 
sys 4 | 5 | file = sys.argv[1] 6 | image = sitk.ReadImage( file ) 7 | 8 | contour = sitk.LabelContourImageFilter() 9 | output = contour.Execute( image ) 10 | 11 | sitk.WriteImage( output , "output.nii.gz" ) 12 | sitk.WriteImage( output , "output.mhd" ) 13 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "FROG", 3 | "description": "a library to perform SURF-based registration", 4 | "license": "GPL-2.0", 5 | "version": "0.0.1", 6 | "dependencies": { 7 | "async": "1.5.2", 8 | "desk-client": "0.10.0", 9 | "heap": "0.2.7", 10 | "lodash": "4.17.21", 11 | "numeric": "1.2.6", 12 | "ttest": "4.0.0" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /registration/image.cxx: -------------------------------------------------------------------------------- 1 | #include "image.h" 2 | 3 | void Image::transformPoints( bool apply ) { 4 | 5 | for ( auto &point : this->points ) { 6 | 7 | this->transform->TransformPoint ( point.xyz, point.xyz2 ); 8 | if ( !apply ) continue; 9 | for ( int k = 0; k < 3; k++ ) point.xyz[ k ] = point.xyz2[ k ]; 10 | 11 | } 12 | 13 | } 14 | 15 | void Image::addPoints( vtkBoundingBox &box ) { 16 | 17 | for ( const auto &point : this->points ) 18 | box.AddPoint( point.xyz[ 0 ], point.xyz[ 1 ], point.xyz[ 2 ] ); 19 | 20 | } 21 | -------------------------------------------------------------------------------- /tools/trimTransform.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import json 3 | 4 | parser = argparse.ArgumentParser( description = 'Trims a tranfsorm : takes only the n first levels', formatter_class=argparse.ArgumentDefaultsHelpFormatter ) 5 | parser.add_argument( 'input', help = 'input transform' ) 6 | parser.add_argument( 'n', help = 'number of levels to keep', type = int ) 7 | args = 
parser.parse_args() 8 | 9 | with open( args.input ) as f: 10 | data = json.load( f ) 11 | 12 | with open('output.json', 'w') as output: 13 | json.dump( { "transforms" : data[ "transforms" ][ : args.n ] }, output ) -------------------------------------------------------------------------------- /tools/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | # find_package(VTK REQUIRED) 3 | # include ("${VTK_USE_FILE}") 4 | 5 | find_package( OpenMP REQUIRED ) 6 | set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}" ) 7 | set( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}" ) 8 | 9 | SET(TOOL_FILES 10 | AverageVolumes 11 | CheckDiffeomorphism 12 | DummyVolumeGenerator 13 | MeshTransform 14 | PointsTransform 15 | VolumeTransform 16 | ) 17 | 18 | FOREACH(loop_var ${TOOL_FILES}) 19 | ADD_EXECUTABLE( ${loop_var} ${loop_var}.cxx ) 20 | TARGET_LINK_LIBRARIES( ${loop_var} ${VTK_LIBRARIES} ) 21 | ENDFOREACH(loop_var) 22 | -------------------------------------------------------------------------------- /registration/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | # find_package(OpenCV REQUIRED) 3 | # find_package(VTK REQUIRED) 4 | find_package( Boost COMPONENTS filesystem REQUIRED iostreams REQUIRED ) 5 | 6 | # include ("${VTK_USE_FILE}") 7 | 8 | find_package(OpenMP) 9 | 10 | if (OPENMP_FOUND) 11 | set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}") 12 | set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}") 13 | endif (OPENMP_FOUND) 14 | 15 | include_directories(${Boost_INCLUDE_DIRS}) 16 | 17 | ADD_EXECUTABLE( frog frog.cxx image.cxx imageGroup.cxx stats.cxx ) 18 | TARGET_LINK_LIBRARIES(frog ${VTK_LIBRARIES} stdc++fs ${Boost_LIBRARIES} ) 19 | 20 | -------------------------------------------------------------------------------- /registration/image.h: -------------------------------------------------------------------------------- 1 | #include 2 | 3 
| #include 4 | #include 5 | #include 6 | #include 7 | 8 | #include "point.h" 9 | 10 | class Image { 11 | public: 12 | 13 | double refTranslation[ 3 ]; 14 | std::vector< Point > points; 15 | Stats stats; 16 | 17 | void addPoints( vtkBoundingBox &box ); 18 | 19 | void transformPoints( bool apply = false ); 20 | 21 | vtkAbstractTransform *transform; 22 | vtkSmartPointer gradient; 23 | 24 | vtkSmartPointer allTransforms; 25 | 26 | Image () : transform( 0 ) {}; 27 | 28 | }; 29 | -------------------------------------------------------------------------------- /registration/point.h: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | #include "stats.h" 5 | #include "../tools/pointIdType.h" 6 | 7 | #define print( v, size ) { for (int __i = 0; __i < size; __i++ ) { std::cout << v[ __i ]; if ( __i < ( size - 1 ) ) std::cout<< " " ;} std::cout << std::endl;} 8 | 9 | typedef unsigned short imageIdType; 10 | 11 | struct Link { 12 | 13 | imageIdType image; 14 | pointIdType point; 15 | 16 | }; 17 | 18 | 19 | class Point { 20 | 21 | public: 22 | 23 | float other[ 3 ]; // scale, laplacian sign, detector response 24 | 25 | float xyz[3]; // current coordinates 26 | float xyz2[3]; // transformed coordinates 27 | 28 | std::vector< Link > links; // links 29 | 30 | std::vector< Link > hardLinks; // hard constraints 31 | 32 | }; 33 | -------------------------------------------------------------------------------- /match/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | find_package( OpenMP REQUIRED ) 2 | find_package( Boost COMPONENTS filesystem REQUIRED iostreams REQUIRED ) 3 | 4 | option(USE_SSE_FOR_MATCHING "Use SSE instructions for matching (may crash on old computers)" OFF) 5 | 6 | if(USE_SSE_FOR_MATCHING) 7 | 8 | add_definitions(-D USE_SSE_FOR_MATCHING) 9 | set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS} -mavx" ) 10 | set( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} 
${OpenMP_CXX_FLAGS} -mavx" ) 11 | 12 | else(USE_SSE_FOR_MATCHING) 13 | 14 | set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}" ) 15 | set( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}" ) 16 | 17 | endif(USE_SSE_FOR_MATCHING) 18 | 19 | include_directories(${Boost_INCLUDE_DIRS}) 20 | ADD_EXECUTABLE( match match.cpp ) 21 | target_link_libraries(match ${Boost_LIBRARIES} ${VTK_LIBRARIES}) 22 | -------------------------------------------------------------------------------- /tools/grid.py: -------------------------------------------------------------------------------- 1 | import SimpleITK as sitk 2 | 3 | import sys 4 | finalSpacing = 0.5; 5 | gridSpacing = 30 / finalSpacing; 6 | sigma = 0.5 7 | 8 | file = sys.argv[1] 9 | image = sitk.ReadImage( file ) 10 | gridSource = sitk.GridImageSource() 11 | 12 | gridSource.SetGridSpacing( [ gridSpacing, gridSpacing, gridSpacing ] ) 13 | gridSource.SetOrigin( image.GetOrigin() ); 14 | spacing = image.GetSpacing(); 15 | gridSource.SetSigma( [ sigma, sigma, sigma] ); 16 | gridSource.SetDirection( image.GetDirection() ); 17 | size = image.GetSize(); 18 | 19 | s =[ 0,0,0]; 20 | sp =[ 0,0,0]; 21 | 22 | for i in [ 0, 1, 2 ]: 23 | s[ i ] = int ( round( size[ i ] * spacing[ i ] / finalSpacing ) + 1 ); 24 | sp[ i ] = finalSpacing; 25 | 26 | gridSource.SetSize( s ); 27 | gridSource.SetSpacing( sp ); 28 | output = gridSource.Execute() 29 | sitk.WriteImage( output , "output.nii.gz" ) 30 | -------------------------------------------------------------------------------- /params.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Inspired by Niftireg script : https://sourceforge.net/p/niftyreg/git/ci/master/tree/reg-apps/groupwise_niftyreg_params.sh 4 | 5 | # Array that contains the input images to create the atlas 6 | export IMG_INPUT=(`ls ~/path/to/images/*.nii.gz`) 7 | 8 | # folder where the results will be written 9 | export RES_FOLDER=`pwd`/output 10 | 11 | # SURF 
parameters 12 | export SPACING=0.75 13 | export THRESHOLD=0; 14 | export NPOINTS=20000; 15 | export SURF_OTHER_PARAMS="" # other possible parameters, see documentation here: https://github.com/valette/vtkOpenSURF3D 16 | 17 | # MATCH parameters 18 | export MAX_DISTANCE=1 19 | export MATCH_OTHER_PARAMS="" # other possible match parameters 20 | 21 | # FROG parameters 22 | 23 | #export LANDMARKS=/path/to/landmarks # path to landmarks to measure registration quality 24 | export REGISTRATION_OTHER_PARAMS="" 25 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.30) 2 | project(FROG) 3 | 4 | SET (LIBRARY_OUTPUT_PATH ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single output directory for building all libraries.") 5 | SET (EXECUTABLE_OUTPUT_PATH ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single output directory for building all executables.") 6 | 7 | OPTION(BUILD_SHARED_LIBS "Build FROG with shared libraries." 
ON) 8 | OPTION(USE_UINT_FOR_POINT_IDS "Use unsigned ints for point ids" ON) 9 | 10 | IF (USE_UINT_FOR_POINT_IDS) 11 | ADD_DEFINITIONS(-D INT_PTIDS) 12 | ENDIF (USE_UINT_FOR_POINT_IDS) 13 | 14 | find_package(OpenCV REQUIRED) 15 | find_package(VTK REQUIRED COMPONENTS CommonSystem FiltersGeneral FiltersHybrid ImagingColor IOGeometry IOExport IOPLY ) 16 | include_directories(${VTK_INCLUDE_DIRS}) 17 | 18 | ADD_SUBDIRECTORY(match) 19 | ADD_SUBDIRECTORY(registration) 20 | ADD_SUBDIRECTORY(tools) 21 | ADD_SUBDIRECTORY(vtkOpenSURF3D) 22 | -------------------------------------------------------------------------------- /tools/transformPoints.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from transformIO import readTransform 3 | import os 4 | from os import listdir 5 | from os.path import isdir, join 6 | 7 | parser = argparse.ArgumentParser( description = "Transforms points" , formatter_class=argparse.ArgumentDefaultsHelpFormatter ) 8 | parser.add_argument( 'inputPoints', help = 'input points' ) 9 | parser.add_argument( 'transform', help = 'transform to apply' ) 10 | args = parser.parse_args() 11 | 12 | transform = readTransform( args.transform ) 13 | file = open( args.inputPoints, mode = 'r') 14 | outputPoints = []; 15 | 16 | for line in file.readlines() : 17 | coords = [ float( n.strip() ) for n in line.split( "," ) ] 18 | coords2 = transform.TransformPoint( coords ) 19 | output = str( coords2[ 0 ] ) + "," + str( coords2[ 1 ] ) + "," + str( coords2[ 2 ] ) 20 | outputPoints.append( output ) 21 | 22 | with open( 'output.xyz', 'w') as f: 23 | f.write( '\n'.join( outputPoints ) ) 24 | -------------------------------------------------------------------------------- /tools/checkerBoard.py: -------------------------------------------------------------------------------- 1 | import SimpleITK as sitk 2 | import sys 3 | 4 | gridSpacing = 30; 5 | 6 | file = sys.argv[1] 7 | image = sitk.ReadImage( file ) 8 | 9 | size = 
image.GetSize(); 10 | black = sitk.Image( size, sitk.sitkUInt8 ) 11 | black.SetOrigin( image.GetOrigin() ) 12 | spacing = image.GetSpacing(); 13 | black.SetSpacing( spacing ) 14 | black.SetDirection( image.GetDirection() ) 15 | 16 | threshold = sitk.ThresholdImageFilter() 17 | threshold.SetLower( 10 ) 18 | threshold.SetOutsideValue ( 100 ) 19 | white = threshold.Execute( black ) 20 | 21 | threshold.SetOutsideValue ( 50 ) 22 | grey = threshold.Execute( black ) 23 | 24 | checker = sitk.CheckerBoardImageFilter(); 25 | pattern = [ 0, 0, 0 ] 26 | 27 | 28 | for i in [ 0, 1, 2 ] : 29 | pattern[ i ] = int( size[ i ] * spacing[ i ] / gridSpacing ) 30 | 31 | pattern[ 0 ] = 1 32 | 33 | print pattern 34 | checker.SetCheckerPattern( pattern ); 35 | board = checker.Execute( grey, white ); 36 | sitk.WriteImage( board , "output.nii.gz" ) 37 | -------------------------------------------------------------------------------- /js/bin/LSRegistration.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | 'use strict'; 4 | 5 | const async = require ( 'async' ), 6 | fs = require ( 'fs' ), 7 | LSRegistration = require ( __dirname + '/../lib/LSRegistration.js' ), 8 | { promisify } = require ( 'util' ); 9 | 10 | ( async function () { 11 | 12 | try { 13 | 14 | const inputFile = process.argv[ 2 ]; 15 | console.log( "Loading : " + inputFile ); 16 | console.time( "total computation" ); 17 | const obj = require( inputFile ); 18 | const volumes = obj.volumes || obj; 19 | console.log( "Computing registration..." ); 20 | const registration = new LSRegistration(); 21 | registration.registerAsync = promisify( registration.register ); 22 | const res = await registration.registerAsync( volumes ); 23 | res.volumes = volumes; 24 | fs.writeFileSync( "registration.json" , JSON.stringify( res ) ); 25 | console.log( "... 
Done" ); 26 | console.timeEnd( "total computation" ); 27 | process.exit( 0 ); 28 | 29 | } catch ( e ) { 30 | 31 | console.log( e ); 32 | process.exit( 1 ); 33 | 34 | } 35 | 36 | } )(); 37 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | #from https://github.com/Kitware/vtk-ci-example/blob/master/.github/workflows/ci.yml 2 | name: CI 3 | 4 | on: 5 | pull_request: 6 | types: [opened, synchronize, reopened, ready_for_review] 7 | push: 8 | branches: 9 | - master 10 | schedule: 11 | # every two months" 12 | - cron: '0 0 1 */2 *' 13 | 14 | 15 | jobs: 16 | build_and_test: 17 | if: github.event.pull_request.draft == false 18 | 19 | runs-on: ubuntu-latest 20 | strategy: 21 | matrix: 22 | version: [ v9.0, v9.1, v9.2, v9.3, v9.4, latest ] 23 | container: kitware/vtk-for-ci:${{ matrix.version }} 24 | 25 | steps: 26 | 27 | - name: Checkout 28 | uses: actions/checkout@v2 29 | with: 30 | path: 'source' 31 | fetch-depth: 0 32 | lfs: 'false' 33 | submodules: 'recursive' 34 | 35 | - name: Install opencv and boost 36 | run: | 37 | apt-get update 38 | apt-get -y install libopencv-dev libboost-dev libboost-filesystem-dev libboost-iostreams-dev 39 | 40 | - name: Setup Directories 41 | working-directory: ${{github.workspace}} 42 | run: mkdir build 43 | 44 | - name: Configure 45 | working-directory: ${{github.workspace}}/build 46 | run: cmake -DCMAKE_PREFIX_PATH=/opt/vtk/install/ -DBUILD_TESTING=ON -DCMAKE_CXX_FLAGS="-lstdc++fs -std=c++17" -DCMAKE_BUILD_TYPE=Release ../source 47 | 48 | - name: Build 49 | working-directory: ${{github.workspace}}/build 50 | run: cmake --build . 
--parallel 2 51 | 52 | -------------------------------------------------------------------------------- /tools/DummyVolumeGenerator.cxx: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | 5 | #include "../vtkOpenSURF3D/picojson.h" 6 | 7 | using namespace picojson; 8 | 9 | int main( int argc, char *argv[] ) { 10 | 11 | if ( argc < 3 ) { 12 | 13 | cout << "Usage : DummyVolumeGenerator bbox.json spacing" << endl; 14 | exit( -2 ); 15 | 16 | } 17 | 18 | char *file = argv[ 1 ]; 19 | double spacing = atof( argv[ 2 ] ); 20 | 21 | std::ifstream bboxFile( file ); 22 | std::string str, str2; 23 | 24 | while( bboxFile ){ 25 | 26 | std::getline( bboxFile, str2 ); 27 | str.append( str2 ); 28 | 29 | } 30 | 31 | bboxFile.close(); 32 | picojson::value v; 33 | std::string err; 34 | picojson::parse( v, str.c_str(), str.c_str() + strlen(str.c_str()), &err ); 35 | 36 | if ( !err.empty() ) { 37 | 38 | std::cerr << err << std::endl; 39 | 40 | } 41 | 42 | object trans = v.get(); 43 | array bbox = trans[ "bbox" ].get(); 44 | array aMin = bbox[ 0 ].get(); 45 | array aMax = bbox[ 1 ].get(); 46 | double min[ 3 ], max[ 3 ]; 47 | 48 | for ( int i = 0; i < 3; i++ ) { 49 | 50 | min[ i ] = aMin[ i ].get(); 51 | max[ i ] = aMax[ i ].get(); 52 | 53 | } 54 | 55 | vtkImageData *Volume = vtkImageData::New(); 56 | Volume->SetOrigin( min[ 0 ], min[ 1 ], min[ 2 ] ); 57 | Volume->SetSpacing( spacing, spacing, spacing ); 58 | int dims[ 3 ]; 59 | 60 | for ( int i = 0; i < 3; i++ ) { 61 | 62 | dims[ i ] = ceil( ( max[ i ] - min[ i ] ) / spacing ); 63 | 64 | } 65 | 66 | Volume->SetDimensions( dims ); 67 | Volume->AllocateScalars(VTK_FLOAT, 1); 68 | vtkMetaImageWriter *writer = vtkMetaImageWriter::New(); 69 | writer->SetInputData(Volume); 70 | writer->SetFileName( "dummy.mhd" ); 71 | writer->Write(); 72 | Volume->Delete(); 73 | 74 | } 75 | -------------------------------------------------------------------------------- 
/tools/MeshTransform.cxx: -------------------------------------------------------------------------------- 1 | /*========================================================================= 2 | 3 | Program: MeshTransform : Transform a mesh 4 | Module: FROG 5 | Language: C++ 6 | Date: 2021/06 7 | Auteur: Sebastien Valette 8 | 9 | =========================================================================*/ 10 | // .NAME MeshTransform 11 | // .SECTION Description 12 | 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | 20 | #include "transformIO.h" 21 | 22 | int main( int argc, char *argv[] ) { 23 | if ( argc < 3 ) { 24 | std::cout << "Usage : MeshTransform source [-t transform] [-ti inverse_transform] [-o outputFileName]" << std::endl; 25 | exit( 1 ); 26 | } 27 | 28 | const char *outputFile = "output.obj" ; 29 | 30 | vtkGeneralTransform *transform = vtkGeneralTransform::New(); 31 | transform->Identity(); 32 | 33 | int argumentsIndex = 2; 34 | vtkTimerLog *timer = vtkTimerLog::New(); 35 | timer->StartTimer(); 36 | 37 | while ( argumentsIndex < argc ) { 38 | 39 | char * key = argv[ argumentsIndex ]; 40 | char * value = argv[ argumentsIndex + 1 ]; 41 | 42 | if ( strcmp( key ,"-t" ) == 0 ) { 43 | transform->Concatenate( readTransform( value ) ); 44 | } 45 | 46 | if ( strcmp( key ,"-ti" ) == 0 ) { 47 | vtkGeneralTransform *trans2 = readTransform( value ); 48 | trans2->Inverse(); 49 | transform->Concatenate( trans2 ); 50 | } 51 | 52 | if ( strcmp( key ,"-o" ) == 0 ) { 53 | outputFile = value; 54 | } 55 | 56 | argumentsIndex += 2; 57 | } 58 | 59 | 60 | vtkTransformPolyDataFilter *polyDataTransform = vtkTransformPolyDataFilter::New(); 61 | polyDataTransform->SetTransform( transform ); 62 | polyDataTransform->SetInputData( ReadPolyData( argv[ 1 ] ) ); 63 | polyDataTransform->Update(); 64 | WritePolyData( polyDataTransform->GetOutput(), outputFile ); 65 | timer->StopTimer(); 66 | std::cout << "Transform computed in " << timer->GetElapsedTime() 
<< "s" << std::endl; 67 | 68 | 69 | } 70 | -------------------------------------------------------------------------------- /tools/PointsTransform.cxx: -------------------------------------------------------------------------------- 1 | /*========================================================================= 2 | 3 | Program: PointsTransform : Transform a point 4 | Module: FROG 5 | Language: C++ 6 | Date: 2021/05 7 | Auteur: Sebastien Valette 8 | 9 | =========================================================================*/ 10 | // .NAME PointsTransform 11 | // .SECTION Description 12 | 13 | #include 14 | #include 15 | #include "transformIO.h" 16 | 17 | int main( int argc, char *argv[] ) { 18 | 19 | if ( argc < 3 ) { 20 | std::cout << "Usage : PointsTransform [-p x y z] [-t transform] [-ti inverse_transform] [-o outputFileName]" << std::endl; 21 | exit( 1 ); 22 | } 23 | 24 | char *outputFile = 0; 25 | vtkGeneralTransform *transform = vtkGeneralTransform::New(); 26 | transform->Identity(); 27 | 28 | int argumentsIndex = 1; 29 | double *point = 0; 30 | 31 | while ( argumentsIndex < argc ) { 32 | 33 | char * key = argv[ argumentsIndex ]; 34 | char * value = argv[ argumentsIndex + 1 ]; 35 | 36 | if ( strcmp( key ,"-t" ) == 0 ) { 37 | transform->Concatenate( readTransform( value ) ); 38 | } 39 | 40 | if ( strcmp( key ,"-ti" ) == 0 ) { 41 | vtkGeneralTransform *trans2 = readTransform( value ); 42 | trans2->Inverse(); 43 | transform->Concatenate( trans2 ); 44 | } 45 | 46 | if ( strcmp( key ,"-o" ) == 0 ) { 47 | outputFile = value; 48 | } 49 | 50 | if ( strcmp( key ,"-p" ) == 0 ) { 51 | 52 | point = new double[ 3 ]; 53 | 54 | for ( int i = 0; i < 3; i++ ) { 55 | 56 | point[ i ] = atof( argv[ argumentsIndex + 1 + i ] ); 57 | 58 | } 59 | 60 | argumentsIndex += 2; 61 | 62 | } 63 | 64 | argumentsIndex += 2; 65 | 66 | } 67 | 68 | if ( point ) { 69 | 70 | double newPoint[ 3 ]; 71 | cout << "Input point : " << point[ 0 ] << " " << point[ 1 ] 72 | << " " << point[ 2 ] << endl; 
73 | transform->TransformPoint( point, newPoint ); 74 | cout << "Output point : " << newPoint[ 0 ] << " " << newPoint[ 1 ] 75 | << " " << newPoint[ 2 ] << endl; 76 | 77 | 78 | } 79 | 80 | 81 | if ( point ) delete [] point; 82 | 83 | } 84 | -------------------------------------------------------------------------------- /checkDiffeomorphism.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | start=`date +%s` 4 | 5 | function launch { 6 | 7 | echo "*************************************************************" 8 | echo "Executing : $1 " 9 | $1 10 | RET=$? 11 | echo "*************************************************************" 12 | return $RET 13 | } 14 | 15 | # Inspired from NiftyReg script : https://sourceforge.net/p/niftyreg/git/ci/master/tree/reg-apps/groupwise_niftyreg_run.sh 16 | 17 | if [ $# -lt 1 ] 18 | then 19 | echo "" 20 | echo "*******************************************************************************" 21 | echo "At least one argument is expected to run this script:" 22 | echo "- File with contains parameters" 23 | echo "- spacing (optional)" 24 | echo "example: $0 params.sh 2 " 25 | echo "*******************************************************************************" 26 | echo "" 27 | exit 28 | fi 29 | 30 | # read input parameters 31 | . $1 32 | 33 | # check arguments 34 | 35 | if [ ${#IMG_INPUT[@]} -lt 2 ] 36 | then 37 | echo "Less than 2 images have been specified" 38 | echo "Exit ..." 
39 | exit 40 | fi 41 | 42 | IMG_NUMBER=${#IMG_INPUT[@]} 43 | 44 | echo "" 45 | echo "******************************************************" 46 | echo ">>> There are ${IMG_NUMBER} input images :" 47 | 48 | for (( CUR_IT=0; CUR_IT<${IMG_NUMBER}; CUR_IT++ )) 49 | do 50 | echo ${IMG_INPUT[CUR_IT]} 51 | done 52 | echo "******************************************************" 53 | 54 | # SETUP EXECUTABLES 55 | ROOT_DIR=$(cd `dirname $0` && pwd) 56 | CHECKDIFFEOMORPHISM=$ROOT_DIR/bin/CheckDiffeomorphism 57 | 58 | cd $RES_FOLDER 59 | N=0; 60 | SPACING=${2:--1} 61 | 62 | for (( CUR_IT=0; CUR_IT<${IMG_NUMBER}; CUR_IT++ )) 63 | do 64 | IMG=${IMG_INPUT[CUR_IT]}; 65 | TRANS=transformed$CUR_IT.nii.gz 66 | launch "$CHECKDIFFEOMORPHISM $IMG tfm/$CUR_IT.tfm $SPACING" 67 | if [ $? -ne 0 ] 68 | then 69 | N=1 70 | fi 71 | done 72 | 73 | end=`date +%s` 74 | runtime=$((end-start)) 75 | echo "Total processing time : $runtime seconds" 76 | 77 | if [ $N -eq 0 ] 78 | then 79 | echo "All transforms are diffeomorphic" 80 | else 81 | echo "Some transforms are not diffeomorphic. 
Please check log" 82 | fi 83 | -------------------------------------------------------------------------------- /transform.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | start=`date +%s` 4 | 5 | function launch { 6 | 7 | echo "*************************************************************" 8 | echo "Executing : $1 " 9 | $1 10 | echo "*************************************************************" 11 | 12 | } 13 | 14 | # Inspired from NiftyReg script : https://sourceforge.net/p/niftyreg/git/ci/master/tree/reg-apps/groupwise_niftyreg_run.sh 15 | 16 | if [ $# -lt 2 ] 17 | then 18 | echo "" 19 | echo "*******************************************************************************" 20 | echo "Two arguments are expected to run this script:" 21 | echo "- File with contains parameters" 22 | echo "- output spacing" 23 | echo "example: $0 params.sh 2 " 24 | echo "*******************************************************************************" 25 | echo "" 26 | exit 27 | fi 28 | 29 | # read input parameters 30 | . $1 31 | 32 | # check arguments 33 | 34 | if [ ${#IMG_INPUT[@]} -lt 2 ] 35 | then 36 | echo "Less than 2 images have been specified" 37 | echo "Exit ..." 
38 | exit 39 | fi 40 | 41 | IMG_NUMBER=${#IMG_INPUT[@]} 42 | 43 | echo "" 44 | echo "******************************************************" 45 | echo ">>> There are ${IMG_NUMBER} input images to transform :" 46 | 47 | for (( CUR_IT=0; CUR_IT<${IMG_NUMBER}; CUR_IT++ )) 48 | do 49 | echo ${IMG_INPUT[CUR_IT]} 50 | done 51 | echo "******************************************************" 52 | 53 | # SETUP EXECUTABLES 54 | ROOT_DIR=$(cd `dirname $0` && pwd) 55 | AVERAGEVOLUMES=$ROOT_DIR/bin/AverageVolumes 56 | DUMMYVOLUMEGENERATOR=$ROOT_DIR/bin/DummyVolumeGenerator 57 | VOLUMETRANSFORM=$ROOT_DIR/bin/VolumeTransform 58 | 59 | cd $RES_FOLDER 60 | 61 | launch "$DUMMYVOLUMEGENERATOR bbox.json $2" 62 | files=""; 63 | 64 | for (( CUR_IT=0; CUR_IT<${IMG_NUMBER}; CUR_IT++ )) 65 | do 66 | IMG=${IMG_INPUT[CUR_IT]}; 67 | TRANS=transformed$CUR_IT.nii.gz 68 | if [ ! -e "$TRANS" ]; then 69 | launch "$VOLUMETRANSFORM $IMG dummy.mhd -t transforms/$CUR_IT.json -o $TRANS" 70 | else 71 | echo "Transformed file $TRANS already exists, skip transformation" 72 | fi 73 | files+=" $TRANS" 74 | done 75 | 76 | launch "$AVERAGEVOLUMES $files" 77 | 78 | end=`date +%s` 79 | runtime=$((end-start)) 80 | echo "Total processing time : $runtime seconds" -------------------------------------------------------------------------------- /tools/AverageVolumes.cxx: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | #include "../vtkOpenSURF3D/vtkRobustImageReader.h" 9 | 10 | int main( int argc, char *argv[] ) { 11 | 12 | vtkNew reader; 13 | vtkNew average, stdev; 14 | vtkNew cast; 15 | cast->SetOutputScalarTypeToFloat(); 16 | float nImages = argc - 1; 17 | 18 | for ( int i = 1; i < argc; i++ ) { 19 | 20 | char *file = argv[ i ]; 21 | std::cout << "load : " << file << std::endl; 22 | reader->SetFileName( file ); 23 | reader->Update(); 24 | vtkSmartPointer image = reader->GetOutput(); 25 | int dims[ 
3 ]; 26 | image->GetDimensions( dims ); 27 | int nbVoxels = dims[ 0 ] * dims[ 1 ] * dims[ 2 ]; 28 | 29 | if ( i == 1 ) { 30 | 31 | average->DeepCopy( image ); 32 | stdev->DeepCopy( image ); 33 | average->AllocateScalars(VTK_FLOAT, 1); 34 | stdev->AllocateScalars(VTK_FLOAT, 1); 35 | float *ptrOut = ( float * ) average->GetScalarPointer(); 36 | float *ptrOut2 = ( float * ) average->GetScalarPointer(); 37 | 38 | for ( int j = 0; j < nbVoxels; j++ ) { 39 | 40 | ptrOut[ j ] = 0; 41 | ptrOut2[ j ] = 0; 42 | 43 | } 44 | 45 | } 46 | 47 | cast->SetInputData( image ); 48 | cast->Update(); 49 | float *ptrIn = ( float * ) cast->GetOutput()->GetScalarPointer(); 50 | float *ptrOut = ( float * ) average->GetScalarPointer(); 51 | float *ptrOut2 = ( float * ) stdev->GetScalarPointer(); 52 | 53 | for ( int j = 0; j < nbVoxels; j++ ) { 54 | 55 | ptrOut[ j ] += ptrIn[ j ] / nImages; 56 | ptrOut2[ j ] += ( ptrIn[ j ] * ptrIn[ j ] ) / nImages; 57 | 58 | } 59 | 60 | } 61 | 62 | vtkNew writer; 63 | writer->SetInputData( average ); 64 | writer->SetFileName( "average.nii.gz" ); 65 | writer->Write(); 66 | 67 | float *ptrOut = ( float * ) average->GetScalarPointer(); 68 | float *ptrOut2 = ( float * ) stdev->GetScalarPointer(); 69 | int dims[ 3 ]; 70 | average->GetDimensions( dims ); 71 | int nbVoxels = dims[ 0 ] * dims[ 1 ] * dims[ 2 ]; 72 | 73 | for ( int j = 0; j < nbVoxels; j++ ) { 74 | 75 | ptrOut2[ j ] = sqrt( ptrOut2[ j ] - ptrOut[ j ] * ptrOut[ j ] ); 76 | 77 | } 78 | 79 | writer->SetInputData( stdev ); 80 | writer->SetFileName( "stdev.nii.gz" ); 81 | writer->Write(); 82 | 83 | } 84 | -------------------------------------------------------------------------------- /tools/flip_tool.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import nibabel as nib 3 | 4 | def flip_image( filename, output = "output.nii.gz", orientation = "RAS", threshold = None, fake_orientation = None ): 5 | 6 | image_obj = nib.load( filename ) 7 | if 
orientation == None : orientation = "RAS" 8 | current_orientation = nib.aff2axcodes( image_obj.affine ) 9 | print( "Current orientation is : ", current_orientation ) 10 | 11 | if not fake_orientation and ( current_orientation == tuple( orientation ) ): 12 | return filename 13 | 14 | else : 15 | img_orentation = nib.orientations.io_orientation( image_obj.affine ) 16 | wanted_orientation = nib.orientations.axcodes2ornt( orientation ) 17 | transform = nib.orientations.ornt_transform( img_orentation, wanted_orientation ) 18 | flipped = image_obj.as_reoriented( transform ) 19 | affine = flipped.affine 20 | if fake_orientation: 21 | wanted_orientation2 = nib.orientations.axcodes2ornt( fake_orientation ) 22 | transform2 = nib.orientations.ornt_transform( img_orentation, wanted_orientation2 ) 23 | flipped2 = image_obj.as_reoriented( transform2 ) 24 | affine = flipped2.affine 25 | print( "Real orientation is : ", nib.aff2axcodes( flipped.affine ) ) 26 | 27 | 28 | #Set Qcode to 1 that the Qform matrix can be used into the further processing 29 | flipped.header['qform_code'] = 1 30 | img_data = flipped.get_fdata() 31 | if threshold : img_data[ img_data < threshold ] = 0 32 | new_img = img_data.astype( flipped.header.get_data_dtype() ) 33 | img_conv = nib.Nifti1Image( new_img, affine, flipped.header) 34 | nib.save( img_conv, output ) 35 | print("New orientation is : ", nib.aff2axcodes( img_conv.affine )) 36 | return output 37 | 38 | 39 | if __name__ == '__main__': 40 | parser = argparse.ArgumentParser(description='Flip NIFTI image') 41 | parser.add_argument('--orientation', help = "Output volume orientation" ) 42 | parser.add_argument('--fake_orientation', help = "Fake output volume orientation" ) 43 | parser.add_argument('--threshold', help = "Threshold", type = float ) 44 | parser.add_argument('input_volume', help = "Input volume" ) 45 | parser.add_argument('--output', help = "Output file name", default = "output.nii.gz" ) 46 | args = parser.parse_args() 47 | flip_image( 
args.input_volume, output = args.output, orientation = args.orientation, fake_orientation = args.fake_orientation, threshold = args.threshold ) 48 | -------------------------------------------------------------------------------- /tools/transformIO.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | import vtk 3 | import json 4 | 5 | def readNIFTI( file ): 6 | reader = vtk.vtkNIFTIImageReader() 7 | reader.SetFileName( file ) 8 | reader.Update() 9 | image = reader.GetOutput() 10 | origin = [ 0, 0, 0 ] 11 | qForm = reader.GetQFormMatrix() 12 | if not qForm : qForm = vtkMatrix4x4() 13 | 14 | for i in range( 3 ) : 15 | origin[ i ] = qForm.GetElement( i, 3 ) 16 | 17 | image.SetOrigin( origin ) 18 | return image 19 | 20 | def readTransform( file ): 21 | 22 | transforms = vtk.vtkGeneralTransform() 23 | transforms.PostMultiply(); 24 | 25 | with open( file ) as content: 26 | 27 | data = json.load( content ) 28 | 29 | for transform in data[ 'transforms' ]: 30 | 31 | ttype = transform[ 'type' ] 32 | 33 | if ttype == "vtkMatrixToLinearTransform" : 34 | 35 | linear = vtk.vtkMatrixToLinearTransform() 36 | matrix = vtk.vtkMatrix4x4(); 37 | array = transform[ "matrix" ]; 38 | index = 0; 39 | 40 | for i in range ( 0, 4 ): 41 | for j in range ( 0, 4 ): 42 | matrix.SetElement( i, j, array[ index ] ); 43 | index += 1 44 | 45 | linear.SetInput( matrix ); 46 | linear.Update(); 47 | transforms.Concatenate( linear ); 48 | 49 | elif ttype == "vtkBSplineTransform" : 50 | 51 | coefficients = vtk.vtkImageData() 52 | 53 | if "file" in transform : 54 | 55 | niiFile = transform[ "file" ] 56 | transDir = os.path.dirname( file ) 57 | image = readNIFTI( os.path.join( transDir, niiFile ) ) 58 | coefficients.ShallowCopy( image ) 59 | 60 | else : 61 | 62 | dimensions = transform[ "dimensions" ] 63 | origin = transform[ "origin" ] 64 | spacing = transform[ "spacing" ] 65 | 66 | coefficients.SetOrigin( origin ) 67 | coefficients.SetSpacing( spacing 
) 68 | coefficients.SetDimensions( dimensions ) 69 | coefficients.AllocateScalars( vtk.VTK_FLOAT, 3 ) 70 | scalars = coefficients.GetPointData().GetScalars() 71 | coeffs = transform[ "coeffs" ] 72 | n = 3 * dimensions[ 0 ] * dimensions[ 1 ] * dimensions[ 2 ]; 73 | 74 | for i in range( 0, n ) : 75 | scalars.SetValue( i, coeffs[ i ] ) 76 | 77 | bspline = vtk.vtkBSplineTransform() 78 | bspline.SetCoefficientData( coefficients ) 79 | bspline.Update() 80 | transforms.Concatenate( bspline ) 81 | 82 | else : 83 | 84 | print( "Error : transform type ", ttype, " not supported" ) 85 | exit( 1 ) 86 | 87 | return transforms 88 | -------------------------------------------------------------------------------- /registration/stats.h: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | 6 | #ifndef STATS_H 7 | #define STATS_H 8 | 9 | // compute Chi probability (from normalized values) 10 | inline float chipdf( const float &x ) { 11 | 12 | float c = 0.797884560802865; 13 | float x2 = x*x; 14 | return c * x2 *exp( - 0.5 * x2 ); 15 | 16 | } 17 | 18 | class Stats { 19 | 20 | public: 21 | 22 | static int maxSize; 23 | static int maxIterations; 24 | static float epsilon; 25 | 26 | std::vector< float > samples; 27 | std::vector< float > weights; 28 | int size; 29 | int virtualSize; 30 | float c1,c2,ratio; 31 | std::mt19937 rng; 32 | bool needsRandom; 33 | 34 | std::vector < float > histogram; 35 | 36 | void addSlot() { 37 | 38 | this->virtualSize++; 39 | 40 | if ( this->virtualSize > Stats::maxSize ) { 41 | 42 | this->needsRandom = true; 43 | return; 44 | 45 | } 46 | 47 | this->samples.push_back( -1 ); 48 | this->weights.push_back( -1 ); 49 | 50 | }; 51 | 52 | void reset() { 53 | 54 | this->size = 0; 55 | 56 | }; 57 | 58 | void addSample( const float &sample, const float &weight = 1 ) { 59 | 60 | if ( !this->needsRandom ) { 61 | 62 | this->samples[ this->size ] = sample; 63 | this->weights[ this->size ] = 
weight; 64 | this->size++; 65 | return; 66 | 67 | } 68 | 69 | if ( this->size == this->samples.size() ) return; 70 | float random = ( float ) this->rng() / this->rng.max(); 71 | if ( random > (float) this->samples.size() / this->virtualSize ) return; 72 | this->samples[ this->size ] = sample; 73 | this->weights[ this->size ] = weight; 74 | this->size++; 75 | 76 | }; 77 | 78 | void getHistogram( float binSize = 1 ); 79 | void saveHistogram( const char *file ); 80 | void saveSamples( const char *file, int subsampling = 1 ); 81 | void estimateDistribution(); 82 | void displayParameters(); 83 | 84 | inline float getInlierProbability( const float &d ) { 85 | 86 | const float eps = 1e-10; 87 | if ( d < 0.1 ) return 1; // fix for very small distances 88 | float x1 = ratio * chipdf( d / ( c1 + eps ) ) / ( c1 + eps ); 89 | float x2 = ( 1.0 - ratio ) * chipdf( d / ( c2 + eps ) )/ ( c2 + eps); 90 | return x1 / ( x1 + x2 + eps ); 91 | 92 | }; 93 | 94 | Stats() : c1( 10 ), c2( 300 ), ratio ( 0.5 ), needsRandom( false ), 95 | virtualSize( 0 ) { 96 | 97 | this->rng.seed( 0 ); 98 | 99 | }; 100 | 101 | }; 102 | 103 | #endif 104 | -------------------------------------------------------------------------------- /tools/CheckDiffeomorphism.cxx: -------------------------------------------------------------------------------- 1 | /*====================================================================== 2 | 3 | Program: CheckDiffeomorphism : Check that a transform is diffeomorphic 4 | Module: FROG 5 | Language: C++ 6 | Date: 2019/04 7 | Auteur: Sebastien Valette 8 | 9 | ======================================================================*/ 10 | // .NAME VolumeTransform 11 | // .SECTION Description 12 | 13 | #include 14 | #include 15 | #include 16 | #include 17 | 18 | #include "../vtkOpenSURF3D/vtkRobustImageReader.h" 19 | #include "transformIO.h" 20 | 21 | int main( int argc, char *argv[] ) { 22 | 23 | if ( argc < 3 ) { 24 | 25 | std::cout << "Usage : CheckDiffeomorphism image transform 
[spacing]" << std::endl; 26 | exit( 1 ); 27 | 28 | } 29 | 30 | // Load Volume 31 | std::cout << "load : " << argv[ 1 ] << std::endl; 32 | vtkRobustImageReader *imageReader = vtkRobustImageReader::New(); 33 | imageReader->SetFileName( argv[ 1 ] ); 34 | imageReader->Update(); 35 | vtkImageData *image = imageReader->GetOutput(); 36 | 37 | vtkGeneralTransform *transform = readTransform( argv[ 2 ] ); 38 | 39 | if ( argc > 3 ) { 40 | 41 | double sp = atof( argv[ 3 ] ); 42 | 43 | if ( sp > 0 ) { 44 | 45 | cout << "Resizing image with spacing : " << sp << endl; 46 | vtkImageResize *resize = vtkImageResize::New(); 47 | resize->SetResizeMethodToOutputSpacing(); 48 | resize->SetOutputSpacing( sp, sp, sp ); 49 | resize->SetInputData( image ); 50 | resize->Update(); 51 | image = resize->GetOutput(); 52 | 53 | } 54 | 55 | } 56 | 57 | double origin[ 3 ], spacing[ 3 ]; 58 | int dimensions[ 3 ]; 59 | vtkIdType inc[ 3 ]; 60 | image->GetOrigin( origin ); 61 | image->GetSpacing( spacing ); 62 | image->GetDimensions( dimensions ); 63 | image->GetIncrements( inc ); 64 | int n = 0; 65 | cout << "Computing Jacobian determinants..." 
<< endl; 66 | 67 | #pragma omp parallel for reduction ( + : n ) 68 | for ( int k = 0; k < dimensions[ 2 ]; k++ ) { 69 | 70 | for ( int j = 0; j < dimensions[ 1 ]; j++ ) { 71 | 72 | for ( int i = 0; i < dimensions[ 0 ]; i++ ) { 73 | 74 | double in[ 3 ], out[ 3 ], der[3][3]; 75 | in[ 0 ] = origin[ 0 ] + i * spacing[ 0 ]; 76 | in[ 1 ] = origin[ 1 ] + j * spacing[ 1 ]; 77 | in[ 2 ] = origin[ 2 ] + k * spacing[ 2 ]; 78 | transform->InternalTransformDerivative( in, out, der ); 79 | if ( vtkMath::Determinant3x3( der ) < 0 ) n++; 80 | 81 | } 82 | 83 | } 84 | 85 | } 86 | 87 | cout << n << " negative jacobian determinant values (" 88 | << std::setprecision( 3 ) << (float) 100.0 * n / 89 | ( dimensions[ 0 ] * dimensions[ 1 ] * dimensions[ 2 ] ) << "%) " << endl;; 90 | 91 | return n > 0; 92 | 93 | } 94 | -------------------------------------------------------------------------------- /run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | startTime=`date +%s` 4 | 5 | function launch { 6 | 7 | echo "*************************************************************" 8 | echo "Executing : $1 " 9 | $1 10 | echo "*************************************************************" 11 | 12 | } 13 | 14 | # Inspired by NiftyReg script : https://sourceforge.net/p/niftyreg/git/ci/master/tree/reg-apps/groupwise_niftyreg_run.sh 15 | 16 | if [ $# -lt 1 ] 17 | then 18 | echo "" 19 | echo "*******************************************************************************" 20 | echo "One argument is expected to run this script:" 21 | echo "- File with contains parameters" 22 | echo "example: $0 params.sh " 23 | echo "*******************************************************************************" 24 | echo "" 25 | exit 26 | fi 27 | 28 | # read input parameters 29 | . $1 30 | 31 | # check arguments 32 | 33 | if [ ${#IMG_INPUT[@]} -lt 2 ] 34 | then 35 | echo "Less than 2 images have been specified" 36 | echo "Exit ..." 
37 | exit 38 | fi 39 | 40 | IMG_NUMBER=${#IMG_INPUT[@]} 41 | 42 | echo "" 43 | echo "******************************************************" 44 | echo ">>> There are ${IMG_NUMBER} input images to register :" 45 | 46 | for (( CUR_IT=0; CUR_IT<${IMG_NUMBER}; CUR_IT++ )) 47 | do 48 | echo ${IMG_INPUT[CUR_IT]} 49 | done 50 | echo "******************************************************" 51 | 52 | # SETUP EXECUTABLES 53 | ROOT_DIR=$(cd `dirname $0` && pwd) 54 | SURF=$ROOT_DIR/bin/surf3d 55 | MATCH=$ROOT_DIR/bin/match 56 | REG=$ROOT_DIR/bin/frog 57 | 58 | # CREATE THE RESULT FOLDER 59 | if [ ! -d ${RES_FOLDER} ] 60 | then 61 | echo "The output image folder (${RES_FOLDER}) does not exist" 62 | mkdir ${RES_FOLDER} 63 | if [ ! -d ${RES_FOLDER} ] 64 | then 65 | echo "Unable to create the ${RES_FOLDER} folder" 66 | echo "Exit ..." 67 | exit 68 | else 69 | echo "The output image folder (${RES_FOLDER}) has been created" 70 | fi 71 | fi 72 | 73 | cd $RES_FOLDER 74 | PointsFile=points.txt; 75 | 76 | if [ -f ${PointsFile} ] 77 | then 78 | rm ${PointsFile} 79 | fi 80 | 81 | for (( CUR_IT=0; CUR_IT<${IMG_NUMBER}; CUR_IT++ )) 82 | do 83 | IMG=${IMG_INPUT[CUR_IT]}; 84 | OUTPUT_POINTS=$RES_FOLDER/points$CUR_IT 85 | launch "$SURF $IMG -o $OUTPUT_POINTS -s $SPACING -t $THRESHOLD -n $NPOINTS $SURF_OTHER_PARAMS" 86 | echo ${OUTPUT_POINTS}.csv.gz>> ${PointsFile} 87 | 88 | done 89 | 90 | matchTime=`date +%s` 91 | 92 | OUTPUT_PAIRS=pairs.bin 93 | launch "$MATCH $PointsFile -o $OUTPUT_PAIRS -d $MAX_DISTANCE $MATCH_OTHER_PARAMS" 94 | 95 | if [ -n "$LANDMARKS" ] 96 | then 97 | LOPTIONS="-l ${LANDMARKS}" 98 | else 99 | LOPTIONS="" 100 | fi 101 | 102 | registrationTime=`date +%s` 103 | 104 | launch "$REG $OUTPUT_PAIRS $LOPTIONS $REGISTRATION_OTHER_PARAMS" 105 | 106 | endTime=`date +%s` 107 | 108 | echo "Keypoint extraction time : $((matchTime-startTime)) seconds" 109 | echo "Match time : $((registrationTime-matchTime)) seconds" 110 | echo "Registration time : $((endTime-registrationTime)) seconds" 
111 | echo "Total registration time : $((endTime-startTime)) seconds" 112 | -------------------------------------------------------------------------------- /registration/stats.cxx: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | 6 | #include "stats.h" 7 | 8 | #define print( v, size ) { for (int __i = 0; __i < size; __i++ ) { std::cout << v[ __i ]; if ( __i < ( size - 1 ) ) std::cout<< " " ;} std::cout << std::endl;} 9 | 10 | int Stats::maxSize = 10000; 11 | int Stats::maxIterations = 10000; 12 | float Stats::epsilon = 1e-6; 13 | 14 | void Stats::estimateDistribution() { 15 | 16 | const float esp = 1.59576912160573; 17 | int iteration = 0; 18 | const float epsilon = Stats::epsilon; 19 | 20 | while ( iteration++ < Stats::maxIterations ) { 21 | 22 | float sum1 = 0; 23 | float sum2 = 0; 24 | float sum3 = 0; 25 | float sum4 = 0; 26 | float sum5 = 0; 27 | 28 | for ( int i = 0; i < this->size; i++ ) { 29 | 30 | float f1 = ratio * chipdf( samples[ i ] / c1 ) / c1; 31 | float f2 = ( 1.0 - ratio ) * chipdf( samples[ i ] / c2 ) / c2; 32 | float t = f1/ ( f1 + f2 + 1e-16); 33 | float p = this->samples[ i ] * this->weights[ i ]; 34 | sum1 += t * p; 35 | sum2 += t * this->weights[ i ]; 36 | sum3 += ( 1.0 - t ) * p; 37 | sum4 += ( 1.0 - t ) * this->weights[ i ]; 38 | sum5 += this->weights[ i ]; 39 | 40 | } 41 | 42 | sum2 = std::max( sum2, epsilon); 43 | sum3 = std::max( sum3, epsilon); 44 | sum5 = std::max( sum5, epsilon); 45 | float nc1 = std::max( epsilon, sum1 / sum2 / esp ); 46 | float nc2 = std::max( epsilon, sum3 / sum4 / esp ); 47 | float nRatio = std::max( epsilon, sum2 / sum5 ); 48 | 49 | if ( abs( ( c1 - nc1 ) / nc1 ) < 0.001 50 | && abs( ( c2 - nc2 ) / nc2 ) < 0.001 51 | && abs( ( nRatio - ratio ) / nRatio ) < 0.001 ) { 52 | 53 | c1 = nc1; 54 | c2 = nc2; 55 | ratio = nRatio; 56 | break; 57 | 58 | } else { 59 | 60 | c1 = nc1; 61 | c2 = nc2; 62 | ratio = nRatio; 63 | 64 | } 65 | 66 
| } 67 | 68 | return; 69 | 70 | } 71 | 72 | void Stats::displayParameters() { 73 | 74 | int s = this->size; 75 | std::cout << "c1=" << c1 << ","; 76 | std::cout << "c2=" << c2 << ","; 77 | std::cout << "r=" << ratio << ","; 78 | std::cout << "nSamples=" << s; 79 | 80 | double sum = std::accumulate( samples.begin(), samples.begin() + s , 0.0 ); 81 | double mean = sum / s; 82 | 83 | double sq_sum = std::inner_product( samples.begin(), samples.begin() + s, 84 | samples.begin(), 0.0 ); 85 | 86 | auto max = max_element( samples.begin(), samples.begin() + s ); 87 | 88 | double stdev = sqrt( sq_sum / s - mean * mean); 89 | std::cout <<",max=" << *max 90 | << ",mean="<< mean << ",stdev=" << stdev << std::endl; 91 | 92 | 93 | } 94 | 95 | void Stats::saveHistogram( const char *file ) { 96 | 97 | this->getHistogram(); 98 | 99 | std::fstream fs; 100 | fs.open ( file, std::fstream::out | std::fstream::trunc ); 101 | 102 | for ( int i = 0; i < this->histogram.size(); i++ ) 103 | fs << histogram[ i ] << std::endl; 104 | 105 | fs.close(); 106 | 107 | } 108 | 109 | void Stats::saveSamples( const char *file, int subsampling ) { 110 | 111 | std::fstream fs; 112 | fs.open ( file, std::fstream::out | std::fstream::trunc ); 113 | 114 | for ( int i = 0; i < this->size; i++ ) 115 | if ( !( i % subsampling ) ) fs << this->samples[ i ] << std::endl; 116 | 117 | fs.close(); 118 | 119 | } 120 | 121 | void Stats::getHistogram( float binSize ) { 122 | 123 | float max = *std::max_element( samples.begin(), samples.begin() + this->size ); 124 | int size = round( max / binSize ) + 1; 125 | this->histogram.resize( size ); 126 | std::fill( this->histogram.begin(), this->histogram.begin() + size, 0 ); 127 | 128 | for ( int i = 0; i < this->size; i++ ) 129 | this->histogram[ round( this->samples[ i ] / binSize ) ]++; 130 | 131 | } 132 | -------------------------------------------------------------------------------- /js/pointsViewer.js: 
-------------------------------------------------------------------------------- 1 | 'use strict'; 2 | console.clear(); 3 | const { async, THREE, desk, qx } = window; 4 | 5 | const params = { 6 | volume : "big/visceral/volumes/10000100_1_CTce_ThAb.nii.gz", 7 | spacing : 0.75, 8 | numberOfPoints : 20000, 9 | visibilityRadius : 5 10 | }; 11 | 12 | const viewer = new desk.THREE.Viewer(); 13 | const MPR = new desk.MPR.Container(); 14 | viewer.getWindow().add( MPR, { flex : 1 } ); 15 | 16 | let volume; 17 | let volumeMeshes; 18 | let pointsMesh; 19 | let points; 20 | 21 | const label = new qx.ui.basic.Label( "" ); 22 | viewer.add( label, { right : 10, bottom : 10 } ); 23 | 24 | const surf = new desk.Action( "SURF3D"); 25 | surf.setParameters( { type : 3, writeJSON : 1, writeCSVGZ : 0, writeCSV : 0, writeBIN : 0, outputFileName : "points"}, true ); 26 | surf.setParameters( { spacing : params.spacing, numberOfPoints : params.numberOfPoints } ); 27 | surf.getForm( "inputVolume").addListener( "changeValue", updateVolume ); 28 | surf.addListener( "actionUpdated", updatePoints ); 29 | viewer.getWindow().addAt( surf, 0 ); 30 | 31 | const viewerVisible = new qx.ui.form.ToggleButton( "hide slices" ); 32 | viewerVisible.addListener( "changeValue", e => MPR.setVisibility( !e.getData() ? 
"visible" : "excluded" ) ); 33 | surf.add( viewerVisible ); 34 | 35 | async function updateVolume() { 36 | 37 | if ( volumeMeshes ) viewer.removeMeshes( volumeMeshes ); 38 | volumeMeshes = null; 39 | if ( pointsMesh ) viewer.removeMeshes( pointsMesh ); 40 | pointsMesh = null; 41 | MPR.removeAllVolumes(); 42 | const newFile = surf.getForm( "inputVolume").getValue(); 43 | volume = await MPR.addVolumeAsync( newFile ); 44 | const slices = volume.getSlices(); 45 | for ( let slice of slices ) slice.addListener( "changePosition", updatePointsVisibility ); 46 | volumeMeshes = viewer.attachVolumeSlices( slices ); 47 | 48 | } 49 | 50 | 51 | async function updatePoints( event ) { 52 | 53 | if ( pointsMesh ) viewer.removeMesh( pointsMesh ); 54 | pointsMesh = null; 55 | 56 | const txt = await desk.FileSystem.readFileAsync( surf.getOutputDirectory() + "points.json", 57 | { cache : event.getData().response } ); 58 | 59 | points = JSON.parse( txt ).points; 60 | const geometry = new THREE.SphereGeometry( 1, 32, 16 ); 61 | const material = new THREE.MeshLambertMaterial( { color: 0xffffff } ); 62 | pointsMesh = new THREE.InstancedMesh( geometry, material, points.length ); 63 | viewer.addMesh( pointsMesh ); 64 | label.setValue( points.length + " points" ); 65 | updatePointsVisibility(); 66 | 67 | } 68 | 69 | function updatePointsVisibility() { 70 | 71 | if ( !pointsMesh ) return; 72 | const slices = volume.getSlices(); 73 | const pos = [ 1, 2, 0 ].map( i => slices[ i ].getPosition() ); 74 | const matrix = new THREE.Matrix4(); 75 | const colors = [ 0xCD1719, 0xFFED00, 0x009FE3 ].map( c => new THREE.Color( c ) ); 76 | let color = colors[ 0 ]; 77 | 78 | for ( let i = 0; i < points.length; i ++ ) { 79 | 80 | const pt = points[ i ]; 81 | const xyz = [ pt.x, pt.y, pt.z ]; 82 | let visible = false; 83 | for ( let j = 0; j < 3; j++) 84 | if ( Math.abs( xyz[ j ] - pos[ j ] ) < params.visibilityRadius ) { 85 | visible = true; 86 | color = colors[ j ]; 87 | } 88 | 89 | const scale = visible ? 
pt.scale : 0; 90 | matrix.makeScale( scale, scale, scale ); 91 | matrix.setPosition( points[ i ].x, points[ i ].y, points[ i ].z ); 92 | pointsMesh.setColorAt( i, color ); 93 | pointsMesh.setMatrixAt( i, matrix ); 94 | } 95 | 96 | pointsMesh.instanceMatrix.needsUpdate = true; 97 | pointsMesh.instanceColor.needsUpdate = true; 98 | viewer.render(); 99 | } 100 | 101 | surf.setParameters( { inputVolume : params.volume } ); 102 | -------------------------------------------------------------------------------- /js/lib/TextSpriteHelper.js: -------------------------------------------------------------------------------- 1 | 2 | var bbox = new THREE.Box3(); 3 | 4 | THREE.SpriteHelper = function ( canvas ) { 5 | texture = new THREE.Texture( canvas ); 6 | texture.minFilter = THREE.LinearFilter; // to avoid a warning on texture size 7 | texture.needsUpdate = true; 8 | var material = new THREE.SpriteMaterial( { map :texture, depthTest : false } ); 9 | var sprite = new THREE.Sprite( material ); 10 | return sprite; 11 | }; 12 | 13 | THREE.TextSpriteHelper = function (obj, text, options) { 14 | options = options || {}; 15 | var texture, canvas, context; 16 | var sprite = obj.userData.sprite; 17 | var size = options.size || 100; 18 | var height = 64; 19 | text = text.trim(); 20 | if (!sprite) { 21 | canvas = document.createElement('canvas'); 22 | canvas.height = height; 23 | context = canvas.getContext("2d"); 24 | context.font = height + 'px bold Arial'; 25 | var width = context.measureText( text + '' ).width; 26 | canvas.width = width; 27 | sprite = new THREE.SpriteHelper( canvas ); 28 | bbox.setFromObject( obj ); 29 | if ( bbox.isEmpty() ) bbox.expandByPoint( obj.position ); 30 | bbox.translate( obj.position.clone().negate() ); 31 | sprite.position.copy( bbox.getCenter( new THREE.Vector3() ) ); 32 | sprite.position.z = bbox.max.z; 33 | sprite.scale.x = size * width / height; 34 | sprite.scale.y = size; 35 | obj.add(sprite); 36 | } 37 | 38 | context = canvas.getContext( '2d' ); 39 
| texture = sprite.material.map; 40 | canvas = texture.image; 41 | context.clearRect(0, 0, canvas.width, canvas.height); 42 | context.fillStyle = options.backgroundColor || 'rgba(0, 0, 0, 0.5)'; 43 | context.fillRect(0, 0, canvas.width, canvas.height); 44 | context.fillStyle = options.color || 'yellow'; 45 | context.font = height + 'px bold Arial'; 46 | context.fillText( text, 0, height - 7 ); 47 | if (options.strokeColor) { 48 | context.strokeStyle = options.strokeColor; 49 | context.lineWidth = 2; 50 | context.strokeText( text, 0, height - 7 ); 51 | } 52 | texture.needsUpdate = true; 53 | return sprite; 54 | }; 55 | 56 | // getColorFromValue is a function which should return an rgb color 57 | // where each color component is between 0 and 1 58 | THREE.ColorBarSpriteHelper = function ( getColorFromValue, options ) { 59 | options = options || {}; 60 | var scale = options.scale || 10; 61 | var fontHeight = options.fontHeight || 100; 62 | var width = options.width || 50; 63 | var height = options.height || 500; 64 | var yOffset = options.yOffset || 10; 65 | var max = options.max || 1; 66 | 67 | var bCanvas = document.createElement('canvas'); 68 | var bHeight = height + fontHeight; 69 | bCanvas.width = 400; 70 | bCanvas.height = bHeight; 71 | var bContext = bCanvas.getContext('2d'); 72 | bContext.font = 100 + 'px bold Arial'; 73 | bContext.fillText( '0', width + 10, bHeight - yOffset); 74 | bContext.fillText( max.toPrecision(3) , width + 10, fontHeight - yOffset); 75 | 76 | var canvas = document.createElement('canvas'); 77 | canvas.width = width; 78 | canvas.height = height; 79 | var context = canvas.getContext("2d"); 80 | var imageData = context.createImageData(width, height); 81 | var data = imageData.data; 82 | for (var i = 0; i < height; i++) { 83 | for (var j = 0; j < width; j ++) { 84 | var col = getColorFromValue(1 - i / height); 85 | var offset = 4 * i * width + 4 * j; 86 | data[ offset ] = 255 * col[0]; 87 | data[ offset + 1 ] = 255 * col[1]; 88 | data[ 
offset + 2 ] = 255 * col[2]; 89 | data[ offset + 3 ] = 255; 90 | } 91 | } 92 | bContext.putImageData(imageData, 0, fontHeight / 2); 93 | var sprite = new THREE.SpriteHelper( bCanvas ); 94 | sprite.scale.multiplyScalar(bHeight); 95 | sprite.frustumCulled = false; 96 | return sprite; 97 | }; 98 | -------------------------------------------------------------------------------- /tools/register.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from flip_tool import flip_image 3 | import nibabel as nib 4 | import os 5 | from os import listdir 6 | from os.path import abspath, dirname, join, normpath 7 | import time 8 | 9 | startTime = time.time() 10 | cwd = os.getcwd() 11 | volumesList = "volumes.txt" 12 | frogPath = normpath( join( dirname( __file__ ), "../bin" ) ) 13 | 14 | parser = argparse.ArgumentParser( description = 'Register a volume against an already registered group', formatter_class=argparse.ArgumentDefaultsHelpFormatter ) 15 | parser.add_argument( '-a', dest = 'all', help = 'register all', action="store_true" ) 16 | parser.add_argument( '-d', dest = 'inputDir', help = 'input registered group directory', required = True ) 17 | parser.add_argument( '-dl', dest = 'nDeformableLevels', help = 'number of deformable levels for registration', type = int ) 18 | parser.add_argument( '-l', dest = 'landmarks', help = 'path to landmarks file' ) 19 | parser.add_argument( '-li', dest = 'nLinearIterations', help = 'number of linear iterations for registration', type = int ) 20 | parser.add_argument( '-i', dest = 'inputVolume', help = 'input volume', required = True ) 21 | parser.add_argument( '--orientation', help = 'Force image orientation' ) 22 | parser.add_argument('--fake_orientation', help = "Fake image orientation" ) 23 | parser.add_argument( '-p', dest = 'numberOfPoints', type = int, help = 'number of keypoints to extract with SURF3D', default = 20000 ) 24 | parser.add_argument( '-n', dest = 
'numberOfReferences', type = int, help = 'number of reference volumes to register against' ) 25 | parser.add_argument( '-s', dest = 'spacing', type = float, help = 'spacing for SURF3D', default = 0.75 ) 26 | parser.add_argument( '-t', dest = 'threshold', type = float, help = 'detector threshold for SURF3D', default = 0 ) 27 | parser.add_argument( '-vt', dest = 'volumeThreshold', type = float, help = 'volume Threshold' ) 28 | parser.add_argument( '-cmin', type = float, help = 'min clamp image values' ) 29 | parser.add_argument( '-cmax', type = float, help = 'max clamp image values' ) 30 | args = parser.parse_args() 31 | 32 | def separate(): 33 | print( "******************************************************" ) 34 | 35 | def execute( cmd ): 36 | start_time = time.time() 37 | separate() 38 | print( "Executing : " + cmd ) 39 | code = os.system( cmd ) 40 | print( "Command : " + cmd ) 41 | print( "Executed in " + str( round( time.time() - start_time ) ) + "s" ) 42 | print( "Exit code : " + str( code ) ) 43 | if code : raise( OSError( code ) ) 44 | 45 | points = [] 46 | 47 | def getId( elem ): 48 | return int( elem[ 6: -7] ) 49 | 50 | ### get reference points 51 | for f in os.listdir( args.inputDir ): 52 | if not f.endswith( ".csv.gz" ) : continue 53 | if not f.startswith( "points" ) : continue 54 | points.append( f ) 55 | 56 | points.sort( key = getId ) 57 | if args.numberOfReferences : points = points[ 0 : args.numberOfReferences ] 58 | print( str( len( points ) ) + " references" ) 59 | 60 | #### compute input volume keypoints if needed 61 | inputPoints = ""; 62 | 63 | if not args.inputVolume.endswith( '.csv.gz' ) : 64 | if args.orientation or args.fake_orientation: 65 | inputVolume = flip_image( args.inputVolume, threshold = args.volumeThreshold, orientation = args.orientation, fake_orientation = args.fake_orientation ) 66 | else : inputVolume = args.inputVolume 67 | surfBin = join( frogPath, "surf3d" ) 68 | surfArgs = [ surfBin, inputVolume, "-s", str( args.spacing ), 
"-t", str( args.threshold ), "-n", str( args.numberOfPoints )] 69 | if args.cmin != None : surfArgs.extend( [ "-cmin", str( args.cmin ) ] ) 70 | if args.cmax != None : surfArgs.extend( [ "-cmax", str( args.cmax ) ] ) 71 | execute( " ".join( surfArgs ) ) 72 | inputPoints = join( cwd, "points.csv.gz" ) 73 | else: 74 | print ( "Input is a .csv.gz file, no need for extraction" ) 75 | inputPoints = abspath( args.inputVolume ) 76 | 77 | #### compute pairs 78 | volumes = open( volumesList, "w" ) 79 | for pt in points : volumes.write( join( args.inputDir, pt ) + "\n" ) 80 | volumes.write( inputPoints ) 81 | volumes.close() 82 | matchBin = join( frogPath, "match" ) 83 | matchCmd = matchBin + " " + volumesList + " -o pairs.bin -d 1" 84 | if not args.all : matchCmd += " -targ " + str( len( points ) ) 85 | execute( matchCmd ) 86 | 87 | #### register 88 | frogBin = join( frogPath, "frog" ) 89 | frogArgs = [ frogBin, "pairs.bin", "-j -fd", args.inputDir + "/transforms" ] 90 | if args.landmarks : frogArgs.extend( [ "-l", args.landmarks ] ) 91 | if args.nDeformableLevels != None : frogArgs.extend( [ "-dl", str( args.nDeformableLevels ) ] ) 92 | if args.nLinearIterations : frogArgs.extend( [ "-li", str( args.nLinearIterations ) ] ) 93 | if not args.all : frogArgs.extend( [ "-fi", str( len( points ) ) ] ) 94 | execute( " ".join( frogArgs ) ) 95 | 96 | separate() 97 | print( "Registration done in " + str( round( time.time() - startTime ) ) + "s" ) 98 | separate() 99 | -------------------------------------------------------------------------------- /registration/imageGroup.h: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | 6 | #include "image.h" 7 | #include "stats.h" 8 | #include "../vtkOpenSURF3D/picojson.h" 9 | 10 | class ImageGroup { 11 | 12 | public: 13 | 14 | void run(); 15 | void readPairs( char *fileName ); // read pair file from match 16 | 17 | bool printStats; // print stats at each 
iteration 18 | bool printLinear; // print linear transforms at each iteration 19 | 20 | int linearIterations; // number of iterations for linear registration 21 | bool useScale; // use scale for linear registration 22 | int deformableLevels; // number of deformable levels 23 | int deformableIterations; // number of iteration for each level 24 | float linearInitializationAnchor[ 3 ]; 25 | float linearAlpha; // update ratio 26 | float deformableAlpha; // update ratio 27 | int statIntervalUpdate; // update stats every n iterations 28 | float initialGridSize; 29 | float boundingBoxMargin; 30 | float inlierThreshold; // to filter outliers completely 31 | bool guaranteeDiffeomorphism; // to guarantee that transforms will be diffeomorpic 32 | float maxDisplacementRatio; // max ratio to guarantee diffeomorphism 33 | bool invertLandmarksCoordinates; // if true the landmarks coordinates x and y will be inverted when loading 34 | float landmarksConstraintsWeight; // weight for landmark constraints 35 | 36 | const char* outputFileName = "measures.csv"; // output filename of measure.csv 37 | void writeLinksDistances(); // write distances and probabilities between pairs to file 38 | bool writePairs; // if true the pairs and their distances will be written to pairs.csv 39 | 40 | int numberOfFixedImages; // number of already registered images 41 | bool useRANSAC; // use RANSAC when registering with fixed images 42 | int numberOfRANSACIterations; // number of RANSAC iterations 43 | float RANSACInlierDistance; // maximum inlier distance for RANSAC 44 | float RANSACMaxScale; // maximum allowed scale for RANSAC 45 | char *fixedTransformsDirectory; 46 | bool writeSingleFileTransforms; // outputs a single big JSON file for each transform 47 | 48 | std::string transformSubdirectory; 49 | std::string errorMapsSubdirectory; 50 | void addLandmarks( const char *path, bool asConstraints = false ); 51 | 52 | ImageGroup() { 53 | boundingBoxMargin = 0.1; 54 | deformableAlpha = 0.02; 55 | 
deformableIterations = 200; 56 | fixedTransformsDirectory = 0; 57 | guaranteeDiffeomorphism = true; 58 | invertLandmarksCoordinates = true; 59 | landmarksConstraintsWeight = 50; 60 | initialGridSize = 100; 61 | inlierThreshold = 0.5; 62 | linearAlpha = 0.5; 63 | linearInitializationAnchor[ 0 ] = 0.5; 64 | linearInitializationAnchor[ 1 ] = 0.5; 65 | linearInitializationAnchor[ 2 ] = 0.5; 66 | linearIterations = 50; 67 | maxDisplacementRatio = 0.4; 68 | deformableLevels = 3; 69 | numberOfFixedImages = 0; 70 | numberOfRANSACIterations = 5000; 71 | printLinear = false; 72 | printStats = false; 73 | RANSACInlierDistance = 50; 74 | RANSACMaxScale = 10; 75 | statIntervalUpdate = 10; 76 | useRANSAC = true; 77 | useScale = true; 78 | writePairs = false; 79 | writeSingleFileTransforms = false; 80 | transformSubdirectory = std::string( "transforms" ); 81 | errorMapsSubdirectory = std::string( "errorMaps" ); 82 | }; 83 | 84 | protected: 85 | 86 | class Measure { 87 | 88 | public: 89 | 90 | float E, landmarkAv, landmarkMax, landmarkSTD; 91 | Measure( float e ) : landmarkAv( 0 ), landmarkMax( 0 ), landmarkSTD( 0 ), 92 | E ( e ) {}; 93 | 94 | }; 95 | 96 | 97 | struct Landmark { 98 | 99 | imageIdType image; 100 | pointIdType point; 101 | 102 | }; 103 | typedef std::vector < Landmark > Landmarks; 104 | 105 | std::vector < Image > images; 106 | std::vector < Measure > measures; 107 | 108 | void setupStats(); 109 | 110 | void setupDeformableTransforms( int level ); 111 | void setupLinearTransforms(); 112 | 113 | typedef std::pair< int, vtkMatrix4x4* > RANSACResult; 114 | void RANSAC( int image ); 115 | RANSACResult RANSACBatch( int image, int nIterations, int batch ); 116 | 117 | void transformPoints( const bool &apply = false ); 118 | 119 | void updateStats(); // compute Maxwell distribution parameters 120 | void countInliers(); // displays number of inliers/outliers 121 | 122 | double updateDeformableTransforms( const float alpha ); // returns error value or -1 if diffeomorphism is 
not guaranteed 123 | void saveErrorMaps(); 124 | double updateLinearTransforms(); 125 | 126 | void displayStats(); 127 | void displayLinearTransforms(); 128 | 129 | void saveIndividualDistanceHistograms(); 130 | void saveDistanceHistograms( const char *file ); 131 | void saveMeasures( const char *file ); 132 | void getBoundingBox( vtkBoundingBox &box, const bool &all = false ); 133 | 134 | void saveTransforms(); 135 | void saveBoundingBox(); 136 | 137 | // reference landmarks 138 | std::map < std::string, Landmarks > landmarks; 139 | void computeLandmarkDistances( float e ); 140 | bool saveTransformedLandmarks(); 141 | void saveLandmarkDistances(); 142 | 143 | 144 | void readAndApplyFixedImagesTransforms(); 145 | 146 | picojson::object stats; // to log stats 147 | }; 148 | -------------------------------------------------------------------------------- /tools/register2volumes.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import os 3 | from os import listdir 4 | from os.path import abspath, dirname, isdir, join, normpath 5 | import tempfile 6 | import time 7 | 8 | startTime = time.time() 9 | cwd = os.getcwd() 10 | volumesList = "toRegister.txt" 11 | masksList = "masks.txt" 12 | rootPath = normpath( join( dirname( __file__ ), ".." 
) ) 13 | 14 | parser = argparse.ArgumentParser( description = 'Register 2 images', formatter_class=argparse.ArgumentDefaultsHelpFormatter ) 15 | regParse = parser.add_argument_group('General options') 16 | regParse.add_argument( 'source', help = 'source image' ) 17 | regParse.add_argument( 'target', help = 'target image' ) 18 | regParse.add_argument( '-m1', dest = 'mask1', help = 'source mask' ) 19 | regParse.add_argument( '-m2', dest = 'mask2', help = 'target mask' ) 20 | regParse.add_argument( '-o', dest = 'output', help = 'output image' ) 21 | frogParse = parser.add_argument_group('Registration options') 22 | frogParse.add_argument( '-dl', dest = 'deformableLevels', help = 'number of deformable levels', type = int ) 23 | frogParse.add_argument( '-di', dest = 'deformableIterations', help = 'number of deformable iterations per level', type = int ) 24 | frogParse.add_argument( '-g', dest = 'gridSpacing', type = float, help = 'initial grid spacing' ) 25 | frogParse.add_argument( '-li', dest = 'linearIterations', help = 'number of linear iterations', type = int ) 26 | frogParse.add_argument( '-l', dest = 'landmarks', help = 'path to landmarks file' ) 27 | frogParse.add_argument( '-wp', dest = 'writePairs', help = 'write list of pairs to file', action="store_true" ) 28 | SURFParser = parser.add_argument_group('SURF3D options') 29 | SURFParser.add_argument( '-m', dest = 'masks', help = 'path to masks' ) 30 | SURFParser.add_argument( '-p', dest = 'numberOfPoints', type = int, help = 'number of keypoints to extract', default = 20000 ) 31 | SURFParser.add_argument( '-s', dest = 'spacing', type = float, help = 'spacing', default = 0.75 ) 32 | SURFParser.add_argument( '-t', dest = 'threshold', type = float, help = 'detector threshold', default = 0 ) 33 | args = parser.parse_args() 34 | 35 | def separate(): 36 | print( "******************************************************" ) 37 | 38 | def execute( cmd ): 39 | start_time = time.time() 40 | separate() 41 | print( "Executing 
: " + cmd ) 42 | code = os.system( cmd ) 43 | print( "Command : " + cmd ) 44 | print( "Executed in " + str( round( time.time() - start_time ) ) + "s" ) 45 | print( "Exit code : " + str( code ) ) 46 | if code : raise( OSError( code ) ) 47 | 48 | 49 | def computeAverageImage( images ) : 50 | if not args.imageSpacing : return 51 | separate() 52 | print( "Compute average image" ) 53 | startTime = time.time() 54 | dummyBin = join( frogPath, "DummyVolumeGenerator" ) 55 | execute( " ".join( [ dummyBin, "bbox.json", str( args.imageSpacing ) ] ) ) 56 | transformedImages = [] 57 | 58 | for i, image in enumerate( images ) : 59 | separate() 60 | transformBin = join( frogPath, "VolumeTransform" ) 61 | transformedImage = "transformed" + str( i ) + ".nii.gz" 62 | if args.flipToRAS: image = flipAndSaveToRAS( image ) 63 | execute( " ".join( [ transformBin, image, "dummy.mhd", "-t transforms/" + str( i ) + ".json -o " + transformedImage ] ) ) 64 | transformedImages.append( transformedImage ) 65 | 66 | averageBin = join( frogPath, "AverageVolumes" ) 67 | execute( " ".join( [ averageBin, " ".join( transformedImages ) ] ) ) 68 | print( "Average image computed in " + str( round( time.time() - startTime ) ) + "s" ) 69 | 70 | files = [ abspath( args.source ), abspath( args.target ) ] 71 | masks = [] 72 | if args.mask1 and args.mask2 : 73 | masks = [ abspath( args.mask1 ), abspath( args.mask2 ) ] 74 | 75 | print( "There are " + str( len( files ) ) + " files to register : " ) 76 | for f in files : print( f ) 77 | separate() 78 | 79 | frogBin = join( rootPath, "FROG.py" ) 80 | volumes = open( volumesList, "w" ) 81 | volumes.write( "\n".join( files ) ) 82 | volumes.close() 83 | 84 | frogArgs = [ "python", frogBin, volumesList, "-s", str( args.spacing ), "-t", str( args.threshold ), "-p", str( args.numberOfPoints ) ] 85 | 86 | if len( masks ) : 87 | print( "Using masks : " ) 88 | for f in masks : print( f ) 89 | mFile = open( masksList, "w" ) 90 | mFile.write( "\n".join( masks ) ) 91 | 
mFile.close() 92 | frogArgs.extend( [ "-m", abspath( masksList ) ] ) 93 | 94 | if args.deformableLevels : frogArgs.extend( [ "-dl", str( args.deformableLevels ) ] ) 95 | if args.linearIterations : frogArgs.extend( [ "-li", str( args.linearIterations ) ] ) 96 | if args.deformableIterations : frogArgs.extend( [ "-di", str( args.deformableIterations ) ] ) 97 | if args.writePairs : frogArgs.append( "-wp" ) 98 | if args.landmarks : frogArgs.extend( [ "-l", args.landmarks ] ) 99 | if args.gridSpacing : frogArgs.extend( [ "-g", str( args.gridSpacing ) ] ) 100 | if args.spacing : frogArgs.extend( [ "-s", str( args.spacing ) ] ) 101 | if args.numberOfPoints : frogArgs.extend( [ "-p", str( args.numberOfPoints ) ] ) 102 | execute( " ".join( frogArgs ) ) 103 | 104 | 105 | separate() 106 | transformBin = join( rootPath, "bin/VolumeTransform" ) 107 | transformArgs = [ transformBin, files[ 0 ], files[ 1 ] ] 108 | transformArgs.append( "-t transforms/0.json" ) 109 | transformArgs.append( "-ti transforms/1.json" ) 110 | if ( args.output ) : transformArgs.extend( [ "-o", args.output ] ) 111 | execute( " ".join( transformArgs ) ) 112 | 113 | separate() 114 | print( "All done in " + str( round( time.time() - startTime ) ) + "s" ) 115 | -------------------------------------------------------------------------------- /Readme.md: -------------------------------------------------------------------------------- 1 | [![CI](https://github.com/valette/FROG/actions/workflows/ci.yml/badge.svg)](https://github.com/valette/FROG/actions/workflows/ci.yml) 2 | 3 | FROG : Fast Registration Of image Groups 4 | ======================================== 5 |

6 | 7 |

8 | 9 | ### Info ### 10 | This code is the implementation deriving from those papers: 11 | 12 | [1] Rémi Agier, Sébastien Valette, Laurent Fanton, Pierre Croisille and Rémy Prost, Hubless 3D Medical Image Bundle Registration, In Proceedings of the 11th Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications - Volume 3: VISAPP, 265-272, 2016, Rome, Italy. https://hal.archives-ouvertes.fr/hal-01284240 13 | 14 | [2] Rémi Agier, Sébastien Valette, Razmig Kéchichian, Laurent Fanton, Rémy Prost, Hubless keypoint-based 3D deformable groupwise registration, Medical Image Analysis, Volume 59, January 2020, https://arxiv.org/abs/1809.03951 15 | 16 | 17 | Authors: 18 | * Rémi Agier : match code 19 | * Sébastien Valette : registration and tools code 20 | 21 | ### Licence ### 22 | This code is distributed under the CeCILL-B license (BSD-compatible) 23 | (copyright CNRS, INSA-Lyon, UCBL, INSERM.) 24 | 25 | ### Dependencies ### 26 | * Boost www.boost.org 27 | * CMAKE www.cmake.org 28 | * OpenCV www.opencv.org 29 | * VTK www.vtk.org 30 | 31 | ### Simple compilation HowTo under Linux ### 32 | git clone --recursive git@github.com:valette/FROG.git FROG 33 | cd FROG 34 | cmake . -DCMAKE_BUILD_TYPE=Release 35 | make 36 | 37 | Note for FEDORA linux : in case of crashes, please use a self-compiled version of VTK. 38 | 39 | ### Usage with global script run.sh ### 40 | 41 | Groupwise registration is computed via the run.sh script, in three steps: 42 | * Keypoint extraction from input images 43 | * Keypoint matching 44 | * groupwise registration of keypoints 45 | 46 | As parameter, run.sh should be fed with a params.sh script containing all input parameters. The run.sh script should not be modified whereas one can copy, rename and edit the params.sh script at will. 
47 | 48 | To launch registration, execute 49 | 50 | ./run.sh ./params.sh 51 | 52 | After registration is computed, input images can be transformed into the common space via the transform.sh script, which should also be fed with the parameters file as well as the desired spacing for the transformed images. As an example: 53 | 54 | ./transform.sh ./params.sh 2 55 | 56 | will transform input images and compute their average with a spacing equal to 2 57 | 58 | To check that the output transforms are diffeomorphic: 59 | 60 | ./checkDiffeomorphism.sh ./params.sh 61 | 62 | 63 | ### Usage with custom command-line execution ### 64 | 65 | Three steps are needed to compute groupwise registration: 66 | * Keypoint extraction from input images 67 | 68 | Use the surf3d executable to extract keypoints from each 3D image: 69 | 70 | bin/surf3d inputImage [options] 71 | 72 | Documentation for this program is available here: https://github.com/valette/vtkOpenSURF3D 73 | 74 | * Keypoint matching 75 | 76 | Edit a text file (e.g. myPointFiles.txt) containing the list of keypoint files to match. 77 | Use the match executable. Then: 78 | 79 | bin/match myPointFiles.txt [options] 80 | 81 | Options: 82 | 83 | * -d distance : set max distance between matches. Default : 0.22 84 | * -d2 distance2secondRatio : maximum closestDistance / distance2second ratio. Default : 1 85 | 86 | This program outputs a binary pairs.bin file describing matches between keypoints. Reading the binary file can easily be done, as shown in the frog code: https://github.com/valette/FROG/blob/master/registration/imageGroup.cxx#L953 87 | 88 | * Groupwise registration of keypoints 89 | 90 | Finally, groupwise registration can occur, with the frog program: 91 | 92 | bin/frog pairs.bin [options] 93 | 94 | Options : 95 | 96 | * -da : set alpha for deformable registration. Default : 0.02 97 | * -dlinear 0/1 : display linear parameters during registration. Default : 0 98 | * -dstats 0/1 : display stats during registration.
Default : 0 99 | * -di number : number of iterations for each deformable level. Default : 200 100 | * -dl number : number of deformable levels. Default : 3 101 | * -emi number : max number of iterations for EM weighting. Default : 10000 102 | * -fi number : number of fixed images. Default : 0 103 | * -fd path : fixed images transforms directory. 104 | * -g spacing : initial grid spacing for deformable. Default : 100 105 | * -gd 0/1 : guaranteed diffeomorphism. Default : 1 106 | * -gm ratio : maximal displacement ratio to guarantee diffeomorphism. Default : 0.4 107 | * -il 0/1 : invert landmarks x and y coordinates. Default : 1 108 | * -la : set alpha for linear registration. Default : 0.5 109 | * -li number : number of iterations for linear registration. Default : 50 110 | * -nt : set number of threads. Default : number of cores 111 | * -s 0/1 : use scale for linear registration. Default : 1 112 | * -se number : stats epsilon. Default : 1e-06 113 | * -si number : interval update for statistics. Default : 10 114 | * -ss number : stats maximal sample size. Default : 10000 115 | * -t threshold : inlier probability threshold. Default : 0.5 116 | * -l path : path containing reference landmarks. 117 | 118 | FROG outputs several files: 119 | * a set of transform files (transformXX.json) which can be used to transform images using the VolumeTransform executable 120 | * histograms_linear.csv and histograms.csv, which provide histograms of distances (bin size = 1) between matching keypoints after registration (one histogram per input image). An example of such histograms is shown in figure 5 of the paper [2]. histograms_linear.csv contains histograms after linear registration, histograms.csv contains histograms after deformable registration. 
121 | 122 | comments, suggestions : https://github.com/valette/FROG/issues 123 | -------------------------------------------------------------------------------- /tools/VolumeTransform.cxx: -------------------------------------------------------------------------------- 1 | /*========================================================================= 2 | 3 | Program: VolumeTransform : Transform a volume 4 | Module: FROG 5 | Language: C++ 6 | Date: 2013/11 7 | Auteur: Sebastien Valette 8 | 9 | =========================================================================*/ 10 | // .NAME VolumeTransform 11 | // .SECTION Description 12 | 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | 23 | #include "../vtkOpenSURF3D/vtkRobustImageReader.h" 24 | #include "transformIO.h" 25 | 26 | int main( int argc, char *argv[] ) { 27 | if ( argc < 3 ) { 28 | std::cout << "Usage : VolumeTransform source reference [-t transform] [-ti inverse_transform] [-i interpolation] [-o outputFileName] [-rx reverseX]" << std::endl; 29 | exit( 1 ); 30 | } 31 | 32 | int reverseX = 0 ; 33 | char *outputFile = 0; 34 | int interpolation = 1; // linear 35 | 36 | vtkGeneralTransform *transform = vtkGeneralTransform::New(); 37 | transform->Identity(); 38 | 39 | int argumentsIndex = 3; 40 | bool backGroundSet = false; 41 | float backGroundLevel = 0; 42 | 43 | while ( argumentsIndex < argc ) { 44 | 45 | char * key = argv[ argumentsIndex ]; 46 | char * value = argv[ argumentsIndex + 1 ]; 47 | 48 | if ( strcmp( key ,"-b" ) == 0 ) { 49 | backGroundLevel = atof( value ); 50 | backGroundSet = true; 51 | } 52 | 53 | if ( strcmp( key ,"-t" ) == 0 ) { 54 | vtkGeneralTransform *trans2 = readTransform( value ); 55 | trans2->Inverse(); 56 | transform->Concatenate( trans2 ); 57 | } 58 | 59 | if ( strcmp( key ,"-ti" ) == 0 ) { 60 | transform->Concatenate( readTransform( value ) ); 61 | } 62 | 63 | if ( strcmp( key ,"-o" ) == 0 ) { 64 | 
outputFile = value; 65 | } 66 | 67 | if ( strcmp( key ,"-i" ) == 0 ) { 68 | interpolation = atoi( value ); 69 | } 70 | 71 | if (strcmp(key ,"-rx") == 0) { 72 | reverseX = atoi(value); 73 | } 74 | 75 | argumentsIndex += 2; 76 | } 77 | 78 | vtkTimerLog *Timer = vtkTimerLog::New(); 79 | vtkImageData *images[ 2 ]; 80 | // If we want to use a reader factory, we have to delete readers ourselves 81 | vtkRobustImageReader *imageReaders[ 2 ]; 82 | 83 | for ( int i = 0; i < 2; i++ ) { 84 | 85 | // Create a reader for image and try to load it 86 | char *file = argv[ i + 1 ]; 87 | imageReaders[ i ] = vtkRobustImageReader::New(); 88 | 89 | // Load Volume 90 | std::cout << "load : " << file << std::endl; 91 | Timer->StartTimer(); 92 | imageReaders[ i ]->SetFileName( file ); 93 | imageReaders[ i ]->Update(); 94 | Timer->StopTimer(); 95 | std::cout << "Image loaded in " << Timer->GetElapsedTime() << "s" << std::endl; 96 | images[ i ] = imageReaders[ i ]->GetOutput(); 97 | 98 | } 99 | 100 | double *bounds = images[ 0 ]->GetBounds(); 101 | std::cout << "image bounds :"; 102 | for ( unsigned int i = 0; i < 6; i++) { 103 | std::cout << bounds[ i ] << " "; 104 | } 105 | std::cout << std::endl; 106 | 107 | double center[3], transformedCenter[3]; 108 | for ( unsigned int i = 0; i < 3; i++ ) { 109 | center[ i ] = 0.5 * ( bounds[ i * 2 ] + bounds[ i * 2 + 1 ] ); 110 | } 111 | std::cout << "center :" << center[ 0 ] << " " << center[ 1 ] << " " << center[ 2 ] << std::endl; 112 | 113 | transform->TransformPoint( center, transformedCenter ); 114 | std::cout << "transformed center :" << transformedCenter[ 0 ] << " " << transformedCenter[ 1 ] << " " << transformedCenter[ 2 ] << std::endl; 115 | 116 | double valueRange[ 2 ] ; 117 | images[ 0 ]->GetPointData()->GetScalars()->GetRange( valueRange ); 118 | 119 | // reslice 120 | vtkImageReslice *reslice = vtkImageReslice::New(); 121 | reslice->SetInputData( images[ 0 ] ); 122 | reslice->SetOutputSpacing( images[ 1 ]->GetSpacing() ); 123 | 
reslice->SetOutputExtent( images[ 1 ]->GetExtent() ); 124 | reslice->SetOutputOrigin( images[ 1 ]->GetOrigin() ); 125 | reslice->SetResliceTransform( transform ); 126 | reslice->SetBackgroundLevel( backGroundSet ? backGroundLevel : valueRange[ 0 ] ); 127 | 128 | if (interpolation == 0 ) { 129 | reslice->SetInterpolationModeToNearestNeighbor(); 130 | } else { 131 | reslice->SetInterpolationModeToLinear(); 132 | } 133 | 134 | Timer->StartTimer(); 135 | vtkObject::GlobalWarningDisplayOff(); 136 | reslice->Update(); 137 | vtkObject::GlobalWarningDisplayOn(); 138 | Timer->StopTimer(); 139 | std::cout << "Transform computed in " << Timer->GetElapsedTime() << "s" << std::endl; 140 | 141 | Timer->StartTimer(); 142 | 143 | // write output 144 | vtkImageWriter *writer; 145 | 146 | if ( outputFile ) { 147 | 148 | char fin[ 10 ]; 149 | char filename[ 1000 ]; 150 | 151 | strcpy( filename, outputFile ); 152 | 153 | if (filename != NULL) { 154 | char *p; 155 | for (p = filename; *p; ++p) { 156 | *p = tolower(*p); 157 | } 158 | } 159 | 160 | strcpy ( fin, ".mhd" ); 161 | 162 | if ( strstr( filename, fin) != NULL) { 163 | 164 | writer = vtkMetaImageWriter::New(); 165 | 166 | } else { 167 | 168 | strcpy ( fin, ".nii.gz" ); 169 | if ( strstr( filename, fin) != NULL) { 170 | 171 | writer = vtkNIFTIImageWriter::New(); 172 | 173 | } else { 174 | std::cout << "not able to read " << outputFile << std::endl; 175 | } 176 | 177 | } 178 | 179 | writer->SetFileName( outputFile ); 180 | 181 | } else { 182 | 183 | writer = vtkMetaImageWriter::New(); 184 | writer->SetFileName( "output.mhd" ); 185 | 186 | } 187 | 188 | vtkImageFlip* flipper = vtkImageFlip::New() ; 189 | 190 | if (reverseX) { 191 | flipper->SetInputConnection(reslice->GetOutputPort()) ; 192 | flipper->SetFilteredAxis(0) ; 193 | flipper->Update() ; 194 | writer->SetInputData(flipper->GetOutput()) ; 195 | } 196 | else 197 | writer->SetInputData(reslice->GetOutput()) ; 198 | 199 | writer->Write() ; 200 | 201 | Timer->StopTimer(); 202 
| std::cout << "File written in " << Timer->GetElapsedTime() << "s" << std::endl; 203 | 204 | Timer->Delete(); 205 | flipper->Delete() ; 206 | reslice->Delete() ; 207 | writer->Delete() ; 208 | 209 | for ( int i = 0 ; i < 2 ; ++i ) 210 | imageReaders[ i ]->Delete() ; 211 | } 212 | -------------------------------------------------------------------------------- /js/lib/LSRegistration.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | //const { require } = self; 4 | 5 | function getLSRegistration() { 6 | 7 | const async = require ('async'); 8 | var _ = require ('lodash'); 9 | 10 | let desk; 11 | try { 12 | desk = require ('desk-client'); 13 | } catch ( e ) { 14 | desk = window.desk; 15 | } 16 | var getSolver; 17 | 18 | if (typeof process !== "undefined") { 19 | getSolver = () => require(__dirname + '/laplaceSolver.js'); 20 | //desk.include(__dirname + '/../../frog.json'); 21 | } else { 22 | getSolver = () => laplaceSolver.worker(); 23 | } 24 | 25 | var LSRegistration = function () { 26 | 27 | this.defaultParameters = { 28 | use3D : true, 29 | 30 | match3d : { 31 | RansacDist : 40, 32 | MatchingScale : 1.5, 33 | MatchingDist : 0.3, 34 | MatchingDist2 : 0.98, 35 | ComputeBoundingBoxes : 1, 36 | RansacMinInliers : 1 //[5, 20, 25, 30, 40] 37 | }, 38 | surf3d : { 39 | threshold : 8000, 40 | spacing: 1.5 41 | }, 42 | match2d : { 43 | // RANSAC_iterations : 2000, 44 | spacing : 2 45 | // maximum_dimension :150 46 | }, 47 | edgeRemovalRatio : 0.04, 48 | finalEdgesRatio : 3 49 | }; 50 | 51 | 52 | var volumes; 53 | var matches; 54 | var result; 55 | var options; 56 | 57 | this.register = function (vols, opts, callback) { 58 | if (typeof opts === 'function') { 59 | callback = opts; 60 | opts = {}; 61 | } 62 | volumes = vols.map(volume => volume instanceof Object ? 
volume : {file : volume}); 63 | options = _.merge(this.defaultParameters, opts); 64 | async.waterfall([ 65 | getMatches, 66 | computeLSRegistration, 67 | ], callback); 68 | }; 69 | 70 | var pointsFile = []; 71 | var startTime; 72 | 73 | function surf2d (volume, callback) { 74 | var index = volumes.indexOf(volume); 75 | async.eachLimit(volumes, 4, function (vol, callback) { 76 | var index2 = volumes.indexOf(vol); 77 | if ( index >= index2) { 78 | callback(); 79 | return; 80 | } 81 | 82 | var bounds = {}; 83 | var box = vol.boundingBox; 84 | if (box) { 85 | bounds.xmin = box[0]; 86 | bounds.xmax = box[1]; 87 | bounds.ymin = box[2]; 88 | bounds.ymax = box[3]; 89 | bounds.zmin = box[4]; 90 | bounds.zmax = box[5]; 91 | } 92 | 93 | var par = Object.assign({ action : "MATCH2D", 94 | input_volume1 : vol.file, 95 | input_volume2 : volume.file 96 | }, options.match2d, bounds); 97 | 98 | desk.Actions.execute(par, function (err, res) { 99 | desk.FileSystem.readFile(res.outputDirectory + "/transform.json", 100 | function (err, res) { 101 | if (err) { 102 | callback(err); 103 | return; 104 | } 105 | res = JSON.parse(res); 106 | if (!matches[index]) { 107 | matches[index]= []; 108 | } 109 | matches[index][index2] = res; 110 | res.scale = 1 / res.scale; 111 | res.translation = res.translation.map(function (c) { 112 | return -c; 113 | }); 114 | var temp = res.bboxA; 115 | res.bboxA = res.bboxB; 116 | res.bboxB = temp; 117 | 118 | if (res.scale < 0) { 119 | res.fail = true; 120 | } 121 | callback(); 122 | }); 123 | } 124 | ); 125 | }, callback); 126 | } 127 | 128 | function getMatches ( cb ) { 129 | matches = volumes.map( () => [] ); 130 | 131 | if ( options.use3D ) { 132 | 133 | get3DMatches( cb ); 134 | 135 | } else { 136 | 137 | async.eachSeries( volumes, surf2d, function ( err ) { 138 | cb ( err, matches ); 139 | }); 140 | 141 | } 142 | } 143 | 144 | function get3DMatches( callback ) { 145 | 146 | async.mapLimit( volumes, 3, function ( volume, callback ) { 147 | 148 | 
desk.Actions.execute( Object.assign( { 149 | 150 | action : "SURF3D", 151 | input_volume : volume.file, 152 | writeJSON : 1, 153 | writeCSVGZ : 0, 154 | silent : true 155 | 156 | }, options.surf3d ), function ( err, response ) { 157 | 158 | callback( err, response.outputDirectory + "points.json" ); 159 | 160 | } 161 | ); 162 | 163 | }, function ( err, result ) { 164 | pointsFile = result; 165 | var tasks = []; 166 | volumes.forEach( function ( volume, index ) { 167 | for ( var j = 0; j < index; j++ ) { 168 | tasks.push( { i : index, j : j } ); 169 | } 170 | }); 171 | 172 | async.each( tasks, function ( task, callback ) { 173 | 174 | desk.Actions.execute( 175 | Object.assign( { action : "MATCH3D", 176 | input_volume1 : pointsFile[ task.j ], 177 | input_volume2 : pointsFile[ task.i ], 178 | stdout : true, 179 | silent : true 180 | }, options.match3d ), 181 | 182 | function ( err, response ) { 183 | if ( err ) { 184 | callback( err ); 185 | return; 186 | } 187 | matches[ task.j ][ task.i ] = JSON.parse( response.stderr ); 188 | callback(); 189 | } 190 | ); 191 | }, function ( err ) { 192 | callback ( err, matches ); 193 | } ); 194 | 195 | }); 196 | } 197 | 198 | function computeLSRegistration (res, callback) { 199 | matches = res; 200 | var solver = getSolver(); 201 | var numberOfEdges; 202 | var ok; 203 | 204 | solver.setInput(matches, options.edgeRemovalRatio, volumes, next); 205 | 206 | function next() { 207 | async.doWhilst( 208 | function (callback) { 209 | solver.iterate(true, function (state) { 210 | if (state.error) { 211 | ok = false; 212 | console.log("error : " + state.error); 213 | callback(state.error); 214 | return; 215 | } 216 | result = state; 217 | numberOfEdges = state.numberOfEdges; 218 | ok = numberOfEdges > matches.length * options.finalEdgesRatio; 219 | callback(); 220 | }); 221 | }, 222 | function () { 223 | return ok; 224 | }, function (err) { 225 | // convert typed arrays to arrays 226 | if (result) { 227 | result.valences = 
Array.prototype.slice.call(result.valences); 228 | result.positions = Array.prototype.slice.call(result.positions); 229 | } 230 | callback (err, result); 231 | }); 232 | } 233 | } 234 | 235 | }; 236 | 237 | return LSRegistration; 238 | } 239 | 240 | const LSRegistration = getLSRegistration(); 241 | 242 | if ( typeof define === 'function' && define.amd ) { 243 | 244 | define( 'LSRegistration', LSRegistration ); 245 | 246 | } else if ( 'undefined' !== typeof exports && 'undefined' !== typeof module ) { 247 | 248 | module.exports = LSRegistration; 249 | 250 | } else { 251 | self.LSRegistration = LSRegistration; 252 | } 253 | 254 | -------------------------------------------------------------------------------- /registration/frog.cxx: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | #include "imageGroup.h" 5 | 6 | using std::cout, std::endl; 7 | 8 | int main( int argc, char *argv[] ) { 9 | 10 | auto start = std::chrono::system_clock::now(); 11 | ImageGroup group; 12 | 13 | if ( argc < 2 ) { 14 | 15 | cout << "Usage : frog inputPairs.bin [options]"<< endl; 16 | cout << "Options : " << endl; 17 | 18 | cout << endl << "*Linear registration:" << endl; 19 | cout << "-dlinear 0/1 : display linear parameters during registration. default : " << group.printLinear << endl; 20 | cout << "-lanchor x y z: set initialization anchor relative position. Default : " << 21 | group.linearInitializationAnchor[ 0 ] << " " << group.linearInitializationAnchor[ 1 ] << 22 | " " << group.linearInitializationAnchor[ 2 ] << endl; 23 | cout << "-la : set alpha. Default : " << group.linearAlpha << endl; 24 | cout << "-li number : number of iterations. Default : " << group.linearIterations << endl; 25 | cout << "-s 0/1 : use scale. Default : " << group.useScale << endl; 26 | 27 | cout << endl << "*Deformable registration:" << endl; 28 | cout << "-da value : set alpha. 
Default : " << group.deformableAlpha << endl; 29 | cout << "-di number : number of iterations for each level. Default : " << group.deformableIterations << endl; 30 | cout << "-dl number : number of levels. Default : " << group.deformableLevels << endl; 31 | cout << "-g spacing : initial grid spacing. Default : " << group.initialGridSize << endl; 32 | cout << "-gd 0/1 : guaranteed diffeomorphism. Default : " << group.guaranteeDiffeomorphism << endl; 33 | cout << "-gm ratio : maximal displacement ratio to guarantee diffeomorphism. Default : " << group.maxDisplacementRatio << endl; 34 | 35 | cout << endl << "*EM Weighting:" << endl; 36 | cout << "-dstats 0/1 : display stats during registration. Default : " << group.printStats << endl; 37 | cout << "-emi number : max number of iterations for EM weighting. Default : " << Stats::maxIterations << endl; 38 | cout << "-si number : interval update for statistics. Default : " << group.statIntervalUpdate << endl; 39 | cout << "-se number : stats epsilon. Default : " << Stats::epsilon << endl; 40 | cout << "-ss number : stats maximal sample size. Default : " << Stats::maxSize << endl; 41 | cout << "-t threshold : inlier probability threshold. Default : " << group.inlierThreshold << endl; 42 | 43 | cout << endl << "*Registration with fixed images:" << endl; 44 | cout << "-fi number : number of fixed images. Default : " << group.numberOfFixedImages << endl; 45 | cout << "-fd path : fixed images transforms directory." << endl; 46 | cout << "-r 0/1 : use RANSAC instead of linear registration. Default : " << group.useRANSAC << endl; 47 | cout << "-ri number : number of RANSAC iterations. Default : " << group.numberOfRANSACIterations << endl; 48 | cout << "-rs maxScale : maximum allowed scale for RANSAC iterations. Default : " << group.RANSACMaxScale << endl; 49 | cout << "-rid value : RANSAC inlier distance. 
Default : " << group.RANSACInlierDistance << endl; 50 | 51 | cout << endl << "*Reference landmarks:" << endl; 52 | cout << "-l path : path containing reference landmarks." << endl; 53 | cout << "-lc path : path containing constraint landmarks." << endl; 54 | cout << "-il 0/1 : invert landmarks x and y coordinates. Default : " << group.invertLandmarksCoordinates << endl; 55 | cout << "-lcw path : landmarks constraints weight. Default: " << group.landmarksConstraintsWeight << endl; 56 | 57 | cout << endl << "*Other parameters:" << endl; 58 | cout << "-nt number : set number of threads. Default : number of cores" << endl; 59 | cout << "-mf file : path+name of measure.csv file." << endl; 60 | cout << "-wp 0/1 : write pairs, distances and probabilities. Default : " << group.writePairs << endl; 61 | cout << "-j : outputs a single big JSON file for each transform. Default : " << group.writeSingleFileTransforms << endl; 62 | cout << "-ts subdir : subdirectory where transforms will be written. Default : " << group.transformSubdirectory << endl; 63 | 64 | return( 1 ); 65 | 66 | } 67 | 68 | cout << "Reading : " << argv[ 1 ] << endl; 69 | group.readPairs( argv[ 1 ] ); 70 | 71 | int argumentsIndex = 2; 72 | char *landmarks = 0; 73 | 74 | while (argumentsIndex < argc) { 75 | 76 | char* key = argv[argumentsIndex]; 77 | char *value = argv[argumentsIndex + 1]; 78 | int increment = 2; 79 | 80 | if ( strcmp( key, "-da" ) == 0) { 81 | group.deformableAlpha = atof( value ); 82 | } 83 | 84 | if ( strcmp( key, "-dlinear" ) == 0) { 85 | group.printLinear = atoi( value ); 86 | } 87 | 88 | if ( strcmp( key, "-dstats" ) == 0) { 89 | group.printStats = atoi( value ); 90 | } 91 | 92 | if ( strcmp( key, "-di" ) == 0 ) { 93 | group.deformableIterations = atoi( value ); 94 | } 95 | 96 | if ( strcmp( key, "-dl" ) == 0 ) { 97 | group.deformableLevels = atoi( value ); 98 | } 99 | 100 | if ( strcmp( key, "-emi" ) == 0 ) { 101 | Stats::maxIterations = atoi( value ); 102 | } 103 | 104 | if ( strcmp( 
key, "-fi" ) == 0 ) { 105 | group.numberOfFixedImages = atoi( value ); 106 | } 107 | 108 | if ( strcmp( key, "-fd" ) == 0 ) { 109 | group.fixedTransformsDirectory = value; 110 | } 111 | 112 | if ( strcmp( key, "-g" ) == 0 ) { 113 | group.initialGridSize = atof( value ); 114 | } 115 | 116 | if ( strcmp( key, "-gd" ) == 0 ) { 117 | group.guaranteeDiffeomorphism = atoi( value ); 118 | } 119 | 120 | if ( strcmp( key, "-gm" ) == 0 ) { 121 | group.maxDisplacementRatio = atof( value ); 122 | } 123 | 124 | if ( strcmp( key, "-il" ) == 0 ) { 125 | group.invertLandmarksCoordinates = atoi( value ); 126 | } 127 | 128 | if ( strcmp( key, "-lanchor" ) == 0) { 129 | increment = 4; 130 | for ( int i = 0; i < 3; i++ ) 131 | group.linearInitializationAnchor[ i ] = 132 | atof( argv[argumentsIndex + 1 + i ] ); 133 | } 134 | 135 | if ( strcmp( key, "-la" ) == 0) { 136 | group.linearAlpha = atof( value ); 137 | } 138 | 139 | if ( strcmp( key, "-li" ) == 0 ) { 140 | group.linearIterations = atoi( value ); 141 | } 142 | 143 | if ( strcmp( key, "-nt" ) == 0 ) { 144 | omp_set_num_threads( atoi( value ) ); 145 | } 146 | 147 | if ( strcmp( key, "-r" ) == 0 ) { 148 | group.useRANSAC = atoi( value ); 149 | } 150 | 151 | if ( strcmp( key, "-ri" ) == 0 ) { 152 | group.numberOfRANSACIterations = atoi( value ); 153 | } 154 | 155 | if ( strcmp( key, "-rs" ) == 0 ) { 156 | group.RANSACMaxScale = atof( value ); 157 | } 158 | 159 | if ( strcmp( key, "-rid" ) == 0 ) { 160 | group.RANSACInlierDistance = atof( value ); 161 | } 162 | 163 | if ( strcmp( key, "-s" ) == 0 ) { 164 | group.useScale = atoi( value ); 165 | } 166 | 167 | if ( strcmp( key, "-se" ) == 0 ) { 168 | Stats::epsilon = atof( value ); 169 | } 170 | 171 | if ( strcmp( key, "-si" ) == 0 ) { 172 | group.statIntervalUpdate = atoi( value ); 173 | } 174 | 175 | if ( strcmp( key, "-ss" ) == 0 ) { 176 | Stats::maxSize = atoi( value ); 177 | } 178 | 179 | if ( strcmp( key, "-t" ) == 0 ) { 180 | group.inlierThreshold = atof( value ); 181 | } 182 | 
183 | if ( strcmp( key, "-ts" ) == 0 ) { 184 | group.transformSubdirectory = std::string( value ); 185 | } 186 | 187 | if ( strcmp( key, "-l" ) == 0 ) { 188 | group.addLandmarks( value ); 189 | } 190 | 191 | if ( strcmp( key, "-lc" ) == 0 ) { 192 | group.addLandmarks( value, true ); 193 | } 194 | 195 | if ( strcmp( key, "-lcw" ) == 0 ) { 196 | group.landmarksConstraintsWeight = atof( value ); 197 | } 198 | 199 | if ( strcmp( key, "-mf" ) == 0 ) { 200 | group.outputFileName = value; 201 | } 202 | 203 | if ( strcmp( key, "-wp" ) == 0 ) { 204 | group.writePairs = atoi( value ); 205 | } 206 | 207 | if ( strcmp( key, "-j" ) == 0 ) { 208 | group.writeSingleFileTransforms = true; 209 | increment = 1; 210 | } 211 | 212 | argumentsIndex += increment; 213 | 214 | } 215 | 216 | group.run(); 217 | auto end = std::chrono::system_clock::now(); 218 | cout << "Total time : " << std::chrono::duration(end-start).count() << "s" << endl; 219 | return 0; 220 | 221 | } 222 | -------------------------------------------------------------------------------- /js/lib/FROG.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | ( function () { 4 | 5 | const { THREE, _, async, desk, EventEmitter } = window; 6 | const FROG = {}; 7 | 8 | FROG.computeRigidGroupwiseRegistration = async function ( files, options ) { 9 | 10 | const registration = await desk.Actions.executeAsync( { 11 | 12 | action : 'LSRegistration', 13 | files : await desk.FileSystem.writeCachedFileAsync( 'volumes.json', JSON.stringify( files ) ) 14 | 15 | } ); 16 | 17 | const result = await desk.FileSystem.readFileAsync( registration.outputDirectory + "registration.json" ); 18 | const positions = JSON.parse( result ).positions; 19 | 20 | return files.map( ( file, index ) => ( { 21 | 22 | volume : file, 23 | translation : positions.slice( 4 * index, 4 + 4 * index ) 24 | 25 | } ) ); 26 | 27 | }; 28 | 29 | FROG.DeformableGroupwiseRegistration = function ( volumes, options ) { 30 
| 31 | EventEmitter.call( this ); 32 | Object.assign( this, EventEmitter.prototype); 33 | 34 | this.volumes = volumes; 35 | this.options = options || {}; 36 | 37 | }; 38 | 39 | FROG.DeformableGroupwiseRegistration.prototype.execute = async function () { 40 | 41 | let volumes = this.volumes; 42 | const options = this.options; 43 | 44 | const globalParams = options.globalParams || {}; 45 | const RAWParams = options.RAWParams || {}; 46 | const SURF3DParams = options.SURF3DParams || {}; 47 | const SIFT3DParams = options.SIFT3DParams || {}; 48 | const matchParams = options.matchParams || {}; 49 | const registrationParams = options.registrationParams || {}; 50 | 51 | const useSURF = ( options.useSURF !== undefined ) ? options.useSURF : true; 52 | const useSIFT = ( options.useSIFT !== undefined ) ? options.useSIFT : false; 53 | const useRAW = ( options.useRAW !== undefined ) ? options.useRAW : false; 54 | 55 | const rootDir = ( await desk.Actions.executeAsync( { 56 | 57 | action : "getRootDir", 58 | stdout : true 59 | 60 | } ) ).stdout; 61 | 62 | const actions = []; 63 | 64 | if ( useRAW ) { 65 | 66 | actions.push( Object.assign( { action : "SURF3D" }, RAWParams, 67 | { type : 1 } ) ); 68 | 69 | } 70 | 71 | if ( useSURF ) { 72 | 73 | actions.push( Object.assign( { action : "SURF3D" }, SURF3DParams ) ); 74 | 75 | } 76 | 77 | if ( useSIFT ) { 78 | 79 | actions.push( Object.assign( { action : "SIFT3D" }, SIFT3DParams ) ); 80 | 81 | } 82 | 83 | if ( options.customPoints ) { 84 | 85 | actions.push( { action : "custom", customPoints : options.customPoints } ); 86 | 87 | } 88 | 89 | let matchs = []; 90 | const pointSets = []; 91 | 92 | for ( let action of actions ) { 93 | 94 | let count = 0; 95 | 96 | const points = await async.mapLimit( volumes, 3, async volume => { 97 | 98 | const extractor = action.customPoints ? 
{ 99 | 100 | outputPoints : action.customPoints[ volumes.indexOf( volume ) ] 101 | 102 | } : await desk.Actions.executeAsync( Object.assign( { 103 | inputVolume : volume.volume }, action, globalParams ) ); 104 | 105 | let outputPoints = extractor.outputPoints; 106 | 107 | const output = { 108 | 109 | points : extractor.outputPoints, 110 | extractor : extractor, 111 | csv : rootDir + outputPoints + ',' + volume.translation.join( ',' ) 112 | 113 | }; 114 | 115 | if ( action.action === "SURF3D" && options.useTFModel ) { 116 | 117 | const descriptor = await desk.Actions.executeAsync( { 118 | action : "applyTFModel", inputCSV : extractor.outputPoints 119 | } ); 120 | 121 | outputPoints = descriptor.outputPoints; 122 | output.descriptor = descriptor; 123 | 124 | } 125 | 126 | count++; 127 | 128 | this.emit( 'log', action.action + ' keypoints: ' + 129 | count + '/' + volumes.length + ' done' ); 130 | 131 | return output; 132 | 133 | } ); 134 | 135 | pointSets.push( points ); 136 | 137 | const csvFile = await desk.FileSystem.writeCachedFileAsync( 138 | "volumes.csv", points.map( obj => obj.csv ).join( "\n" ) ); 139 | 140 | this.emit( 'log', 'Computing ' + action.action + ' matches' ); 141 | 142 | const match = await desk.Actions.executeAsync( Object.assign( { 143 | 144 | action : "match", 145 | inputFile: csvFile, 146 | stdout : true, 147 | cache : points.map( points => points.points ) 148 | 149 | }, matchParams, globalParams ), { listener : mes => this.emit( 'matchLog', mes.data ) } 150 | 151 | ); 152 | 153 | this.emit( 'log', 'Total pairing time (ms): ' + match.duration ); 154 | this.emit( 'log' , match.stdout.split( '\n').filter( line => line.includes( 'Nb Match' ) )[ 0 ] ); 155 | 156 | matchs.push( match ); 157 | } 158 | 159 | let match = matchs[ 0 ]; 160 | let points = pointSets[ 0 ]; 161 | 162 | if ( matchs.length > 1 ) { 163 | 164 | match = await desk.Actions.executeAsync( { 165 | 166 | action : "mergePairs", 167 | pairs1 : matchs[ 0 ].outputPairs, 168 | pairs2 : 
matchs[ 1 ].outputPairs 169 | 170 | } ); 171 | 172 | } 173 | 174 | this.emit( 'log', 'Matches computed' ); 175 | this.emit( 'log', 'Computing deformable registration' ); 176 | 177 | const registration = await desk.Actions.executeAsync( Object.assign( { 178 | 179 | action : "frog", 180 | inputPairs : match.outputPairs, 181 | nImages : volumes.length, 182 | 183 | }, registrationParams, globalParams ), { listener : mes => this.emit( 'registrationLog', mes.data ) } ); 184 | 185 | this.emit( 'log', 'Registration done' ); 186 | this.emit( 'log', 'Total registration time (ms): ' + registration.duration ); 187 | const output = _.cloneDeep( volumes ); 188 | output.match = match; 189 | 190 | output.forEach( ( file, index ) => { 191 | 192 | file.points = points[ index ].points; 193 | file.extractor = points[ index ].extractor; 194 | delete file.extractor.status; 195 | delete file.extractor.handle; 196 | file.transform = registration.outputDirectory + "transforms/" + index + '.json'; 197 | 198 | } ); 199 | 200 | await desk.FileSystem.writeJSONAsync( registration.outputDirectory + 201 | "registration.json", output ); 202 | 203 | return { match, registration, volumes : output }; 204 | }; 205 | 206 | 207 | FROG.CommonSpaceMeanImage = function ( opts ) { 208 | 209 | EventEmitter.call( this ); 210 | Object.assign( this, EventEmitter.prototype); 211 | this.opts = opts || {}; 212 | 213 | }; 214 | 215 | FROG.CommonSpaceMeanImage.prototype.execute = async function () { 216 | 217 | const opts = this.opts; 218 | const registration = opts.registration; 219 | 220 | const bbox = JSON.parse( await desk.FileSystem.readFileAsync( 221 | registration.outputDirectory + "bbox.json" ) ).bbox; 222 | 223 | let min = new THREE.Vector3().fromArray( bbox[ 0 ] ); 224 | let max = new THREE.Vector3().fromArray( bbox[ 1 ] ); 225 | 226 | let target; 227 | 228 | if ( opts.targetAverageVolume ) { 229 | 230 | target = opts.targetAverageVolume; 231 | let samplingRatio = opts.samplingRatio || 1; 232 | 233 | if ( 
samplingRatio < 1 ) { 234 | 235 | target = ( await desk.Actions.executeAsync( { 236 | 237 | action : "volume_subsample", 238 | input_volume : target, 239 | ratio : samplingRatio 240 | 241 | } ) ).outputDirectory + "out.mhd"; 242 | 243 | } 244 | 245 | } else { 246 | 247 | const spacing = opts.spacing || 2; 248 | max.sub( min ).divideScalar( spacing ).ceil(); 249 | 250 | const dummyVolumeGenerator = await desk.Actions.executeAsync( { 251 | 252 | action : "DummyVolumeGenerator", 253 | ox : min.x, 254 | oy : min.y, 255 | oz : min.z, 256 | sx : spacing, 257 | sy : spacing, 258 | sz : spacing, 259 | dx : max.x, 260 | dy : max.y, 261 | dz : max.z 262 | 263 | } ); 264 | 265 | target = dummyVolumeGenerator.outputDirectory + "volume.mhd"; 266 | 267 | } 268 | 269 | let count = 0; 270 | this.emit( 'log', 'Transforming images. ' ); 271 | 272 | const files = opts.volumes.map( volume => volume.volume ); 273 | const transformedVolumes = await async.mapLimit( files, 4, async file => { 274 | 275 | const index = files.indexOf( file ); 276 | 277 | const transformVolume = await desk.Actions.executeAsync( { 278 | 279 | action : 'VolumeTransform', 280 | source : file, 281 | transform : registration.outputDirectory + "transforms/" + index + '.json', 282 | reference : target 283 | // outputFileName : "output.mhd" 284 | 285 | } ); 286 | 287 | count++; 288 | this.emit( 'log', 'Transforming images. 
' + count + '/' + files.length + ' done' ); 289 | return transformVolume.outputDirectory + "output.mhd"; 290 | 291 | } ); 292 | 293 | 294 | const averageVolumes = await desk.Actions.executeAsync( { 295 | 296 | action : 'averageVolumes', 297 | inputVolumes : transformedVolumes, 298 | normalize : 0 299 | 300 | } ); 301 | 302 | return { transformedVolumes, 303 | averageVolume : averageVolumes.outputDirectory + 'average.nii.gz' }; 304 | 305 | }; 306 | 307 | window.FROG = FROG; 308 | } ) (); -------------------------------------------------------------------------------- /js/lib/heap.js: -------------------------------------------------------------------------------- 1 | // Generated by CoffeeScript 1.8.0 2 | (function() { 3 | var Heap, defaultCmp, floor, heapify, heappop, heappush, heappushpop, heapreplace, insort, min, nlargest, nsmallest, updateItem, _siftdown, _siftup; 4 | 5 | floor = Math.floor, min = Math.min; 6 | 7 | 8 | /* 9 | Default comparison function to be used 10 | */ 11 | 12 | defaultCmp = function(x, y) { 13 | if (x < y) { 14 | return -1; 15 | } 16 | if (x > y) { 17 | return 1; 18 | } 19 | return 0; 20 | }; 21 | 22 | 23 | /* 24 | Insert item x in list a, and keep it sorted assuming a is sorted. 25 | 26 | If x is already in a, insert it to the right of the rightmost x. 27 | 28 | Optional args lo (default 0) and hi (default a.length) bound the slice 29 | of a to be searched. 30 | */ 31 | 32 | insort = function(a, x, lo, hi, cmp) { 33 | var mid; 34 | if (lo == null) { 35 | lo = 0; 36 | } 37 | if (cmp == null) { 38 | cmp = defaultCmp; 39 | } 40 | if (lo < 0) { 41 | throw new Error('lo must be non-negative'); 42 | } 43 | if (hi == null) { 44 | hi = a.length; 45 | } 46 | while (lo < hi) { 47 | mid = floor((lo + hi) / 2); 48 | if (cmp(x, a[mid]) < 0) { 49 | hi = mid; 50 | } else { 51 | lo = mid + 1; 52 | } 53 | } 54 | return ([].splice.apply(a, [lo, lo - lo].concat(x)), x); 55 | }; 56 | 57 | 58 | /* 59 | Push item onto heap, maintaining the heap invariant. 
60 | */ 61 | 62 | heappush = function(array, item, cmp) { 63 | if (cmp == null) { 64 | cmp = defaultCmp; 65 | } 66 | array.push(item); 67 | return _siftdown(array, 0, array.length - 1, cmp); 68 | }; 69 | 70 | 71 | /* 72 | Pop the smallest item off the heap, maintaining the heap invariant. 73 | */ 74 | 75 | heappop = function(array, cmp) { 76 | var lastelt, returnitem; 77 | if (cmp == null) { 78 | cmp = defaultCmp; 79 | } 80 | lastelt = array.pop(); 81 | if (array.length) { 82 | returnitem = array[0]; 83 | array[0] = lastelt; 84 | _siftup(array, 0, cmp); 85 | } else { 86 | returnitem = lastelt; 87 | } 88 | return returnitem; 89 | }; 90 | 91 | 92 | /* 93 | Pop and return the current smallest value, and add the new item. 94 | 95 | This is more efficient than heappop() followed by heappush(), and can be 96 | more appropriate when using a fixed size heap. Note that the value 97 | returned may be larger than item! That constrains reasonable use of 98 | this routine unless written as part of a conditional replacement: 99 | if item > array[0] 100 | item = heapreplace(array, item) 101 | */ 102 | 103 | heapreplace = function(array, item, cmp) { 104 | var returnitem; 105 | if (cmp == null) { 106 | cmp = defaultCmp; 107 | } 108 | returnitem = array[0]; 109 | array[0] = item; 110 | _siftup(array, 0, cmp); 111 | return returnitem; 112 | }; 113 | 114 | 115 | /* 116 | Fast version of a heappush followed by a heappop. 117 | */ 118 | 119 | heappushpop = function(array, item, cmp) { 120 | var _ref; 121 | if (cmp == null) { 122 | cmp = defaultCmp; 123 | } 124 | if (array.length && cmp(array[0], item) < 0) { 125 | _ref = [array[0], item], item = _ref[0], array[0] = _ref[1]; 126 | _siftup(array, 0, cmp); 127 | } 128 | return item; 129 | }; 130 | 131 | 132 | /* 133 | Transform list into a heap, in-place, in O(array.length) time. 
134 | */ 135 | 136 | heapify = function(array, cmp) { 137 | var i, _i, _j, _len, _ref, _ref1, _results, _results1; 138 | if (cmp == null) { 139 | cmp = defaultCmp; 140 | } 141 | _ref1 = (function() { 142 | _results1 = []; 143 | for (var _j = 0, _ref = floor(array.length / 2); 0 <= _ref ? _j < _ref : _j > _ref; 0 <= _ref ? _j++ : _j--){ _results1.push(_j); } 144 | return _results1; 145 | }).apply(this).reverse(); 146 | _results = []; 147 | for (_i = 0, _len = _ref1.length; _i < _len; _i++) { 148 | i = _ref1[_i]; 149 | _results.push(_siftup(array, i, cmp)); 150 | } 151 | return _results; 152 | }; 153 | 154 | 155 | /* 156 | Update the position of the given item in the heap. 157 | This function should be called every time the item is being modified. 158 | */ 159 | 160 | updateItem = function(array, item, cmp) { 161 | var pos; 162 | if (cmp == null) { 163 | cmp = defaultCmp; 164 | } 165 | pos = array.indexOf(item); 166 | if (pos === -1) { 167 | return; 168 | } 169 | _siftdown(array, 0, pos, cmp); 170 | return _siftup(array, pos, cmp); 171 | }; 172 | 173 | 174 | /* 175 | Find the n largest elements in a dataset. 176 | */ 177 | 178 | nlargest = function(array, n, cmp) { 179 | var elem, result, _i, _len, _ref; 180 | if (cmp == null) { 181 | cmp = defaultCmp; 182 | } 183 | result = array.slice(0, n); 184 | if (!result.length) { 185 | return result; 186 | } 187 | heapify(result, cmp); 188 | _ref = array.slice(n); 189 | for (_i = 0, _len = _ref.length; _i < _len; _i++) { 190 | elem = _ref[_i]; 191 | heappushpop(result, elem, cmp); 192 | } 193 | return result.sort(cmp).reverse(); 194 | }; 195 | 196 | 197 | /* 198 | Find the n smallest elements in a dataset. 
199 | */ 200 | 201 | nsmallest = function(array, n, cmp) { 202 | var elem, i, los, result, _i, _j, _len, _ref, _ref1, _results; 203 | if (cmp == null) { 204 | cmp = defaultCmp; 205 | } 206 | if (n * 10 <= array.length) { 207 | result = array.slice(0, n).sort(cmp); 208 | if (!result.length) { 209 | return result; 210 | } 211 | los = result[result.length - 1]; 212 | _ref = array.slice(n); 213 | for (_i = 0, _len = _ref.length; _i < _len; _i++) { 214 | elem = _ref[_i]; 215 | if (cmp(elem, los) < 0) { 216 | insort(result, elem, 0, null, cmp); 217 | result.pop(); 218 | los = result[result.length - 1]; 219 | } 220 | } 221 | return result; 222 | } 223 | heapify(array, cmp); 224 | _results = []; 225 | for (i = _j = 0, _ref1 = min(n, array.length); 0 <= _ref1 ? _j < _ref1 : _j > _ref1; i = 0 <= _ref1 ? ++_j : --_j) { 226 | _results.push(heappop(array, cmp)); 227 | } 228 | return _results; 229 | }; 230 | 231 | _siftdown = function(array, startpos, pos, cmp) { 232 | var newitem, parent, parentpos; 233 | if (cmp == null) { 234 | cmp = defaultCmp; 235 | } 236 | newitem = array[pos]; 237 | while (pos > startpos) { 238 | parentpos = (pos - 1) >> 1; 239 | parent = array[parentpos]; 240 | if (cmp(newitem, parent) < 0) { 241 | array[pos] = parent; 242 | pos = parentpos; 243 | continue; 244 | } 245 | break; 246 | } 247 | return array[pos] = newitem; 248 | }; 249 | 250 | _siftup = function(array, pos, cmp) { 251 | var childpos, endpos, newitem, rightpos, startpos; 252 | if (cmp == null) { 253 | cmp = defaultCmp; 254 | } 255 | endpos = array.length; 256 | startpos = pos; 257 | newitem = array[pos]; 258 | childpos = 2 * pos + 1; 259 | while (childpos < endpos) { 260 | rightpos = childpos + 1; 261 | if (rightpos < endpos && !(cmp(array[childpos], array[rightpos]) < 0)) { 262 | childpos = rightpos; 263 | } 264 | array[pos] = array[childpos]; 265 | pos = childpos; 266 | childpos = 2 * pos + 1; 267 | } 268 | array[pos] = newitem; 269 | return _siftdown(array, startpos, pos, cmp); 270 | }; 
271 | 272 | Heap = (function() { 273 | Heap.push = heappush; 274 | 275 | Heap.pop = heappop; 276 | 277 | Heap.replace = heapreplace; 278 | 279 | Heap.pushpop = heappushpop; 280 | 281 | Heap.heapify = heapify; 282 | 283 | Heap.updateItem = updateItem; 284 | 285 | Heap.nlargest = nlargest; 286 | 287 | Heap.nsmallest = nsmallest; 288 | 289 | function Heap(cmp) { 290 | this.cmp = cmp != null ? cmp : defaultCmp; 291 | this.nodes = []; 292 | } 293 | 294 | Heap.prototype.push = function(x) { 295 | return heappush(this.nodes, x, this.cmp); 296 | }; 297 | 298 | Heap.prototype.pop = function() { 299 | return heappop(this.nodes, this.cmp); 300 | }; 301 | 302 | Heap.prototype.peek = function() { 303 | return this.nodes[0]; 304 | }; 305 | 306 | Heap.prototype.contains = function(x) { 307 | return this.nodes.indexOf(x) !== -1; 308 | }; 309 | 310 | Heap.prototype.replace = function(x) { 311 | return heapreplace(this.nodes, x, this.cmp); 312 | }; 313 | 314 | Heap.prototype.pushpop = function(x) { 315 | return heappushpop(this.nodes, x, this.cmp); 316 | }; 317 | 318 | Heap.prototype.heapify = function() { 319 | return heapify(this.nodes, this.cmp); 320 | }; 321 | 322 | Heap.prototype.updateItem = function(x) { 323 | return updateItem(this.nodes, x, this.cmp); 324 | }; 325 | 326 | Heap.prototype.clear = function() { 327 | return this.nodes = []; 328 | }; 329 | 330 | Heap.prototype.empty = function() { 331 | return this.nodes.length === 0; 332 | }; 333 | 334 | Heap.prototype.size = function() { 335 | return this.nodes.length; 336 | }; 337 | 338 | Heap.prototype.clone = function() { 339 | var heap; 340 | heap = new Heap(); 341 | heap.nodes = this.nodes.slice(0); 342 | return heap; 343 | }; 344 | 345 | Heap.prototype.toArray = function() { 346 | return this.nodes.slice(0); 347 | }; 348 | 349 | Heap.prototype.insert = Heap.prototype.push; 350 | 351 | Heap.prototype.top = Heap.prototype.peek; 352 | 353 | Heap.prototype.front = Heap.prototype.peek; 354 | 355 | Heap.prototype.has = 
Heap.prototype.contains; 356 | 357 | Heap.prototype.copy = Heap.prototype.clone; 358 | 359 | return Heap; 360 | 361 | })(); 362 | 363 | (function(root, factory) { 364 | if (typeof define === 'function' && define.amd) { 365 | return define([], factory); 366 | } else if (typeof exports === 'object') { 367 | return module.exports = factory(); 368 | } else { 369 | return root.Heap = factory(); 370 | } 371 | })(this, function() { 372 | return Heap; 373 | }); 374 | 375 | }).call(this); 376 | -------------------------------------------------------------------------------- /frog.json: -------------------------------------------------------------------------------- 1 | { 2 | "dataDirs" : { 3 | "FROG" : { 4 | "path" : "./js", 5 | "hidden" : true 6 | } 7 | }, 8 | 9 | "init" : [ 10 | "FROG/lib/laplaceSolver.js", 11 | "FROG/lib/LSRegistration.js", 12 | "FROG/lib/TextSpriteHelper.js", 13 | "FROG/lib/FROG.js" 14 | ], 15 | 16 | "actions": { 17 | "transformPoints" : { 18 | "parameters" : [ 19 | { 20 | "name" : "inputPoints", 21 | "type" : "file", 22 | "required" : true 23 | }, 24 | { 25 | "name" : "transform", 26 | "type" : "file", 27 | "required" : true 28 | } 29 | ], 30 | "dependencies" : [ 31 | "tools/transformIO.py" 32 | ], 33 | "engine" : "python", 34 | "executable" : "tools/transformPoints.py" 35 | }, 36 | "dummyVolumeGenerator" : { 37 | "parameters" : [ 38 | { 39 | "name" : "boundingBox", 40 | "type" : "file", 41 | "required" : true 42 | }, 43 | { 44 | "name" : "spacing", 45 | "type" : "float", 46 | "required" : true 47 | } 48 | ], 49 | "executable" : "bin/DummyVolumeGenerator" 50 | }, 51 | "labelContour" : { 52 | "parameters" : [ 53 | { 54 | "name" : "inputVolume", 55 | "type" : "file", 56 | "required" : true 57 | } 58 | ], 59 | "executable" : "tools/labelContour.py", 60 | "engine" : "python" 61 | }, 62 | "checkerBoard" : { 63 | "parameters" : [ 64 | { 65 | "name" : "inputVolume", 66 | "type" : "file", 67 | "required" : true 68 | } 69 | ], 70 | "executable" : 
"tools/checkerBoard.py", 71 | "engine" : "python" 72 | }, 73 | "createGrid" : { 74 | 75 | "parameters" : [ 76 | { 77 | "name" : "inputVolume", 78 | "type" : "file", 79 | "required" : true 80 | } 81 | ], 82 | "executable" : "tools/grid.py", 83 | "engine" : "python" 84 | }, 85 | "frog" : { 86 | "parameters" : [ 87 | { 88 | "name" : "inputPairs", 89 | "required" : true, 90 | "type" : "file" 91 | }, 92 | { 93 | "prefix" : "-si ", 94 | "name" : "statsInterval", 95 | "type" : "int" 96 | }, 97 | { 98 | "prefix" : "-di ", 99 | "name" : "deformableIterations", 100 | "type" : "int" 101 | }, 102 | { 103 | "prefix" : "-dl ", 104 | "name" : "deformableLevels", 105 | "type" : "int" 106 | }, 107 | { 108 | "prefix" : "-da ", 109 | "name" : "deformableAlpha", 110 | "type" : "float" 111 | }, 112 | { 113 | "prefix" : "-g ", 114 | "name" : "initialGridSize", 115 | "type" : "float" 116 | }, 117 | { 118 | "prefix" : "-l ", 119 | "name" : "landmarks", 120 | "type" : "directory" 121 | }, 122 | { 123 | "prefix" : "-li ", 124 | "name" : "linearIterations", 125 | "type" : "int" 126 | }, 127 | { 128 | "prefix" : "-dstats ", 129 | "name" : "displayStats", 130 | "type" : "int" 131 | }, 132 | { 133 | "prefix" : "-dlinear ", 134 | "name" : "displayLinear", 135 | "type" : "int" 136 | }, 137 | { 138 | "prefix" : "-t ", 139 | "name" : "inlierThreshold", 140 | "type" : "float" 141 | } 142 | ], 143 | "executable" : "bin/frog" 144 | }, 145 | "match" : { 146 | "parameters" : [ 147 | { 148 | "name" : "inputFile", 149 | "required" : true, 150 | "type" : "file" 151 | }, 152 | { 153 | "name" : "numberOfVolumes", 154 | "type" : "int", 155 | "prefix" : "-n " 156 | }, 157 | { 158 | "name" : "numberOfThreads", 159 | "type" : "int", 160 | "prefix" : "-nt " 161 | }, 162 | { 163 | 164 | "name" : "numberOfPointsPerImage", 165 | "type" : "int", 166 | "prefix" : "-np " 167 | 168 | }, 169 | { 170 | 171 | "name" : "responseThreshold", 172 | "type" : "float", 173 | "prefix" : "-sp " 174 | 175 | }, 176 | { 177 | 178 | 
"name" : "distance", 179 | "type" : "float", 180 | "prefix" : "-d " 181 | 182 | }, 183 | { 184 | 185 | "name" : "all", 186 | "type" : "int", 187 | "prefix" : "-all " 188 | 189 | }, 190 | { 191 | 192 | "name" : "distanceToSecond", 193 | "type" : "float", 194 | "prefix" : "-d2 " 195 | 196 | }, 197 | { 198 | 199 | "name" : "zMin", 200 | "type" : "float", 201 | "prefix" : "-zmin " 202 | 203 | }, 204 | { 205 | 206 | "name" : "zMax", 207 | "type" : "float", 208 | "prefix" : "-zmax " 209 | 210 | }, 211 | { 212 | "text" : "-o pairs.bin" 213 | } 214 | 215 | ], 216 | "output" : { 217 | "pairs" : "pairs.bin" 218 | }, 219 | 220 | "executable" : "bin/match" 221 | 222 | }, 223 | "meshTransform" : { 224 | "alias" : "MeshTransform", 225 | "parameters" : [ 226 | { 227 | "name" : "inputMesh", 228 | "alias" : "source", 229 | "type" : "file", 230 | "required" : true 231 | }, 232 | { 233 | "name" : "transform", 234 | "prefix" : "-t ", 235 | "type" : "file" 236 | }, 237 | { 238 | "name" : "inverseTransform", 239 | "prefix" : "-ti ", 240 | "type" : "file" 241 | }, 242 | { 243 | "name" : "outputFile", 244 | "prefix" : "-o ", 245 | "type" : "string" 246 | } 247 | ], 248 | "executable" : "bin/MeshTransform" 249 | }, 250 | "trimTransform" : { 251 | 252 | "parameters" : [ 253 | { 254 | "name" : "transform", 255 | "type" : "file", 256 | "required" : true 257 | }, 258 | { 259 | "name" : "numberOfLevels", 260 | "type" : "int" 261 | } 262 | ], 263 | "executable" : "tools/trimTransform.py", 264 | "engine" : "python" 265 | }, 266 | "VolumeTransform" : { 267 | 268 | "parameters" : [ 269 | { 270 | "name" : "source", 271 | "type" : "file", 272 | "required" : true 273 | }, 274 | { 275 | "name" : "reference", 276 | "type" : "file", 277 | "required" : true 278 | }, 279 | { 280 | "name" : "transform", 281 | "prefix" : "-t ", 282 | "type" : "file" 283 | }, 284 | { 285 | "name" : "outputFileName", 286 | "prefix" : "-o ", 287 | "type" : "string" 288 | }, 289 | { 290 | "name" : "inverseTransform", 291 | 
"prefix" : "-ti ", 292 | "type" : "file" 293 | }, 294 | { 295 | "name" : "interpolation", 296 | "prefix" : "-i ", 297 | "type" : "int" 298 | }, 299 | { 300 | "name" : "invertX", 301 | "prefix" : "-rx ", 302 | "type" : "int" 303 | }, 304 | { 305 | "name" : "backGroundLevel", 306 | "prefix" : "-b ", 307 | "type" : "float" 308 | } 309 | ], 310 | "executable" : "bin/VolumeTransform" 311 | }, 312 | "SURF3D": { 313 | "parameters": [ 314 | { 315 | "name": "inputVolume", 316 | "aliases" : [ "input_volume" ], 317 | "type": "file", 318 | "required": true 319 | }, 320 | { 321 | "prefix": "-type ", 322 | "name" : "type", 323 | "type": "int", 324 | "description" : "desccriptor type : 0 for SURF, 1 for raw sub volumes" 325 | }, 326 | { 327 | "prefix": "-n ", 328 | "name" : "numberOfPoints", 329 | "type": "int", 330 | "description" : "maxium number of points to extract" 331 | }, 332 | { 333 | "prefix": "-s ", 334 | "name" : "spacing", 335 | "aliases": [ "target_spacing" ], 336 | "type": "float" 337 | }, 338 | { 339 | "prefix": "-d ", 340 | "name": "maximum_dimension", 341 | "type": "int" 342 | }, 343 | { 344 | "prefix": "-t ", 345 | "name": "threshold", 346 | "type": "float" 347 | }, 348 | { 349 | "prefix": "-m ", 350 | "name": "mask", 351 | "type": "file" 352 | }, 353 | { 354 | "prefix": "-json ", 355 | "name": "writeJSON", 356 | "type": "int" 357 | }, 358 | { 359 | "prefix": "-bin ", 360 | "name": "writeBIN", 361 | "type": "int" 362 | }, 363 | { 364 | "prefix": "-csv ", 365 | "name": "writeCSV", 366 | "type": "int" 367 | }, 368 | { 369 | "prefix": "-csvgz ", 370 | "name": "writeCSVGZ", 371 | "type": "int" 372 | }, 373 | { 374 | "prefix": "-r ", 375 | "name": "radius", 376 | "type": "int" 377 | }, 378 | { 379 | 380 | "name" : "outputFileName", 381 | "type" : "string", 382 | "prefix" : "-o " 383 | 384 | } 385 | ], 386 | "output" : { 387 | "points" : "points.csv.gz" 388 | }, 389 | "executable": "bin/surf3d" 390 | }, 391 | "MATCH3D": { 392 | "parameters": [ 393 | { 394 | "name": 
"input_volume1", 395 | "type": "file", 396 | "required": true 397 | }, 398 | { 399 | "name": "input_volume2", 400 | "type": "file", 401 | "required": true 402 | }, 403 | { 404 | "prefix": "-Rd ", 405 | "name": "RansacDist", 406 | "type": "float" 407 | }, 408 | { 409 | "prefix": "-Ri ", 410 | "name": "RansacMinInliers", 411 | "type": "int" 412 | }, 413 | { 414 | "prefix": "-Md ", 415 | "name": "MatchingDist", 416 | "type": "float" 417 | }, 418 | { 419 | "prefix": "-Md2 ", 420 | "name": "MatchingDist2Second", 421 | "type": "float" 422 | }, 423 | { 424 | "prefix": "-Ms ", 425 | "name": "MatchingScale", 426 | "type": "float" 427 | }, 428 | { 429 | "prefix": "-b ", 430 | "name": "ComputeBoundingBoxes", 431 | "type": "int" 432 | }, 433 | { 434 | "prefix": "-bb ", 435 | "name": "BoundingBox", 436 | "type": "string" 437 | }, 438 | { 439 | "prefix": "-i ", 440 | "name": "writeInliers", 441 | "type": "flag" 442 | } 443 | ], 444 | "executable": "bin/match3d" 445 | }, 446 | "averageVolumes": { 447 | "parameters": [ 448 | { 449 | "name": "inputVolumes", 450 | "type": "fileArray", 451 | "required": true 452 | } 453 | ], 454 | "executable": "bin/AverageVolumes" 455 | }, 456 | "LSRegistration" : { 457 | "parameters": [ 458 | { 459 | "name": "files", 460 | "type": "file", 461 | "required": true 462 | } 463 | ], 464 | "executable": "js/bin/LSRegistration.js", 465 | "dependencies" : [ 466 | "SURF3D", 467 | "MATCH3D", 468 | "js/lib/LSRegistration.js", 469 | "js/lib/laplaceSolver.js", 470 | "js/lib/TextSpriteHelper.js", 471 | "js/lib/FROG.js" 472 | ] 473 | } 474 | 475 | } 476 | } 477 | -------------------------------------------------------------------------------- /js/lib/laplaceSolver.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var Heap = require ('heap'); 4 | 5 | 6 | var laplaceSolver = { 7 | worker : function () { 8 | const heapScript = 'FROG/lib/heap.js'; 9 | desk.FileSystem.getFileURL( heapScript ); // 
statification hack 10 | return operative( laplaceSolver, [ heapScript] ); 11 | }, 12 | 13 | laplacianValues : null, 14 | values : null, 15 | newValues : null, 16 | laplacianWeights : null, 17 | 18 | solve : function() { 19 | var matches = this.matches; 20 | var i, j, i3, j3, weight, pos; 21 | 22 | if(!this.isGraphConnex(true)) { 23 | console.error("Graph non connexe"); 24 | } 25 | 26 | if (!this.laplacianValues) { 27 | // init arrays 28 | this.laplacianValues = new Float64Array(matches.length * 4); 29 | this.values = new Float64Array(matches.length * 4); 30 | this.newValues = new Float64Array(matches.length * 4); 31 | this.laplacianWeights = new Uint16Array(matches.length); 32 | } 33 | 34 | for (i = 0 ; i < this.values.length ; i++) { 35 | this.values[i] = 0; 36 | } 37 | 38 | var laplacianValues = this.laplacianValues; 39 | var values = this.values; 40 | var newValues = this.newValues; 41 | var laplacianWeights = this.laplacianWeights; 42 | 43 | // reset arrays 44 | for (i = 0 ; i < laplacianValues.length ; i++) { 45 | laplacianValues[i] = 0; 46 | } 47 | 48 | for (i = 0 ; i < laplacianWeights.length ; i++) { 49 | laplacianWeights[i] = 0; 50 | } 51 | 52 | // compute laplacian 53 | for (i = 0 ; i < matches.length ; i++) { 54 | i3 = 4 * i; 55 | for (j = i + 1 ; j < matches.length ; j++) { 56 | if (matches[i][j].fail === true) { 57 | continue; 58 | } 59 | j3 = 4 * j; 60 | var translation = matches[i][j].translation; 61 | laplacianValues[i3] += translation[0]; 62 | laplacianValues[i3 + 1] += translation[1]; 63 | laplacianValues[i3 + 2] += translation[2]; 64 | laplacianValues[i3 + 3] += Math.log(matches[i][j].scale); 65 | laplacianValues[j3] -= translation[0]; 66 | laplacianValues[j3 + 1] -= translation[1]; 67 | laplacianValues[j3 + 2] -= translation[2]; 68 | laplacianValues[j3 + 3] -= Math.log(matches[i][j].scale); 69 | laplacianWeights[i] ++; 70 | laplacianWeights[j] ++; 71 | } 72 | } 73 | 74 | // normalize laplacian 75 | for (i = 0 ; i < matches.length ; i++) { 76 | 
// members for connexity tests
matches.length ; i++) tags[ i ] = 0; 145 | let stackPointer = 1; 146 | stack[ 0 ] = 0; 147 | 148 | while (stackPointer--) { 149 | 150 | let s = stack[ stackPointer ]; 151 | tags[ s ] = 1; 152 | 153 | for ( let t = 0 ; t < matches.length ; t++) { 154 | 155 | if ((t !=s ) && (matches[Math.min(t, s)][Math.max(t, s)] === undefined)) 156 | console.log("missing couple ", t, s); 157 | 158 | if ( (t !== s && tags[t] === 0) && 159 | ( matches[Math.min(t, s)][Math.max(t, s)].fail !== true) ) 160 | stack[stackPointer++] = t; 161 | 162 | } 163 | 164 | } 165 | 166 | let connex = true; 167 | 168 | for ( let i = 0; i < tags.length; i++) { 169 | 170 | if ( tags[ i ] === 0 ) { 171 | 172 | if (debug) { 173 | 174 | console.log("volume " + i + " is disconnected "); 175 | console.log(this.volumes[i]); 176 | 177 | } 178 | 179 | connex = false; 180 | 181 | } 182 | 183 | } 184 | 185 | if ( !connex && fix ) { 186 | 187 | console.log( "fix disconnected volumes:" ); 188 | const disconnected = []; 189 | let firstConnected = -1; 190 | 191 | for ( let i = 0; i < tags.length; i++) { 192 | 193 | if ( tags[ i ] === 0 ) disconnected.push( i ); 194 | else if ( firstConnected < 0 ) firstConnected = i; 195 | 196 | } 197 | 198 | console.log( disconnected ); 199 | console.log( "First connected volume : " + firstConnected ); 200 | 201 | for ( let i of disconnected ) { 202 | 203 | matches[ Math.min( firstConnected, i)][Math.max(firstConnected, i) ] = 204 | { 205 | fail : false, 206 | scale : 1, 207 | translation : [ 0, 0, 0 ] 208 | }; 209 | 210 | } 211 | 212 | } 213 | 214 | return connex; 215 | 216 | }, 217 | 218 | // members for edge removal 219 | heap : null, 220 | 221 | removeEdgeBatch : function () { 222 | 223 | if ( this.numberOfEdges === 0 ) this.getNumberOfEdges(); 224 | const matches = this.matches; 225 | 226 | if ( !this.heap ) { 227 | 228 | this.heap = new Heap(function(a, b) { 229 | if ( a.note !== b.note ) return a.note - b.note; 230 | if ( a.i !== b.i ) return a.i - b.i; 231 | return a.j - 
b.j; 232 | }); 233 | 234 | for ( let i = 0 ; i < matches.length ; i++) { 235 | for ( let j = i + 1 ; j < matches.length ; j++ ) { 236 | if (matches[i][j].fail === true) { 237 | continue; 238 | } 239 | 240 | this.heap.push( { note : matches[ i ][ j ].inliers, i, j } ); 241 | } 242 | } 243 | } 244 | 245 | const heap = this.heap; 246 | var toRemove = []; 247 | 248 | for ( let i = 0; i < this.edgeRemovalRatio * heap.size(); i++) { 249 | 250 | if ( heap.empty() ) break; 251 | toRemove.push(heap.pop()); 252 | 253 | } 254 | 255 | function removeEdge( edge ) { 256 | matches[ edge.i ][ edge.j ].fail = true; 257 | } 258 | 259 | function addEdge( edge ) { 260 | const e = matches[edge.i][edge.j]; 261 | e.fail = e.f; 262 | } 263 | 264 | const stack = [ toRemove ]; 265 | 266 | while ( stack.length ) { 267 | 268 | const edges = stack.pop(); 269 | for( let edge of edges ) removeEdge( edge ); 270 | 271 | if ( !this.isGraphConnex() ) { 272 | 273 | for( let edge of edges ) addEdge( edge ); 274 | if ( edges.length === 1 ) continue; 275 | const half = Math.round( edges.length / 2 ); 276 | stack.push( edges.slice( half, edges.length ) ); 277 | stack.push( edges.slice( 0, half ) ); 278 | 279 | } 280 | 281 | } 282 | 283 | }, 284 | 285 | numberOfEdges : 0, 286 | valence : null, 287 | 288 | getNumberOfEdges : function () { 289 | 290 | const matches = this.matches; 291 | if ( !this.valence ) this.valence = new Uint32Array(matches.length); 292 | this.numberOfEdges = 0; 293 | for( let i = 0 ; i < matches.length; i++) this.valence[ i ] = 0; 294 | 295 | for( let i = 0 ; i < matches.length ; i++) { 296 | for (let j = i + 1 ; j < matches.length ; j++) { 297 | if ( !matches[i][j].fail ) { 298 | this.numberOfEdges++; 299 | this.valence[ i ]++; 300 | this.valence[ j ]++; 301 | } 302 | } 303 | } 304 | }, 305 | 306 | iterate : function ( removeEdges, callback ) { 307 | /* if (!this.isGraphConnex(true)) { 308 | callback({error : "graph non connex"}); 309 | return; 310 | }*/ 311 | 312 | 
this.isGraphConnex(true, true ); 313 | if (removeEdges) this.removeEdgeBatch(); 314 | this.solve(); 315 | // set first coordinates to 0; 316 | var anchor = [this.values[0], this.values[1], this.values[2], this.values[3]]; 317 | 318 | // normalize laplacian 319 | for (let i = 0 ; i < this.matches.length ; i++) { 320 | for (let j = 0; j < 4; j++) { 321 | this.values[4 * i + j] -= anchor[j]; 322 | } 323 | } 324 | 325 | this.getNumberOfEdges(); 326 | 327 | callback({positions : this.values, valences : this.valence, numberOfEdges : this.numberOfEdges}); 328 | }, 329 | 330 | setInput : function ( matches, edgeRemovalRatio, volumes, callback ) { 331 | this.matches = matches; 332 | this.laplacianValues = 0; 333 | this.edgeRemovalRatio = edgeRemovalRatio; 334 | this.volumes = volumes; 335 | for ( let line of this.matches ) { 336 | for ( let t of line ) { 337 | if (!t) continue; 338 | t.f = t.fail; 339 | } 340 | } 341 | if (typeof callback === "function") callback(); 342 | }, 343 | 344 | resetEdges : function ( callback ) { 345 | 346 | this.numberOfEdges = this.heap = 0; 347 | 348 | for ( let line of this.matches ) { 349 | for ( let t of line ) { 350 | if (!t) continue; 351 | t.fail = t.f; 352 | } 353 | } 354 | 355 | callback(); 356 | 357 | } 358 | 359 | }; 360 | 361 | if ( typeof define === 'function' && define.amd ) { 362 | 363 | define( 'laplaceSolver', laplaceSolver ); 364 | 365 | } else if ( 'undefined' !== typeof exports && 'undefined' !== typeof module ) { 366 | 367 | module.exports = laplaceSolver; 368 | 369 | } else { 370 | self.laplaceSolver = laplaceSolver; 371 | } 372 | -------------------------------------------------------------------------------- /FROG.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import nibabel as nib 3 | import os 4 | from os import listdir 5 | from os.path import abspath, dirname, isdir, join, normpath 6 | import tempfile 7 | import time 8 | 9 | startTime = time.time() 10 | cwd 
= os.getcwd() 11 | volumesList = "volumes.txt" 12 | pairsFile = "pairs.bin" 13 | frogPath = normpath( join( dirname( __file__ ), "bin" ) ) 14 | 15 | parser = argparse.ArgumentParser( description = 'Register a group of volumes', formatter_class=argparse.ArgumentDefaultsHelpFormatter ) 16 | regParse = parser.add_argument_group('General options') 17 | regParse.add_argument( 'input', help = 'input list of files or directory' ) 18 | regParse.add_argument( '-se', '--skip-existing', dest="skipExisting", help = 'Do not recompute files if they already exist', action = "store_true" ) 19 | regParse.add_argument( '-limit', dest = 'limit', type = int, help = 'limit number of input files' ) 20 | regParse.add_argument( '-o', dest = 'outputDirectory', help = 'outputDirectory' ) 21 | regParse.add_argument( '-j', dest = 'useSingleJSONTransformFile', help = 'use a single JSON file to store each transform', action="store_true" ) 22 | frogParse = parser.add_argument_group('Registration options') 23 | frogParse.add_argument( '-dl', dest = 'deformableLevels', help = 'number of deformable levels', type = int ) 24 | frogParse.add_argument( '-di', dest = 'deformableIterations', help = 'number of deformable iterations per level', type = int ) 25 | frogParse.add_argument( '-g', dest = 'gridSpacing', type = float, help = 'initial grid spacing' ) 26 | frogParse.add_argument( '-lanchor', dest = 'linearInitializationAnchor', help = 'Linear initialization Anchor', type = float, nargs = 3, default = [0.5,0.5,0.5] ) 27 | frogParse.add_argument( '-li', dest = 'linearIterations', help = 'number of linear iterations', type = int ) 28 | frogParse.add_argument( '-ri', dest = 'RANSACIterations', help = 'number of RANSAC iterations', type = int ) 29 | frogParse.add_argument( '-l', dest = 'landmarks', help = 'path to landmarks file' ) 30 | frogParse.add_argument( '-il', dest = 'invertLandmarks', help = 'revert landmarks coordinates', type = int, default = 1 ) 31 | frogParse.add_argument( '-wp', dest = 
'writePairs', help = 'write list of pairs to file', action="store_true" ) 32 | matchParser = parser.add_argument_group('Match options') 33 | matchParser.add_argument( '-md', dest = 'matchDistance', type = float, default = 10000000000, help = 'maximum descriptor distance' ) 34 | matchParser.add_argument( '-d2', dest = 'ratio', type = float, default = 1, help = 'maximum second match distance ratio' ) 35 | 36 | SURFParser = parser.add_argument_group('SURF3D options') 37 | SURFParser.add_argument( '-m', dest = 'masks', help = 'path to masks' ) 38 | SURFParser.add_argument( '-cmin', type = float, help = 'min clamp image values' ) 39 | SURFParser.add_argument( '-cmax', type = float, help = 'max clamp image values' ) 40 | SURFParser.add_argument( '-p', dest = 'numberOfPoints', type = int, help = 'number of keypoints to extract', default = 20000 ) 41 | SURFParser.add_argument( '-pad', dest = 'padding', type = float, help = 'mirror padding length' ) 42 | SURFParser.add_argument( '-s', dest = 'spacing', type = float, help = 'spacing', default = 0.75 ) 43 | SURFParser.add_argument( '-t', dest = 'threshold', type = float, help = 'detector threshold', default = 0 ) 44 | SURFParser.add_argument( '-ras', dest = 'flipToRAS', help = 'ensure RAS orientation for input images', action="store_true" ) 45 | SURFParser.add_argument( '-rast', dest = 'useTempd', help = 'use in-memory temporary directory when converting to RAS', action="store_true" ) 46 | averageParse = parser.add_argument_group('Average image computing') 47 | averageParse.add_argument( '-a', dest = 'imageSpacing', type = float, help = 'spacing for average image. 
Not computed if not specified') 48 | averageParse.add_argument( '-ao', dest = 'averageImageOnly', help = 'only compute average image, skip registration', action="store_true" ) 49 | averageParse.add_argument( '-ad', dest = 'averageImageDirectory', help = 'Average image subdirectory' ) 50 | args = parser.parse_args() 51 | 52 | def separate(): 53 | print( "******************************************************" ) 54 | 55 | def execute( cmd ): 56 | start_time = time.time() 57 | separate() 58 | print( "Executing : " + cmd ) 59 | code = os.system( cmd ) 60 | print( "Command : " + cmd ) 61 | print( "Executed in " + str( round( time.time() - start_time ) ) + "s" ) 62 | print( "Exit code : " + str( code ) ) 63 | if code : raise( OSError( code ) ) 64 | 65 | def flipAndSaveToRAS( filename ): 66 | 67 | #Recover the image object 68 | imageObj = nib.load( filename ) 69 | 70 | #Get the current orientation 71 | CurrentOrientation = nib.aff2axcodes(imageObj.affine) 72 | print("The current orientation is : ", CurrentOrientation) 73 | 74 | #Check if the current orientation is already RAS+ 75 | if CurrentOrientation == ('R', 'A', 'S') : 76 | 77 | print("Image already recorded into the RAS+ orientation, nothing to do") 78 | return filename 79 | 80 | else : 81 | #Flip the image to RAS 82 | flippedImage = nib.as_closest_canonical(imageObj) 83 | 84 | ##Check the new orientation 85 | NewOrientation = nib.aff2axcodes(flippedImage.affine) 86 | img_data = flippedImage.get_fdata() 87 | img_conv = nib.Nifti1Image(img_data.astype(flippedImage.header.get_data_dtype()), flippedImage.affine, flippedImage.header) 88 | 89 | #Set Qcode to 1 that the Qform matrix can be used into the further processing 90 | img_conv.header['qform_code'] = 1 91 | 92 | #Save the flipped image 93 | nib.save(img_conv, RASFile ) 94 | 95 | print("The new orientation is now : ", NewOrientation) 96 | return RASFile 97 | 98 | def computeAverageImage( images ) : 99 | if not args.imageSpacing : return 100 | separate() 101 | 
print( "Compute average image" ) 102 | startTime = time.time() 103 | dummyBin = join( frogPath, "DummyVolumeGenerator" ) 104 | execute( " ".join( [ dummyBin, "bbox.json", str( args.imageSpacing ) ] ) ) 105 | dummyFile = "dummy.mhd"; 106 | if args.averageImageDirectory: 107 | if not os.path.exists( args.averageImageDirectory ): 108 | os.mkdir( args.averageImageDirectory ) 109 | for file in [ "dummy.mhd", "dummy.zraw" ]: 110 | os.rename( file, join( args.averageImageDirectory, file ) ) 111 | dummyFile = join( args.averageImageDirectory, dummyFile ) 112 | transformedImages = [] 113 | 114 | for i, image in enumerate( images ) : 115 | separate() 116 | transformBin = join( frogPath, "VolumeTransform" ) 117 | transformedImage = "transformed" + str( i ) + ".nii.gz" 118 | if args.averageImageDirectory: 119 | transformedImage = join( args.averageImageDirectory, transformedImage ) 120 | if args.flipToRAS: image = flipAndSaveToRAS( image ) 121 | execute( " ".join( [ transformBin, image, dummyFile, "-t transforms/" + str( i ) + ".json -o " + transformedImage ] ) ) 122 | transformedImages.append( transformedImage ) 123 | 124 | averageBin = join( frogPath, "AverageVolumes" ) 125 | execute( " ".join( [ averageBin, " ".join( transformedImages ) ] ) ) 126 | print( "Average image computed in " + str( round( time.time() - startTime ) ) + "s" ) 127 | if args.averageImageDirectory: 128 | for file in [ "average.nii.gz", "stdev.nii.gz" ]: 129 | os.rename( file, join( args.averageImageDirectory, file ) ) 130 | 131 | def getFileList( inputPath ) : 132 | 133 | files = [] 134 | 135 | if isdir( inputPath ) : 136 | for f in sorted( listdir( inputPath ) ) : 137 | for ext in [ ".nii.gz", ".mhd", ".csv.gz" ] : 138 | if f.endswith( ext ) : files.append( abspath( join( inputPath, f ) ) ) 139 | else: 140 | f = open( inputPath, mode = 'r' ) 141 | for element in f.read().split( "\n" ) : 142 | if element.startswith( "#" ) : continue 143 | for ext in [ ".nii.gz", ".mhd", ".csv.gz" ] : 144 | if 
element.endswith( ext ) : files.append( join( dirname( inputPath) , element ) ) 145 | 146 | f.close() 147 | 148 | return files 149 | 150 | 151 | files = getFileList( args.input ) 152 | if ( args.limit ) : files = files[ :args.limit ] 153 | print( "There are " + str( len( files ) ) + " files to register : " ) 154 | for f in files : print( f ) 155 | 156 | if args.outputDirectory : 157 | if not os.path.exists( args.outputDirectory ): 158 | print( "Output directory does not exist, create it : " + args.outputDirectory ) 159 | os.mkdir( args.outputDirectory ) 160 | os.chdir( args.outputDirectory ) 161 | 162 | tempD = 0 163 | RASFile = 0 164 | if args.useTempd : 165 | tempD = tempfile.TemporaryDirectory(); 166 | RASFile = join( tempD.name, "RAS.nii" ) 167 | else: 168 | RASFile = join( os.getcwd(), "RAS.nii.gz" ) 169 | 170 | if args.averageImageOnly: 171 | computeAverageImage( files ) 172 | exit( 0 ) 173 | 174 | maskFiles = [] 175 | if args.masks: maskFiles = getFileList( args.masks ) 176 | 177 | 178 | #### compute input volume keypoints if needed 179 | keypointFiles = [] 180 | 181 | for index, f in enumerate( files ): 182 | if f.endswith( ".csv.gz" ) : 183 | keypointFiles.append( f ) 184 | continue 185 | 186 | separate() 187 | pointsFile = "points" + str( len ( keypointFiles ) ) 188 | fullPointsFile = join( os.getcwd(), pointsFile + ".csv.gz") 189 | keypointFiles.append( fullPointsFile ) 190 | if ( args.skipExisting and os.path.exists( fullPointsFile ) ): 191 | print( "Points file ", fullPointsFile, "already exists, skipping" ) 192 | continue 193 | print ( "Extracting points from " + f ) 194 | if args.flipToRAS: f = flipAndSaveToRAS( f ) 195 | surfBin = join( frogPath, "surf3d" ) 196 | surfArgs = [ surfBin, f, "-s", str( args.spacing ), "-t", str( args.threshold ), "-n", str( args.numberOfPoints ), "-o", pointsFile ] 197 | if len( maskFiles ) : surfArgs.extend( [ "-m", maskFiles[ index ] ] ) 198 | if args.cmin != None : surfArgs.extend( [ "-cmin", str( args.cmin ) ] ) 199 
| if args.cmax != None : surfArgs.extend( [ "-cmax", str( args.cmax ) ] ) 200 | if args.padding != None : surfArgs.extend( [ "-pad", str( args.padding ) ] ) 201 | execute( " ".join( surfArgs ) ) 202 | 203 | separate() 204 | 205 | #### compute pairs 206 | if args.skipExisting and os.path.exists( pairsFile ): 207 | print( "Pairs file pairs.bin already exists, skipping computation" ) 208 | else: 209 | volumes = open( volumesList, "w" ) 210 | volumes.write( "\n".join( keypointFiles ) ) 211 | volumes.close() 212 | matchBin = join( frogPath, "match" ) 213 | matchCmd = " ".join( [ matchBin, volumesList, "-o", pairsFile, "-d", str( args.matchDistance ), "-np", str( args.numberOfPoints ), "-d2", str( args.ratio) ] ) 214 | execute( matchCmd ) 215 | 216 | #### register 217 | frogBin = join( frogPath, "frog" ) 218 | frogArgs = [ frogBin, "pairs.bin" ] 219 | if args.deformableLevels != None : frogArgs.extend( [ "-dl", str( args.deformableLevels ) ] ) 220 | if args.RANSACIterations : frogArgs.extend( [ "-ri", str( args.RANSACIterations ) ] ) 221 | if args.linearIterations : frogArgs.extend( [ "-li", str( args.linearIterations ) ] ) 222 | if args.deformableIterations : frogArgs.extend( [ "-di", str( args.deformableIterations ) ] ) 223 | if args.writePairs : frogArgs.append( "-wp 1" ) 224 | if args.landmarks : frogArgs.extend( [ "-l", args.landmarks ] ) 225 | if args.gridSpacing : frogArgs.extend( [ "-g", str( args.gridSpacing ) ] ) 226 | if args.invertLandmarks : frogArgs.extend( [ "-il", str( args.invertLandmarks ) ] ) 227 | if args.useSingleJSONTransformFile : frogArgs.extend( [ "-j" ] ) 228 | if args.linearInitializationAnchor : frogArgs.extend( [ "-lanchor", *map( lambda x : str( x ), args.linearInitializationAnchor ) ] ) 229 | execute( " ".join( frogArgs ) ) 230 | 231 | separate() 232 | print( "Registration done in " + str( round( time.time() - startTime ) ) + "s" ) 233 | 234 | computeAverageImage( files ) 235 | 
-------------------------------------------------------------------------------- /tools/transformIO.h: -------------------------------------------------------------------------------- 1 | #ifndef __readTransform__ 2 | #define __readTransform__ 3 | 4 | #include 5 | #include 6 | #include 7 | 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include "../vtkOpenSURF3D/picojson.h" 26 | 27 | using namespace picojson; 28 | 29 | vtkSmartPointer ReadPolyData(const std::string& fileName) { 30 | vtkSmartPointer polyData; 31 | std::string extension = fileName.substr(fileName.find_last_of(".") + 1); 32 | 33 | if (extension == "ply") { 34 | vtkSmartPointer reader = vtkSmartPointer::New(); 35 | reader->SetFileName(fileName.c_str()); 36 | reader->Update(); 37 | polyData = reader->GetOutput(); 38 | } else if (extension == "obj") { 39 | vtkSmartPointer reader = vtkSmartPointer::New(); 40 | reader->SetFileName(fileName.c_str()); 41 | reader->Update(); 42 | polyData = reader->GetOutput(); 43 | } else if (extension == "vtk") { 44 | vtkSmartPointer reader = vtkSmartPointer::New(); 45 | reader->SetFileName(fileName.c_str()); 46 | reader->Update(); 47 | polyData = reader->GetOutput(); 48 | } else if (extension == "stl") { 49 | vtkSmartPointer reader = vtkSmartPointer::New(); 50 | reader->SetFileName(fileName.c_str()); 51 | reader->Update(); 52 | polyData = reader->GetOutput(); 53 | } else { 54 | std::cerr << "Unsupported file format: " << extension << std::endl; 55 | return nullptr; 56 | } 57 | 58 | if (polyData->GetNumberOfPoints() == 0 || polyData->GetNumberOfCells() == 0) { 59 | std::cerr << "Failed to read polydata from file: " << fileName << std::endl; 60 | return nullptr; 61 | } 62 | 63 | return polyData; 64 | } 65 | 66 | bool WritePolyData( vtkSmartPointer polyData, const std::string& 
fileName) { 67 | std::string extension = fileName.substr(fileName.find_last_of(".") + 1); 68 | 69 | if (extension == "ply") { 70 | vtkSmartPointer writer = vtkSmartPointer::New(); 71 | writer->SetFileName(fileName.c_str()); 72 | writer->SetInputData(polyData); 73 | writer->Write(); 74 | } else if (extension == "stl") { 75 | vtkSmartPointer writer = vtkSmartPointer::New(); 76 | writer->SetFileName(fileName.c_str()); 77 | writer->SetInputData(polyData); 78 | writer->Write(); 79 | } else if (extension == "obj") { 80 | vtkSmartPointer writer = vtkSmartPointer::New(); 81 | writer->SetFileName(fileName.c_str()); 82 | writer->SetInputData(polyData); 83 | writer->Write(); 84 | } else if (extension == "vtk") { 85 | vtkSmartPointer writer = vtkSmartPointer::New(); 86 | writer->SetFileName(fileName.c_str()); 87 | writer->SetInputData(polyData); 88 | writer->Write(); 89 | } else if (extension == "vtp") { 90 | vtkSmartPointer writer = vtkSmartPointer::New(); 91 | writer->SetFileName(fileName.c_str()); 92 | writer->SetInputData(polyData); 93 | writer->Write(); 94 | } else { 95 | std::cerr << "Unsupported file format: " << extension << std::endl; 96 | return false; 97 | } 98 | 99 | return true; 100 | } 101 | 102 | void writeTFM( vtkGeneralTransform *generalTransform, const char *fileName ) { 103 | 104 | fstream fs; 105 | fs.open( fileName, fstream::out | fstream::trunc ); 106 | 107 | vtkAbstractTransform *trans = generalTransform->GetConcatenatedTransform( 0 ); 108 | vtkMatrixToLinearTransform *linear = ( vtkMatrixToLinearTransform *) trans; 109 | vtkMatrix4x4 *matrix = linear->GetInput(); 110 | 111 | for ( int i = 0; i < 3; i++ ) fs << matrix->GetElement( i, 3 ) << " "; 112 | fs << "-123456 "; 113 | 114 | for ( int i = 0; i < 3; i++ ) 115 | fs << matrix->GetElement( i, i ) << ( i < 2 ? 
" " : "" ); 116 | 117 | fs << endl; 118 | 119 | for ( int i = 1; i < generalTransform->GetNumberOfConcatenatedTransforms(); i++ ) { 120 | 121 | vtkBSplineTransform *transform = ( vtkBSplineTransform * ) 122 | generalTransform->GetConcatenatedTransform( i ); 123 | 124 | vtkImageData *imageData = transform->GetCoefficientData(); 125 | 126 | int dims[ 3 ]; 127 | double origin[ 3 ]; 128 | double spacing[ 3 ]; 129 | 130 | imageData->GetDimensions( dims ); 131 | imageData->GetSpacing( spacing ); 132 | imageData->GetOrigin( origin ); 133 | 134 | for ( int k = 0; k < 3; k ++ ) fs << dims[ k ] - 3 << " "; 135 | 136 | for ( int k = 0; k < 3; k++ ) { 137 | 138 | fs << origin[ k ] + spacing[ k ] << " "; 139 | fs << origin[ k ] + spacing[ k ] * ( dims[ k ] - 2 ); 140 | if ( k < 2 ) fs << " "; 141 | else fs << endl; 142 | 143 | } 144 | 145 | int count = 0; 146 | float *values = ( float * ) imageData->GetScalarPointer(); 147 | int nValues = dims[ 0 ] * dims[ 1 ] * dims[ 2 ]; 148 | 149 | for ( int j = 0; j < nValues; j++ ) { 150 | 151 | for ( int k = 0; k < 3; k++ ) fs << values[ count++ ] << " "; 152 | 153 | fs << "-123456 -123456" << endl; 154 | 155 | } 156 | 157 | } 158 | 159 | fs.close(); 160 | 161 | } 162 | 163 | void writeFrogJSON( vtkGeneralTransform *generalTransform, const char *fileName, bool compact = false ) { 164 | 165 | picojson::array transforms; 166 | int niiCounter = 0; 167 | 168 | for ( int i = 0; i < generalTransform->GetNumberOfConcatenatedTransforms(); i++ ) { 169 | 170 | auto transform = generalTransform->GetConcatenatedTransform( i ); 171 | auto name = transform->GetClassName(); 172 | picojson::object trans; 173 | trans[ "type" ] = picojson::value( name ); 174 | 175 | if ( strcmp( name, "vtkMatrixToLinearTransform" ) == 0 ) { 176 | 177 | picojson::array matrix; 178 | auto *m = ( ( vtkMatrixToLinearTransform *) transform )->GetInput(); 179 | 180 | for ( int i = 0; i < 4; i++ ) { 181 | 182 | for ( int j = 0; j < 4; j++ ) { 183 | 184 | matrix.push_back( 
picojson::value( m->GetElement( i, j ) ) ); 185 | 186 | } 187 | 188 | } 189 | 190 | trans[ "matrix" ] = picojson::value( matrix ); 191 | 192 | } else if ( strcmp( name, "vtkBSplineTransform" ) == 0 ) { 193 | 194 | vtkImageData *imageData = ( ( vtkBSplineTransform * ) transform )->GetCoefficientData(); 195 | 196 | if ( compact ) { 197 | 198 | std::string name( fileName ); 199 | name += "."; 200 | name += std::to_string( niiCounter++ ); 201 | name += ".nii.gz"; 202 | std::filesystem::path path( name ); 203 | trans[ "file" ] = picojson::value( path.filename() ); 204 | vtkNIFTIImageWriter *writer = vtkNIFTIImageWriter::New(); 205 | writer->SetInputData( imageData ); 206 | writer->SetFileName( name.c_str() ); 207 | writer->Write(); 208 | writer->Delete(); 209 | 210 | } else { 211 | 212 | int dims[ 3 ]; 213 | double ori[ 3 ]; 214 | double sp[ 3 ]; 215 | imageData->GetDimensions( dims ); 216 | imageData->GetSpacing( sp ); 217 | imageData->GetOrigin( ori ); 218 | picojson::array dimensions; 219 | picojson::array origin; 220 | picojson::array spacing; 221 | 222 | for ( int k = 0; k < 3; k ++ ) { 223 | 224 | dimensions.push_back( picojson::value( ( double ) dims[ k ] ) ); 225 | origin.push_back( picojson::value( ori[ k ] ) ); 226 | spacing.push_back( picojson::value( sp[ k ] ) ); 227 | 228 | } 229 | 230 | trans[ "dimensions" ] = picojson::value( dimensions ); 231 | trans[ "origin" ] = picojson::value( origin ); 232 | trans[ "spacing" ] = picojson::value( spacing ); 233 | int count = 0; 234 | float *values = ( float * ) imageData->GetScalarPointer(); 235 | int nValues = 3 * dims[ 0 ] * dims[ 1 ] * dims[ 2 ]; 236 | picojson::array coeffs; 237 | 238 | for ( int j = 0; j < nValues; j++ ) 239 | coeffs.push_back ( picojson::value( values[ j ] ) ); 240 | 241 | trans[ "coeffs" ] = picojson::value( coeffs ); 242 | 243 | } 244 | 245 | } 246 | 247 | transforms.push_back( picojson::value( trans ) ); 248 | 249 | } 250 | 251 | std::fstream fs; 252 | fs.open( fileName, fstream::out | 
fstream::trunc ); 253 | picojson::object root; 254 | root[ "transforms" ] = picojson::value( transforms ); 255 | fs << picojson::value( root ).serialize(); 256 | fs.close(); 257 | 258 | } 259 | 260 | vtkGeneralTransform *readTFM ( const char *fileName ) { 261 | 262 | std::ifstream file( fileName, std::ios::in ); 263 | 264 | if( file.fail() ) { 265 | 266 | std::cerr << "Cannot read transform file " << fileName << std::endl; 267 | exit( 0 ); 268 | 269 | } 270 | 271 | double translation[ 3 ]; 272 | file >> translation[ 0 ] >> translation[ 1 ] >> translation[ 2 ]; 273 | double magicNumber; 274 | file >> magicNumber; 275 | vtkGeneralTransform *transform = vtkGeneralTransform::New(); 276 | transform->PostMultiply(); 277 | vtkMatrixToLinearTransform *linear = vtkMatrixToLinearTransform::New(); 278 | vtkMatrix4x4 *matrix = vtkMatrix4x4::New(); 279 | matrix->Identity(); 280 | 281 | double scale[ 3 ] = { 1, 1, 1 }; 282 | 283 | if ( magicNumber == -123456 ) { 284 | 285 | // we have scales 286 | file >> scale[ 0 ] >> scale[ 1 ] >> scale[ 2 ]; 287 | 288 | } 289 | 290 | for ( int i = 0; i < 3; i++) { 291 | 292 | matrix->SetElement( i, i, scale[ i ] ); 293 | matrix->SetElement( i, 3, translation[ i ] ); 294 | 295 | } 296 | 297 | file.ignore ( std::numeric_limits< std::streamsize >::max(), '\n' ); 298 | linear->SetInput( matrix ); 299 | linear->Update(); 300 | transform->Concatenate( linear ); 301 | 302 | int level = 0; 303 | while ( 1 ) { 304 | if ( file.eof() ) 305 | break; 306 | bool end = false; 307 | int dims[ 3 ]; 308 | for (int i = 0; i < 3; i++) { 309 | float value; 310 | file >> value; 311 | if (file.eof()) { 312 | end = true; 313 | break; 314 | } 315 | dims[ i ] = value; 316 | } 317 | if ( end ) break; 318 | 319 | double bboxMin[ 3 ], bboxMax[ 3 ], spacing[ 3 ], origin[ 3 ]; 320 | 321 | for (int i = 0 ; i < 3 ; i++) { 322 | 323 | file >> bboxMin[ i ] >> bboxMax[ i ]; 324 | spacing[ i ] = (double) ( bboxMax[ i ] - bboxMin[ i ] ) / dims[ i ]; 325 | dims[ i ] += 3; 326 | 
origin[ i ] = bboxMin[ i ] - spacing[ i ]; 327 | 328 | } 329 | 330 | file.ignore ( std::numeric_limits::max(), '\n' ); 331 | 332 | vtkImageData *coeffs = vtkImageData::New(); 333 | coeffs->SetOrigin( origin ); 334 | coeffs->SetSpacing( spacing ); 335 | coeffs->SetDimensions( dims ); 336 | coeffs->AllocateScalars( VTK_FLOAT, 3); 337 | float *p = ( float *) coeffs->GetScalarPointer(); 338 | int nb = dims[ 0 ] * dims[ 1 ] * dims[ 2 ]; 339 | 340 | for ( int i = 0; i < nb ; i++ ) { 341 | 342 | file >> p[ 0 ] >> p[ 1 ] >> p[ 2 ]; 343 | file.ignore ( std::numeric_limits::max(), '\n' ); 344 | p += 3; 345 | 346 | } 347 | 348 | vtkBSplineTransform *trans = vtkBSplineTransform::New(); 349 | trans->SetCoefficientData( coeffs ); 350 | trans->Update(); 351 | transform->Concatenate( trans ); 352 | 353 | if ( file.eof() ) { 354 | 355 | std::cerr << "Error while loading Transform" << std::endl; 356 | exit( 1 ); 357 | 358 | } 359 | 360 | } 361 | 362 | file.close(); 363 | return transform; 364 | 365 | } 366 | 367 | vtkGeneralTransform *readFrogJSON ( picojson::object root, const char *file ) { 368 | 369 | vtkGeneralTransform *transform = vtkGeneralTransform::New(); 370 | transform->PostMultiply(); 371 | 372 | picojson::array transforms = root[ "transforms" ].get(); 373 | 374 | for ( auto it = transforms.begin(); it != transforms.end(); it++) { 375 | 376 | object trans = it->get(); 377 | std::string type = trans[ "type" ].get(); 378 | 379 | if ( type.compare ( "vtkMatrixToLinearTransform" ) == 0 ) { 380 | 381 | vtkMatrixToLinearTransform *linear = vtkMatrixToLinearTransform::New(); 382 | vtkMatrix4x4 *matrix = vtkMatrix4x4::New(); 383 | picojson::array m = trans[ "matrix" ].get< picojson::array >(); 384 | int index = 0; 385 | 386 | for ( int i = 0; i < 4; i++ ) { 387 | 388 | for ( int j = 0; j < 4; j++ ) { 389 | 390 | matrix->SetElement( i, j, m[ index++ ].get< double >() ); 391 | 392 | } 393 | 394 | } 395 | 396 | linear->SetInput( matrix ); 397 | linear->Update(); 398 | 
transform->Concatenate( linear ); 399 | 400 | } else if ( type.compare ( "vtkBSplineTransform" ) == 0 ) { 401 | 402 | vtkImageData *coefficients = vtkImageData::New(); 403 | 404 | auto nii = trans[ "file" ]; 405 | 406 | if ( nii.is() ) { 407 | 408 | double sp[ 3 ], ori[ 3 ]; 409 | int dims[ 3 ]; 410 | 411 | picojson::array dimensions = trans[ "dimensions" ].get< picojson::array >(); 412 | picojson::array origin = trans[ "origin" ].get< picojson::array >(); 413 | picojson::array spacing = trans[ "spacing" ].get< picojson::array >(); 414 | 415 | for (int i = 0 ; i < 3 ; i++) { 416 | 417 | dims[ i ] = dimensions[ i ].get< double >(); 418 | sp[ i ] = spacing[ i ].get< double >(); 419 | ori[ i ] = origin[ i ].get< double >(); 420 | 421 | } 422 | 423 | coefficients->SetOrigin( ori ); 424 | coefficients->SetSpacing( sp ); 425 | coefficients->SetDimensions( dims ); 426 | coefficients->AllocateScalars( VTK_FLOAT, 3); 427 | float *p = ( float *) coefficients->GetScalarPointer(); 428 | int nb = 3 * dims[ 0 ] * dims[ 1 ] * dims[ 2 ]; 429 | picojson::array coeffs = trans[ "coeffs" ].get< picojson::array >(); 430 | 431 | for ( int i = 0; i < nb ; i++ ) { 432 | 433 | p[ i ] = coeffs[ i ].get< double >(); 434 | 435 | } 436 | 437 | } else { 438 | 439 | std::filesystem::path niiFile = nii.get(); 440 | std::filesystem::path path( file ); 441 | vtkNIFTIImageReader *reader = vtkNIFTIImageReader::New(); 442 | reader->SetFileName( ( path.parent_path() / niiFile ).c_str() ); 443 | reader->Update(); 444 | coefficients->ShallowCopy( reader->GetOutput() ); 445 | reader->Delete(); 446 | double Origin[3]; 447 | vtkMatrix4x4 *qForm = reader->GetQFormMatrix(); 448 | if (!qForm) qForm = vtkMatrix4x4::New(); 449 | 450 | for( int i = 0; i < 3; i++) 451 | Origin[ i ] = qForm->GetElement( i, 3 ); 452 | 453 | coefficients->SetOrigin( Origin ); 454 | 455 | } 456 | 457 | vtkBSplineTransform *bspline = vtkBSplineTransform::New(); 458 | bspline->SetCoefficientData( coefficients ); 459 | bspline->Update(); 
460 | transform->Concatenate( bspline ); 461 | 462 | } 463 | 464 | } 465 | 466 | return transform; 467 | 468 | } 469 | 470 | vtkGeneralTransform *readJSONfromString ( const char *str, const char *file ) { 471 | 472 | picojson::value v; 473 | std::string err; 474 | picojson::parse( v, str, str + strlen( str ) ); 475 | if ( !err.empty() ) std::cerr << err << std::endl; 476 | object trans = v.get(); 477 | 478 | if ( !( trans[ "transforms" ].is() ) ) return readFrogJSON( trans, file ); 479 | 480 | cout << str << endl; 481 | double scale = trans[ "scale" ].get(); 482 | cout << scale <(); 484 | double T[ 3 ]; 485 | 486 | for ( int i = 0; i < 3; i++ ) { 487 | 488 | T[ i ] = translation[ i ].get(); 489 | cout << T[ i ] << " "; 490 | 491 | } 492 | 493 | cout << endl; 494 | vtkGeneralTransform *transform = vtkGeneralTransform::New(); 495 | transform->Scale( scale, scale, scale ); 496 | transform->Translate( T[ 0 ], T[ 1 ], T[ 2 ] ); 497 | return transform; 498 | 499 | } 500 | 501 | vtkGeneralTransform *readJSON( const char *fileName ) { 502 | 503 | std::ifstream file( fileName ); 504 | std::string str( ( std::istreambuf_iterator< char >( file ) ), 505 | std::istreambuf_iterator< char >() ); 506 | 507 | return readJSONfromString( str.c_str(), fileName ); 508 | 509 | } 510 | 511 | 512 | vtkGeneralTransform *readTransform( const char *file ) { 513 | 514 | char fin[6]; 515 | char filename[1000]; 516 | strcpy( filename, file ); 517 | 518 | if (filename != NULL) { 519 | 520 | char *p; 521 | for ( p = filename; *p; ++p ) *p = tolower(*p); 522 | 523 | } 524 | 525 | strcpy ( fin, ".json" ); 526 | if ( strstr( filename, fin) != NULL) return readJSON( file ); 527 | else return readTFM( file ); 528 | 529 | } 530 | 531 | #endif 532 | -------------------------------------------------------------------------------- /js/pairwiseRigidRegistration.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | const { qx, desk, THREE } = window; 
4 | 5 | console.clear(); 6 | 7 | const params = { 8 | 9 | MATCH3D : { 10 | action : "MATCH3D", 11 | RansacDist : 20, 12 | MatchingScale : 1.5, 13 | MatchingDist : 0.3, 14 | MatchingDist2 : 0.98, 15 | ComputeBoundingBoxes : 1, 16 | RansacMinInliers : 1 //[5, 20, 25, 30, 40] 17 | }, 18 | SURF3D : { 19 | action : "SURF3D", 20 | threshold : 7000, 21 | target_spacing: 1, 22 | }, 23 | MESHING : { 24 | action : "extract_meshes", 25 | threshold : 300, 26 | max_number_of_vertices : 100000 27 | }, 28 | inputFiles : [ 29 | 30 | "big/00/dea/1.3.12.2.1107.5.1.4.50338.30000012091205384854600003775/1.3.12.2.1107.5.1.4.50338.30000012091205384854600003775.mhd", 31 | "big/00/dea/1.3.12.2.1107.5.1.4.50338.30000012091205384854600004095/1.3.12.2.1107.5.1.4.50338.30000012091205384854600004095.mhd", 32 | "big/00/corps//19bad/1.3.46.670589.33.1.32963155271087338575.28270001074118335806/1.3.46.670589.33.1.32963155271087338575.28270001074118335806.mhd" 33 | ], 34 | 35 | registerAll : false, 36 | showSlices : true, 37 | showBoxes : false, 38 | showRegistration : false 39 | }; 40 | 41 | const volumes = []; 42 | const meshes = []; 43 | let links; 44 | const boxes = []; 45 | const loadedFiles = []; 46 | const ids = []; 47 | const shuffleButtons = []; 48 | let surfActions = []; 49 | let transform; 50 | 51 | const win = new qx.ui.window.Window( "SURF" ); 52 | win.set ({layout : new qx.ui.layout.HBox(), width : 700, height : 500}); 53 | const tabView = new desk.TabView(); 54 | const pane = new qx.ui.splitpane.Pane(); 55 | win.add( pane, { flex : 1 } ); 56 | if ( desk.auto ) qx.core.Init.getApplication().getRoot().add( pane, 57 | { width : '100%', height : '100%' } ); 58 | 59 | function tweakShader (volume) { 60 | volume.getSlices().forEach(function ( slice, index ) { 61 | var material = slice.getUserData('mesh').material; 62 | material.baseShader.extraShaders.push( 63 | 'gl_FragColor[3] *= opacity * step(0.10, rawData[0]);'); 64 | slice.updateMaterial(material); 65 | } ); 66 | } 67 | 68 | const 
filesContainer = new qx.ui.container.Composite(); 69 | filesContainer.setLayout( new qx.ui.layout.VBox( 5 ) ); 70 | if ( !desk.auto ) tabView.addElement( "files", filesContainer ); 71 | 72 | const viewer = new desk.SceneContainer( { 73 | cameraFront : [ 0, -1, 0 ], 74 | cameraUp : [ 0, 0, -1 ] 75 | }); 76 | const statusLabel = new qx.ui.basic.Label( "..." ); 77 | viewer.add( statusLabel, { left : "40%", bottom : 10 } ); 78 | const tab = tabView.addElement('3D', viewer); 79 | tabView.setSelection( [ tab ] ); 80 | 81 | const showContainer = new qx.ui.container.Composite(); 82 | showContainer.setLayout( new qx.ui.layout.VBox() ); 83 | showContainer.setBackgroundColor( "white" ); 84 | showContainer.setDecorator( "border-blue" ); 85 | viewer.add( showContainer, { bottom : 5, right : 5 } ); 86 | 87 | const files = [ 0, 1 ].map( ( i ) => { 88 | 89 | const field = new desk.FileField( "" ); 90 | filesContainer.add( new qx.ui.basic.Label( "File " + ( i + 1) ) ); 91 | filesContainer.add( field ); 92 | return field; 93 | 94 | } ); 95 | 96 | const sphereGeometry = new THREE.SphereGeometry( 1, 10, 10 ); 97 | 98 | async function addMesh( file, index ) { 99 | 100 | if ( loadedFiles[ index ] == file ) return; 101 | if ( meshes[ index ] ) viewer.removeMesh( meshes[ index ], { updateCamera : false } ); 102 | updateInliers(); 103 | 104 | async function MPR() { 105 | 106 | viewers[ index ].removeAllVolumes(); 107 | return await viewers[ index ].addVolumeAsync( file ); 108 | // tweakShader( volume ); 109 | 110 | } 111 | 112 | const MPRPromise = MPR(); 113 | 114 | const meshing = await desk.Actions.executeAsync( { 115 | input_volume : file, 116 | ...params.MESHING 117 | } ); 118 | 119 | const mesh = new THREE.Group(); 120 | const updateCamera = index == 0; 121 | viewer.addMesh( mesh, { label : "Input " + ( index + 1 ), updateCamera } ); 122 | 123 | await viewer.addFileAsync( meshing.outputDirectory + "/0.vtk", 124 | { label : "bones", parent : mesh, updateCamera : index == 0 } ); 125 | 
126 | meshes[ index ] = mesh; 127 | setMeshesColor(); 128 | const action = await surfActions[ index ]; 129 | const pointsFile = action.outputDirectory + "pts.json"; 130 | const txt = await desk.FileSystem.readFileAsync( pointsFile, { cache : action } ); 131 | const pts = JSON.parse( txt ); 132 | const matrix = new THREE.Matrix4(); 133 | const material = new THREE.MeshLambertMaterial(); 134 | const imesh = new THREE.InstancedMesh( sphereGeometry, material, pts.points.length ); 135 | const v = new THREE.Vector3(); 136 | 137 | for ( let [ i, pt ] of pts.points.entries() ) { 138 | 139 | v.copy( pt ); 140 | matrix.makeScale( pt.scale, pt.scale, pt.scale ); 141 | matrix.setPosition( ...v.toArray() ); 142 | imesh.setMatrixAt( i, matrix ); 143 | 144 | } 145 | 146 | imesh.userData.points = pts; 147 | viewer.addMesh( imesh, { parent : mesh, label : "points", updateCamera } ); 148 | const volume = await MPRPromise; 149 | viewer.attachVolumeSlices( volume.getSlices(), { parent : mesh, updateCamera } ); 150 | volume.getSlices()[ 0 ].addListener( "changePosition", updatePointsVisibility ); 151 | updatePointsVisibility(); 152 | viewer.render(); 153 | loadedFiles[ index ] = file; 154 | if ( index == 0 ) viewer.resetView(); 155 | setMeshesColor(); 156 | 157 | } 158 | 159 | async function addMeshesAndMatch() { 160 | 161 | console.log( ids ); 162 | await update( files.map( f => f.getValue() ) ); 163 | 164 | } 165 | 166 | async function match( files ) { 167 | 168 | transform = null; 169 | 170 | surfActions = files.map( file => desk.Actions.executeAsync( { 171 | 172 | input_volume : file, 173 | outputFileName : "pts", 174 | writeJSON : 1, 175 | ...params.SURF3D 176 | 177 | } ) 178 | 179 | ); 180 | 181 | await Promise.all( surfActions ); 182 | 183 | const match = await desk.Actions.executeAsync( { 184 | 185 | input_volume1 : ( await surfActions[ 0 ] ).outputDirectory + "pts.json", 186 | input_volume2 : ( await surfActions[ 1 ] ).outputDirectory + "pts.json", 187 | writeInliers : true, 
188 | ...params.MATCH3D 189 | 190 | } ); 191 | 192 | const txt = await desk.FileSystem.readFileAsync( match.outputDirectory + "transform.json" ); 193 | const trans = JSON.parse( txt ); 194 | return trans.fail ? null : trans; 195 | 196 | } 197 | 198 | async function update( files ) { 199 | 200 | try { 201 | 202 | transform = null; 203 | for ( let button of shuffleButtons ) button.setEnabled( false ); 204 | statusLabel.setValue( "Registering..." ); 205 | const promise = match( files ); 206 | await Promise.all( [ 0, 1 ].map( index => addMesh( files[ index ], index ) ) ); 207 | transform = await promise; 208 | setMeshesColor(); 209 | updatePointsVisibility(); 210 | statusLabel.setValue( transform ? 211 | "Done, " + transform.inliers + " inliers." : "Failed"); 212 | for ( let button of shuffleButtons ) button.setEnabled( true ); 213 | 214 | } catch( e ) { console.warn( e ); } 215 | 216 | } 217 | 218 | const layout = new qx.ui.layout.VBox( 5 ); 219 | const container = new qx.ui.container.Composite( layout ); 220 | 221 | const viewers = [ 0, 1 ].map( ( index ) => { 222 | 223 | const MPR = new desk.MPRContainer(null, { nbOrientations : 1 } ); 224 | MPR.maximizeViewer(0); 225 | container.add(MPR, {flex : 1}); 226 | const fileBrowser = new desk.FileBrowser(null, false); 227 | fileBrowser.setFileHandler(function () {}); // disable double click 228 | fileBrowser.setWidth(300); 229 | fileBrowser.getTree().addListener('changeSelection', function () { 230 | 231 | const selectedFiles = fileBrowser.getSelectedFiles(); 232 | if ( !selectedFiles.length ) return; 233 | const fileName = selectedFiles[0]; 234 | switch (desk.FileSystem.getFileExtension(fileName)) { 235 | case "mhd": 236 | case "png": 237 | case "jpg": 238 | files[ index ].setValue(fileName); 239 | break; 240 | default : 241 | break; 242 | } 243 | 244 | }); 245 | 246 | if ( !desk.auto ) tabView.addElement('input ' + ( index + 1 ), fileBrowser ); 247 | return MPR; 248 | 249 | } ); 250 | 251 | pane.add( container ); 252 | 
253 | if ( desk.auto ) pane.add( viewer, 2 ); 254 | else { 255 | 256 | pane.add( tabView, 2 ); 257 | win.open(); 258 | win.center(); 259 | 260 | } 261 | 262 | const buttonsContainer = new qx.ui.container.Composite(); 263 | buttonsContainer.setLayout( new qx.ui.layout.VBox() ); 264 | viewer.add( buttonsContainer, { left : 5, bottom : 5 } ); 265 | const switchColors = new qx.ui.form.CheckBox( "switch colors" ); 266 | switchColors.addListener("changeValue", setMeshesColor); 267 | showContainer.add(switchColors); 268 | 269 | function updatePointsVisibility() { 270 | 271 | for ( let i = 0; i < 2; i++ ) { 272 | 273 | const group = meshes[ i ]; 274 | if ( !group ) continue; 275 | const arr = group.children.filter( m => m?.userData?.points ); 276 | if (!arr.length ) continue; 277 | const mesh = arr[ 0 ]; 278 | mesh.instanceMatrix.needsUpdate = true; 279 | const points = mesh.userData.points; 280 | 281 | let position; 282 | const arr2 = group.children.filter( m => m?.userData?.viewerProperties?.volumeSlice ); 283 | 284 | if ( pointsInSlices.getValue() && arr2.length ) { 285 | 286 | const slice = arr2[ 0 ].userData.viewerProperties.volumeSlice; 287 | position = slice.getPosition(); 288 | 289 | } 290 | 291 | let inliers; 292 | if ( inlierPoints.getValue() && links ) inliers = links.userData.inliers[ i ]; 293 | const v = new THREE.Vector3(); 294 | const matrix = new THREE.Matrix4(); 295 | 296 | for ( let [ i, pt ] of points.points.entries() ) { 297 | 298 | v.copy( pt ); 299 | let scale = pt.scale; 300 | 301 | if ( position != undefined && ( Math.abs( pt.z - position ) > scale ) ) 302 | scale = 0; 303 | 304 | if ( inliers && !inliers.has( i ) ) scale = 0; 305 | if ( inlierPoints.getValue() && !transform ) scale = 0; 306 | 307 | matrix.makeScale( scale, scale, scale ); 308 | matrix.setPosition( ...v.toArray() ); 309 | mesh.setMatrixAt( i, matrix ); 310 | 311 | } 312 | 313 | } 314 | 315 | viewer.render(); 316 | 317 | } 318 | 319 | function setMeshesColor() { 320 | 321 | 
pointsInSlices.setVisibility( showPoints.getValue() ? "visible" : "hidden" ); 322 | inlierPoints.setVisibility( showPoints.getValue() ? "visible" : "hidden" ); 323 | 324 | for ( let i = 0; i < 2; i++ ) { 325 | 326 | const index = switchColors.getValue() ? 1 - i : i; 327 | const color = index ? [ 1, 0.7, 0.55 ] : [ 1, 1, 1 ]; 328 | const color2 = index ? [ 1, 0, 0 ] : [ 0, 0, 1 ]; 329 | const group = meshes[ i ]; 330 | 331 | if ( group ) { 332 | 333 | const mesh = group.children[ 0 ]; 334 | 335 | if ( mesh ) { 336 | 337 | const material = mesh.material; 338 | material.color.setRGB( ...color ); 339 | material.transparent = true; 340 | material.opacity = 0.8; 341 | material.side = THREE.DoubleSide; 342 | mesh.visible = showMeshes.getValue(); 343 | mesh.renderOrder = 2000; 344 | 345 | } 346 | 347 | const slices = group.children[ 2 ]; 348 | if ( slices ) { 349 | slices.visible = showSlices.getValue(); 350 | slices.children[ 0 ].renderOrder = -10; 351 | } 352 | const points = group.children[ 1 ]; 353 | 354 | if ( points ) { 355 | points.material.color.setRGB( ...color2 ); 356 | points.visible = showPoints.getValue(); 357 | } 358 | 359 | } 360 | 361 | const button = shuffleButtons[ i ]; 362 | if ( button ) button.setDecorator( index ? "button-hover" : "main"); 363 | 364 | } 365 | 366 | const mesh = meshes[ 1 ]; 367 | 368 | if ( mesh ) { 369 | 370 | if ( transform && !transform.fail && showRegistration.getValue() ) { 371 | 372 | mesh.position.fromArray( transform.translation ).multiplyScalar( -1 ); 373 | mesh.scale.setScalar( 1.0 / transform.scale ); 374 | 375 | } else { 376 | 377 | mesh.position.set( 1000, 0, 0 ); 378 | mesh.scale.setScalar( 1.0 ); 379 | if ( ! meshes[ 0 ]?.children[ 0 ] ) return; 380 | if ( ! 
meshes[ 1 ]?.children[ 0 ] ) return; 381 | const box1 = meshes[ 0 ].children[ 0 ].geometry.boundingBox; 382 | const box2 = meshes[ 1 ].children[ 0 ].geometry.boundingBox; 383 | const center = new THREE.Vector3(); 384 | box1.getCenter( center ); 385 | box2.getCenter( mesh.position ); 386 | mesh.position.multiplyScalar( -1 ).add( center ); 387 | mesh.position.x += 1000; 388 | 389 | } 390 | 391 | } 392 | 393 | updateInliers(); 394 | viewer.render(); 395 | 396 | } 397 | 398 | function updateInliers() { 399 | 400 | if ( links ) viewer.removeMesh( links ); 401 | updateBoxes(); 402 | links = null; 403 | if ( !transform ) return; 404 | const nInliers = transform.inliers; 405 | const geometry = new THREE.BufferGeometry(); 406 | const material = new THREE.LineBasicMaterial( { color : 0x000000 }); 407 | const positions = new Float32Array( nInliers * 6 ); 408 | const pos = new THREE.BufferAttribute( positions, 3 ); 409 | geometry.setAttribute( 'position', pos ); 410 | const mesh1 = meshes[ 0 ].children[ 1 ]; 411 | const mesh2 = meshes[ 1 ].children[ 1 ]; 412 | const group2 = meshes[ 1 ]; 413 | group2.updateMatrix(); 414 | const matrix1 = new THREE.Matrix4(); 415 | const matrix2 = new THREE.Matrix4(); 416 | const p1 = new THREE.Vector3(); 417 | const p2 = new THREE.Vector3(); 418 | const s = new THREE.Vector3(); 419 | const q = new THREE.Quaternion(); 420 | const inliers = [ new Set(), new Set() ]; 421 | 422 | for ( let i = 0; i < nInliers; i ++ ) { 423 | 424 | const [ point1, point2 ] = transform.allInliers[ i ]; 425 | inliers[ 0 ].add( point1 ); 426 | inliers[ 1 ].add( point2 ); 427 | mesh1.getMatrixAt( point1, matrix1 ); 428 | mesh2.getMatrixAt( point2, matrix2 ); 429 | matrix1.decompose( p1, q, s ); 430 | matrix2.decompose( p2, q, s ); 431 | p2.applyMatrix4( group2.matrix ); 432 | pos.setXYZ( i * 2, ...p1.toArray() ); 433 | pos.setXYZ( 1 + i * 2, ...p2.toArray() ); 434 | 435 | } 436 | 437 | links = new THREE.LineSegments( geometry, material, THREE.LineSegments ); 438 | 
links.userData.inliers = inliers; 439 | links.visible = showLinks.getValue(); 440 | viewer.addMesh( links, { label : "links" } ); 441 | } 442 | 443 | function updateBoxes() { 444 | 445 | for ( let i = 0; i < 2; i++ ) { 446 | 447 | if ( boxes[ i ] ) viewer.removeMesh( boxes[ i ] ); 448 | boxes[ i ] = null; 449 | if ( !transform || !showBoxes.getValue() ) continue; 450 | const b = transform[ i ? "bboxB" : "bboxA" ]; 451 | const geometry = new THREE.BoxGeometry(); 452 | const material = new THREE.MeshLambertMaterial( { color : "yellow"} ); 453 | material.opacity = 0.2; 454 | material.transparent = true; 455 | material.side = THREE.DoubleSide; 456 | const box = new THREE.Box3(); 457 | box.min.fromArray( b.min ); 458 | box.max.fromArray( b.max ); 459 | const mesh = new THREE.Mesh( geometry, material ); 460 | mesh.renderOrder = 200000; 461 | box.getCenter( mesh.position ); 462 | box.getSize( mesh.scale ); 463 | viewer.addMesh( mesh, { label : "box", parent : meshes[ i ] } ); 464 | boxes[ i ] = mesh; 465 | 466 | } 467 | 468 | } 469 | 470 | const showRegistration = new qx.ui.form.CheckBox('Show registration'); 471 | showContainer.add( showRegistration ); 472 | showRegistration.setValue( params.showRegistration ); 473 | showRegistration.addListener( "changeValue", setMeshesColor ); 474 | 475 | const showPoints = new qx.ui.form.CheckBox('Show points'); 476 | showContainer.add( showPoints ); 477 | showPoints.setValue( false ); 478 | showPoints.addListener( "changeValue", setMeshesColor ); 479 | 480 | const pointsInSlices = new qx.ui.form.CheckBox('Points in slices'); 481 | showContainer.add( pointsInSlices ); 482 | pointsInSlices.setValue( false ); 483 | pointsInSlices.setVisibility( "hidden" ); 484 | pointsInSlices.addListener( "changeValue", updatePointsVisibility ); 485 | 486 | const inlierPoints = new qx.ui.form.CheckBox('Inlier points'); 487 | showContainer.add( inlierPoints ); 488 | inlierPoints.setValue( false ); 489 | inlierPoints.setVisibility( "hidden" ); 490 | 
inlierPoints.addListener( "changeValue", updatePointsVisibility ); 491 | 492 | const showSlices = new qx.ui.form.CheckBox('Show slices'); 493 | showContainer.add( showSlices ); 494 | showSlices.setValue( params.showSlices ); 495 | showSlices.addListener( "changeValue", setMeshesColor ); 496 | 497 | const showLinks = new qx.ui.form.CheckBox('Show links'); 498 | showContainer.add( showLinks ); 499 | showLinks.setValue( false ); 500 | showLinks.addListener( "changeValue", setMeshesColor ); 501 | 502 | const showBoxes = new qx.ui.form.CheckBox('Show boxes'); 503 | showContainer.add( showBoxes ); 504 | showBoxes.setValue( params.showBoxes ); 505 | showBoxes.addListener( "changeValue", setMeshesColor ); 506 | 507 | const showMeshes = new qx.ui.form.CheckBox('Show meshes'); 508 | showContainer.add( showMeshes ); 509 | showMeshes.setValue( true ); 510 | showMeshes.addListener( "changeValue", setMeshesColor ); 511 | 512 | let windowIsClosed; 513 | 514 | win.addListener('close', function () { 515 | 516 | windowIsClosed = true; 517 | for ( let MPR of viewers ) MPR.dispose(); 518 | tabView.dispose(); 519 | win.destroy(); 520 | viewer.dispose(); 521 | 522 | } ); 523 | 524 | desk.FileSystem.traverse( "big/00/corps/", traverse, afterTraverse ); 525 | 526 | function traverse( file ) { 527 | 528 | if ( desk.FileSystem.getFileExtension(file) === "mhd" ) volumes.push (file); 529 | 530 | } 531 | 532 | async function afterTraverse() { 533 | 534 | for ( let file of params.inputFiles.slice( 0,2 ) ) 535 | if ( !volumes.includes( file ) ) volumes.push( file ); 536 | 537 | console.log( volumes.length + " files" ); 538 | volumes.sort( ( a, b ) => a.localeCompare( b ) ); 539 | addRandomButton( 0 ); 540 | addRandomButton( 1 ); 541 | setMeshesColor(); 542 | 543 | 544 | if ( desk.auto || !params.registerAll ){ 545 | 546 | for ( let [ i, field ] of files.entries() ) { 547 | field.setValue( params.inputFiles[ i ] ); 548 | ids[ i ] = volumes.indexOf( params.inputFiles[ i ] ); 549 | } 550 | 551 | 
addMeshesAndMatch(); 552 | 553 | for ( let field of files ) 554 | field.addListener( "changeValue", addMeshesAndMatch ); 555 | 556 | return; 557 | 558 | } 559 | 560 | for ( let volume of volumes ) { 561 | 562 | for ( let volume2 of volumes ) { 563 | 564 | if ( windowIsClosed ) break; 565 | await update( [ volume, volume2 ] ); 566 | 567 | } 568 | 569 | } 570 | 571 | console.log( "done" ); 572 | 573 | } 574 | 575 | viewer.add( new qx.ui.basic.Label( "contrast" ), { top : 10, right : 0 } ); 576 | const slider = new qx.ui.form.Slider( "vertical" ); 577 | slider.setHeight( 300 ); 578 | viewer.add( slider, { right : 10, top : 30 } ); 579 | slider.setValue( 100 - 1 / 0.04 ); 580 | slider.addListener( "changeValue", () => { 581 | viewer.getScene().traverse( object => { 582 | const slice = object.userData?.viewerProperties?.volumeSlice; 583 | if ( !slice ) return; 584 | slice.setContrast( ( 100 - slider.getValue() ) * 0.04); 585 | } ); 586 | }); 587 | 588 | function addRandomButton( index ) { 589 | 590 | const button = new qx.ui.form.Button( "New " + ( index + 1 ) ); 591 | const rng = new desk.Random( index * 1000 ); 592 | shuffleButtons[ index ] = button; 593 | 594 | button.addListener("execute", function () { 595 | 596 | let id = ids[ index ]; 597 | while ( id == ids[ index ] ) id = Math.floor( rng.random() * volumes.length ); 598 | ids[ index ] = id; 599 | files[ index ].setValue( volumes[ id ] ); 600 | 601 | } ); 602 | 603 | buttonsContainer.add( button ); 604 | 605 | } 606 | 607 | -------------------------------------------------------------------------------- /js/stochasticRigidRegistration.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const params = { 4 | 5 | maxNumberOfIterations : 200, 6 | batchSize : 50, // stochastic batch size 7 | maxSize : 10000, // maximum number of volumes 8 | learningRate : 0.2, // learning rate 9 | sortStart : 3, // time at which volumes are displayed in z-sorted order 10 | 
outlierStart : 5, // time at which removing outlier starts 11 | outlierRatio : 10, // outlier threshold : displacement > outlierRatio * medianDisplacement 12 | spacingX : 500, // grid x spacing for display 13 | spacingZ : 3000, // grid z spacing for display 14 | animationDuration : 500, // length of animation in milliseconds 15 | showDisplacementNorms : false, 16 | displayMeanZ : false, 17 | loadVolumeConcurrency : 5 18 | 19 | }; 20 | 21 | const { qx, desk, THREE, _, async, alert } = window; 22 | desk.URLParameters.parseParameters( params ); 23 | console.clear(); 24 | 25 | ( async function () { 26 | 27 | const placesAll = [ 28 | 'big/01_Registrations/registration-001-931.json', 29 | //'big/01_MHD', 30 | //'big/01_MHD', 31 | //'big/00_MHD', 32 | 'big/visceral/volumes', 33 | 'big/visceral/silver/volumes/CTce_ThAb', 34 | 'big/visceral/volumes_CT_wb' 35 | ]; 36 | 37 | const places = placesAll; 38 | const outliers = []; // this array will contain detected outliers 39 | let files = []; // this array will be filled with volume files to register 40 | const lines = {}; 41 | let width; 42 | 43 | // setup UI 44 | const viewer = new desk.MeshViewer( { 45 | 46 | orthographic : true, 47 | cameraFront : [ 0, 1, 0 ], 48 | cameraUp : [ 0, 0, 1 ] 49 | 50 | } ); 51 | 52 | if ( desk.auto ) viewer.getOptionsButton().setVisibility( "excluded" ); 53 | if ( desk.auto ) viewer.fillScreen(); 54 | 55 | const resetView = _.throttle( () => viewer.resetView(), 500 ); 56 | viewer.getControls().noRotate = true; 57 | const statusLabel = new qx.ui.basic.Label( '' ); 58 | statusLabel.set( { backgroundColor : "white" } ); 59 | statusLabel.setValue = _.throttle( statusLabel.setValue.bind( statusLabel ), 500 ); 60 | viewer.add( statusLabel, { left : "30%", bottom : 50 } ); 61 | const rng = new desk.Random( "random" ); 62 | const batchSizeLabel = new qx.ui.form.Spinner( 2, 2, 300 ); 63 | viewer.add( batchSizeLabel, { left : "81%", bottom : 10 } ); 64 | viewer.add( new qx.ui.basic.Label(' batch size '), 
{ right : "20%", bottom : 10 } ); 65 | 66 | batchSizeLabel.addListener( 'changeValue', function ( ) { 67 | 68 | width = Math.max ( 4 * Math.round( Math.sqrt( size ) ), batchSizeLabel.getValue() ); 69 | 70 | } ); 71 | 72 | 73 | function addFile ( file ) { 74 | 75 | if ( !file.endsWith( ".mhd" ) && !file.endsWith( ".nii.gz" ) ) return; 76 | files.push( file ); 77 | 78 | } 79 | 80 | // build file list 81 | for ( let place of places ) { 82 | 83 | if ( place.endsWith( ".json" ) ) { 84 | 85 | const content = JSON.parse( await desk.FileSystem.readFileAsync( place ) ); 86 | const list = Array.isArray( content ) ? content : content.volumes; 87 | files.push( ...list ); 88 | 89 | } else 90 | await desk.FileSystem.traverseAsync( place, addFile ); 91 | 92 | } 93 | 94 | files = _.uniq( files ); 95 | files.length = Math.min( files.length, params.maxSize ); 96 | const size = files.length; 97 | batchSizeLabel.setValue( params.batchSize ); 98 | 99 | const meshes = await async.mapLimit( files, params.loadVolumeConcurrency, async file => { 100 | 101 | const id = files.indexOf( file ); 102 | const anchor = new THREE.Group(); 103 | viewer.addMesh( anchor, { label : '' + id } ); 104 | 105 | const mesh = await viewer.addVolumeAsync( file , { 106 | 107 | orientations : [ 2 ], 108 | sliceWith : { sampling_ratio : 0.1 }, 109 | parent : anchor 110 | 111 | } ); 112 | 113 | mesh.userData.id = id; 114 | mesh.userData.file = file; 115 | const slice = mesh.children[ 0 ]; 116 | slice.renderOrder = 2000; 117 | slice.material.depthTest = false; 118 | slice.material.transparent = false; 119 | anchor.position.x = ( id % width ) * params.spacingX; 120 | anchor.position.z = - Math.floor( id / width ) * params.spacingZ; 121 | anchor.userData.initial = anchor.position.clone(); 122 | anchor.userData.final = new THREE.Vector3(); 123 | statusLabel.setValue( ( id + 1 ) + " volumes loaded" ); 124 | resetView(); 125 | return mesh; 126 | 127 | } ); 128 | 129 | viewer.resetView(); 130 | statusLabel.setValue( "All 
" + meshes.length + " volumes loaded. Ready to iterate" ); 131 | const button = new qx.ui.form.Button( 'iterate' ); 132 | const spinner = new qx.ui.form.Spinner( 0, 1, 1e10 ); 133 | 134 | viewer.add( button, { bottom : 10, left : "50%" } ); 135 | viewer.add( spinner, { bottom : 10, right : "50%" } ); 136 | 137 | let numberOfIterations = 0; 138 | 139 | button.addListener( 'execute', async function () { 140 | 141 | try { 142 | 143 | button.setEnabled( false ); 144 | const defaultSpinnerValue = spinner.getValue(); 145 | 146 | while ( spinner.getValue() ) { 147 | 148 | await iterate(); 149 | spinner.setValue( spinner.getValue() - 1 ); 150 | 151 | } 152 | 153 | button.setEnabled( true ); 154 | spinner.setValue( defaultSpinnerValue ); 155 | if ( numberOfIterations >= params.maxNumberOfIterations ) 156 | for ( let o of [ button, spinner ] ) o.setVisibility( "excluded" ); 157 | 158 | } catch( e ) { console.warn( e ); } 159 | 160 | } ); 161 | 162 | if ( !desk.auto ) addFilterButton(); 163 | 164 | function addFilterButton() { 165 | 166 | const button = new qx.ui.form.ToggleButton( 'filter' ); 167 | viewer.add( button, { top : 10, right : 10 } ); 168 | 169 | const container = new qx.ui.container.Composite( new qx.ui.layout.VBox() ); 170 | container.setVisibility( 'excluded' ); 171 | container.setWidth( 100 ); 172 | button.addListener( 'changeValue', function ( e ) { 173 | 174 | if ( e.getData() ) { 175 | 176 | container.setVisibility( 'visible' ); 177 | filter(); 178 | maxLine.visible = minLine.visible = true; 179 | 180 | } else { 181 | 182 | container.setVisibility( 'excluded' ); 183 | meshes.forEach( mesh => { mesh.visible = true; } ); 184 | maxLine.visible = minLine.visible = false; 185 | 186 | } 187 | 188 | viewer.render(); 189 | 190 | } ); 191 | 192 | container.addListenerOnce( 'appear', function () { 193 | 194 | const z = _.mean( meshes.map( mesh => mesh.userData.z ) ); 195 | if ( isNaN( z ) ) return; 196 | max.setValue( z ); 197 | min.setValue( z ); 198 | 199 | } ); 
200 | 201 | viewer.add( container, { top : 40, right : 10 } ); 202 | 203 | const max = new qx.ui.form.Spinner( -10000, 0, 10000 ); 204 | container.add( max ); 205 | 206 | const min = new qx.ui.form.Spinner( -10000, 0, 10000 ); 207 | container.add( min ); 208 | 209 | const label = new qx.ui.basic.Label( '' ); 210 | container.add( label ); 211 | 212 | const exportButton = new qx.ui.form.Button( 'export' ); 213 | container.add( exportButton ); 214 | exportButton.addListener( 'execute', saveRegistration ); 215 | 216 | min.addListener( 'changeValue', filter ); 217 | max.addListener( 'changeValue', filter ); 218 | 219 | const material = new THREE.MeshBasicMaterial( { color: "red" } ); 220 | const geometry = new THREE.PlaneGeometry( ( 2 + width ) * params.spacingX, 3 ); 221 | geometry.applyMatrix4( new THREE.Matrix4().makeRotationX( 0.5 * Math.PI ) ); 222 | const maxLine = new THREE.Mesh( geometry, material ); 223 | const minLine = new THREE.Mesh( geometry, material ); 224 | 225 | for ( let line of [ minLine, maxLine ] ) { 226 | 227 | line.position.x = 0.5 * width * params.spacingX; 228 | line.position.y = - 1.5; 229 | viewer.addMesh( line, { label : "minMaxLine" }); 230 | 231 | } 232 | 233 | maxLine.visible = minLine.visible = false; 234 | 235 | function filter() { 236 | 237 | let n = 0; 238 | const zMin = min.getValue(); 239 | const zMax = max.getValue(); 240 | 241 | minLine.position.z = zMin; 242 | maxLine.position.z = zMax; 243 | 244 | for ( let mesh of meshes ) { 245 | 246 | 247 | if ( !mesh.userData.outlier && 248 | ( ( mesh.userData.zMax + mesh.position.z ) >= zMax ) && 249 | ( ( mesh.userData.zMin + mesh.position.z ) <= zMin ) ) { 250 | 251 | mesh.visible = true; 252 | const i = n % width; 253 | const j = Math.floor( n / width ); 254 | mesh.parent.position.set( i * params.spacingX, 0, - j * params.spacingZ ); 255 | n++; 256 | 257 | } else { 258 | 259 | mesh.visible = false; 260 | 261 | } 262 | 263 | } 264 | 265 | label.setValue( n + ' volumes' ); 266 | 
viewer.render(); 267 | 268 | } 269 | 270 | async function saveRegistration () { 271 | 272 | var obj = { 273 | volumes : [], 274 | positions : [], 275 | zMin : min.getValue(), 276 | zMax : max.getValue() 277 | }; 278 | 279 | for ( let mesh of meshes.filter( mesh => mesh.visible ) ) { 280 | 281 | obj.volumes.push( mesh.userData.file ); 282 | obj.positions.push( 283 | mesh.position.x, 284 | mesh.position.y, 285 | mesh.position.z, 286 | 0 287 | ); 288 | 289 | } 290 | 291 | const file = 'data/stochasticRegistration.json'; 292 | await desk.FileSystem.writeJSONAsync( file, obj ); 293 | alert ( "file written : " + file ); 294 | 295 | } 296 | 297 | } 298 | 299 | async function iterate () { 300 | 301 | numberOfIterations++; 302 | 303 | const batch = getRandomVolumes(); 304 | 305 | setupAnimation( batch ); 306 | 307 | // construct file list on disk 308 | const files = batch.map( mesh => mesh.userData.viewerProperties.label ); 309 | const fileOnDisk = await desk.FileSystem.writeCachedFileAsync ( "files.json", 310 | JSON.stringify( files ) ); 311 | 312 | // launch registration 313 | const registrationDone = desk.Actions.executeAsync( { 314 | 315 | action : "LSRegistration", 316 | files : fileOnDisk 317 | 318 | } ); 319 | 320 | await animate(); 321 | 322 | // read registration results 323 | const registration = JSON.parse( await desk.FileSystem.readFileAsync( 324 | ( await registrationDone ).outputDirectory + "registration.json") ); 325 | 326 | const currentMean = batch.reduce( ( prev, curr ) => prev.add( curr.position ), 327 | new THREE.Vector3() ).multiplyScalar( 1 / batch.length ); 328 | 329 | const newPositions = batch.map( ( mesh, index ) => { 330 | 331 | return new THREE.Vector3().fromArray( registration.positions, 4 * index ); 332 | 333 | }); 334 | 335 | const newMean = newPositions.reduce( ( prev, curr ) => prev.add( curr ), 336 | new THREE.Vector3() ).multiplyScalar( 1 / batch.length ); 337 | 338 | batch.forEach( ( mesh, index ) => { 339 | 340 | const alpha = 
mesh.userData.registeredOnce ? params.learningRate : 1; 341 | 342 | const initial = mesh.position.clone().sub( currentMean ); 343 | const final = newPositions[ index ].clone().sub( newMean ); 344 | const displacement = initial.clone().lerp( final, alpha ).sub( initial ); 345 | 346 | mesh.userData.registeredOnce = true; 347 | 348 | // displace mesh according to registration 349 | mesh.position.add( displacement ); 350 | mesh.userData.displacement = displacement.length() / alpha; 351 | 352 | // displace slice frame to give a hint on convergence status 353 | const frame = mesh.children[ 0 ].children[ 0 ]; 354 | frame.position.copy( displacement ).multiplyScalar( 1 / alpha ); 355 | const material = frame.material; 356 | material.color.set( 'blue' ); 357 | material.opacity = 0.5; 358 | material.transparent = false; 359 | 360 | if ( params.showDisplacementNorms ) { 361 | 362 | // display displacement norm on each mesh 363 | const sprite = mesh.userData.sprite; 364 | 365 | if ( sprite ) { 366 | 367 | mesh.parent.remove( sprite ); 368 | sprite.material.dispose(); 369 | 370 | } 371 | 372 | mesh.userData.sprite = new THREE.TextSpriteHelper( mesh.parent, 373 | '' + Math.round( displacement.length() / alpha ) ); 374 | 375 | } 376 | 377 | }); 378 | 379 | computeStatistics(); 380 | viewer.render(); 381 | 382 | } 383 | 384 | function computeStatistics() { 385 | 386 | const registeredMeshes = meshes.filter ( mesh => ( !mesh.userData.outlier && mesh.userData.registeredOnce ) ); 387 | if ( !registeredMeshes.length ) return; 388 | 389 | const displacements = registeredMeshes 390 | .map ( mesh => mesh.userData.displacement ) 391 | .sort(); 392 | 393 | const mean = _.mean( displacements ); 394 | const median = displacements[ Math.round( displacements.length / 2 ) ]; 395 | const maxMesh = _.maxBy( registeredMeshes, mesh => mesh.userData.displacement ); 396 | const max = maxMesh.userData.displacement; 397 | 398 | for ( let mesh of registeredMeshes ) 399 | mesh.children[ 0 ].children[ 0 
].material.color.set( 'blue' ); 400 | 401 | maxMesh.children[ 0 ].children[ 0 ].material.color.set( 'red' ); 402 | const batchSize = params.batchSize; 403 | 404 | statusLabel.setValue( 405 | meshes.length + " volumes, " + 406 | "Iteration " + numberOfIterations + 407 | ' ( ' + ( ( numberOfIterations * batchSize ) / size ).toFixed(1) + 408 | ' / vol, gain = ' + 409 | ( size * size / ( numberOfIterations * batchSize * batchSize ) ).toFixed( 1 ) + ' ), ' + 410 | "Mean = " + mean.toFixed( 1 ) + ", " + 411 | " Median = " + median.toFixed( 1 ) + ", " + 412 | " Max = " + max.toFixed( 1 ) 413 | ); 414 | 415 | // possibly remove one outlier 416 | if ( ( numberOfIterations >= params.outlierStart * size / batchSizeLabel.getValue() ) && 417 | ( max > ( params.outlierRatio * median ) ) ) { 418 | 419 | outliers.push( maxMesh ); 420 | maxMesh.userData.outlier = true; 421 | shuffledArray.length = 0; 422 | 423 | } 424 | 425 | } 426 | 427 | async function animate() { 428 | 429 | const start = performance.now(); 430 | 431 | while ( true ) { 432 | 433 | let ratio = Math.min( 1, ( performance.now() - start ) / params.animationDuration ); 434 | 435 | for ( let mesh of meshes ) { 436 | 437 | const anchor = mesh.parent; 438 | const pos = anchor.userData; 439 | anchor.position.lerpVectors( pos.initial, pos.final, ratio ); 440 | 441 | } 442 | 443 | await viewer.renderAsync(); 444 | if ( ratio >= 1 ) break; 445 | 446 | } 447 | 448 | for ( let mesh of meshes ) { 449 | 450 | const pos = mesh.parent.userData; 451 | pos.initial.copy( pos.final ); 452 | 453 | } 454 | 455 | } 456 | 457 | function setupAnimation ( batch ) { 458 | 459 | if ( numberOfIterations < params.sortStart * size / batchSizeLabel.getValue() ) { 460 | 461 | // setup animation to slide all volumes down 462 | for ( let mesh of meshes ) { 463 | 464 | const anchor = mesh.parent; 465 | anchor.userData.final.copy( anchor.position ); 466 | anchor.userData.final.z = anchor.position.z - params.spacingZ; 467 | 468 | } 469 | 470 | } else 
// Tail of setupAnimation: after sortStart, sortMeshes() lays out the grid
// instead of the plain slide-down; the next volume batch is lined up along x
// at z = +spacingZ, and rejected (outlier) meshes are parked on the left in
// rows of 5. Then the head of sortMeshes(): for every mesh, read the volume
// slice z-bounds, record zMin/zMax and the mid-z (bounds mid + mesh z) into
// userData.z, optionally attach a TextSpriteHelper showing the rounded z
// (disposing any previous sprite's material first), then sort the
// non-outlier, already-registered meshes by descending z into a grid that is
// `width` columns wide, lazily creating one cyan plane ("line") per row.
sortMeshes(); 471 | 472 | // setup animation for next volume batch 473 | batch.forEach( ( mesh, index ) => { 474 | 475 | const finalPos = mesh.parent.userData.final; 476 | finalPos.z = params.spacingZ; 477 | finalPos.x = index * params.spacingX; 478 | 479 | } ); 480 | 481 | // put rejected meshes aside 482 | outliers.forEach( ( mesh, index ) => { 483 | 484 | const pos = mesh.parent.userData; 485 | pos.final.z = - params.spacingZ * Math.round( index / 5 ); 486 | pos.final.x = - params.spacingX * ( ( index % 5 ) + 4 ); 487 | 488 | } ); 489 | 490 | } 491 | 492 | function sortMeshes() { 493 | 494 | // setup animation to sort meshes; 495 | meshes.forEach( mesh => { 496 | 497 | const bounds = mesh.children[ 0 ].userData.viewerProperties.volumeSlice.getBounds(); 498 | mesh.userData.zMin = bounds [ 4 ]; 499 | mesh.userData.zMax = bounds [ 5 ]; 500 | const z = mesh.userData.z = 0.5 * ( bounds [ 4 ] + bounds [ 5 ] ) + mesh.position.z; 501 | 502 | if ( params.displayMeanZ ) { 503 | 504 | // display mean z on each mesh 505 | var sprite = mesh.userData.Zsprite; 506 | 507 | if ( sprite ) { 508 | 509 | mesh.parent.remove( sprite ); 510 | sprite.material.dispose(); 511 | 512 | } 513 | 514 | mesh.userData.Zsprite = new THREE.TextSpriteHelper( mesh.parent, '' + Math.round( z ) ); 515 | 516 | } 517 | 518 | } ); 519 | 520 | const sortedMeshes = _.sortBy( meshes.filter( mesh => !mesh.userData.outlier && mesh.userData.registeredOnce ), 521 | mesh => - mesh.userData.z ); 522 | 523 | sortedMeshes.forEach( ( mesh, index ) => { 524 | 525 | const finalPos = mesh.parent.userData.final; 526 | const i = index % width; 527 | const j = Math.floor( index / width ); 528 | finalPos.x = i * params.spacingX; 529 | finalPos.z = - params.spacingZ * ( 1 + j ); 530 | 531 | let line = lines[ j ]; 532 | 533 | if ( !line ) { 534 | 535 | const material = new THREE.MeshBasicMaterial( { color: 0x00dddd} ); 536 | const geometry = new THREE.PlaneGeometry( ( 2 + width ) * params.spacingX, 10 ); 537 | 
// Tail of sortMeshes: finish creating the per-row plane (rotate into the xz
// plane, add to the scene), accumulate per-row stats nZ (count) and sZ (sum
// of mesh z), resetting them once per iteration, then place each row line at
// -(j+1)*spacingZ offset by a mean z.
// NOTE(review): `const data = lines[ 0 ].userData;` uses row 0's stats for
// EVERY row — confirm whether `lines[ j ].userData` was intended, or whether
// anchoring all rows to the first row's mean is deliberate.
// Then getRandomVolumes(): draws batchSize non-outlier meshes from a lazily
// rebuilt shuffled copy of `meshes` (4*l random swaps driven by rng.random()).
// NOTE(review): Math.round((l-1)*rng.random()) gives the endpoint indices
// half the weight of interior ones — a mildly biased shuffle; presumably
// acceptable here, but Math.floor(l * rng.random()) would be uniform.
// Finally: the vertical contrast slider UI (label, placement, initial value
// chosen so the mapped contrast (100 - value) * 0.04 starts at 1).
geometry.applyMatrix4( new THREE.Matrix4().makeRotationX( 0.5 * Math.PI ) ); 538 | geometry.computeBoundingBox(); 539 | geometry.computeBoundingSphere(); 540 | line = lines[ j ] = new THREE.Mesh( geometry, material ); 541 | line.position.x = 0.5 * width * params.spacingX; 542 | viewer.getScene().add( line ); 543 | line.userData.iteration = -1; 544 | 545 | } 546 | 547 | const data = line.userData; 548 | 549 | if ( data.iteration != numberOfIterations ) { 550 | 551 | data.iteration = numberOfIterations; 552 | data.nZ = 0; 553 | data.sZ = 0; 554 | 555 | } 556 | 557 | data.nZ++; 558 | data.sZ += mesh.userData.z; 559 | 560 | } ); 561 | 562 | for ( let j of Object.keys( lines ).map( parseFloat ) ) { 563 | 564 | const line = lines[ j ]; 565 | const data = lines[ 0 ].userData; 566 | line.position.z = - ( j + 1 ) * params.spacingZ + data.sZ / data.nZ; 567 | 568 | } 569 | 570 | } 571 | 572 | let shuffledArray; 573 | 574 | function getRandomVolumes() { 575 | 576 | const vols = []; 577 | 578 | while ( vols.length < batchSizeLabel.getValue() ) { 579 | 580 | if ( !shuffledArray || !shuffledArray.length ) { 581 | 582 | shuffledArray = meshes.slice(); 583 | const l = shuffledArray.length; 584 | 585 | for ( let j = 0; j < l * 4; j++ ) { 586 | 587 | const id1 = Math.round( ( l - 1 ) * rng.random() ); 588 | const id2 = Math.round( ( l - 1 ) * rng.random() ); 589 | const temp = shuffledArray[ id1 ]; 590 | shuffledArray[ id1 ] = shuffledArray[ id2 ]; 591 | shuffledArray[ id2 ] = temp; 592 | 593 | } 594 | 595 | } 596 | 597 | const mesh = shuffledArray.pop(); 598 | if ( !mesh.userData.outlier ) vols.push( mesh ); 599 | 600 | } 601 | 602 | return vols; 603 | 604 | } 605 | 606 | viewer.add( new qx.ui.basic.Label( "contrast" ), { top : 80, right : 0 } ); 607 | const slider = new qx.ui.form.Slider( "vertical" ); 608 | slider.setHeight( 300 ); 609 | viewer.add( slider, { right : 10, top : 100 } ); 610 | slider.setValue( 100 - 1 / 0.04 ); 611 | slider.addListener( "changeValue", () => { 612 | 
// End of the slider listener: walk the scene and apply the mapped contrast
// (100 - sliderValue) * 0.04 to every object that carries a volumeSlice;
// then the IIFE that wraps the whole script is closed.
// After the dump separator, match/match.cpp begins. IMPORTANT(review): the
// scrape stripped every angle-bracket payload — all `#include` directives
// lost their <header> names, and the typedefs below lost their template
// arguments (Descriptor was presumably vector<float> with an aligned
// allocator under USE_SSE_FOR_MATCHING, Match a pair of pointIdType, v3 a
// 3-element std::array — confirm against the upstream repository before
// rebuilding). Also visible: struct Point (48-float SURF descriptor plus
// xyz, transformed xyz, scale, laplacian sign, response) and the head of
// readCSVGZ(), which streams a gzip-compressed CSV through
// boost::iostreams and parses one keypoint per line (a cell starting with
// chr 13 — a stray CR — terminates the row).
viewer.getScene().traverse( object => { 613 | const slice = object.userData?.viewerProperties?.volumeSlice; 614 | if ( !slice ) return; 615 | slice.setContrast( ( 100 - slider.getValue() ) * 0.04); 616 | } ); 617 | }); 618 | 619 | } ) (); 620 | -------------------------------------------------------------------------------- /match/match.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | 15 | # ifdef USE_SSE_FOR_MATCHING 16 | #include 17 | #include "immintrin.h" 18 | #endif 19 | 20 | #include 21 | #include 22 | #include 23 | 24 | #include "../tools/pointIdType.h" 25 | #include "../tools/transformIO.h" 26 | 27 | namespace fs = boost::filesystem; 28 | using namespace std; 29 | # ifdef USE_SSE_FOR_MATCHING 30 | typedef vector > Descriptor; 31 | #else 32 | typedef vector Descriptor; 33 | #endif 34 | typedef pair Match; 35 | typedef vector MatchVect; 36 | typedef std::tuple pairMatches; 37 | typedef std::array v3; 38 | 39 | struct Point { 40 | Descriptor desc; 41 | float coordinates[3]; 42 | float transformedCoordinates[3]; 43 | float scale; 44 | float laplacianSign; 45 | float response; 46 | }; 47 | 48 | typedef vector< Point > Points; 49 | 50 | // reads the keypoint list contained in filename (in CSV format) 51 | Points* readCSVGZ(string filename) { 52 | 53 | std::string line; 54 | Points* points = new Points(); 55 | std::ifstream GZfile( filename, std::ios_base::in | std::ios_base::binary); 56 | boost::iostreams::filtering_istream file; 57 | file.push(boost::iostreams::gzip_decompressor()); 58 | file.push(GZfile); 59 | 60 | while(std::getline(file,line)) { 61 | 62 | Point row; 63 | std::stringstream lineStream(line); 64 | std::string cell; 65 | int count = 0; 66 | 67 | while(std::getline(lineStream,cell,',') && (int)cell[0] != 13 ) { 68 | 69 | float value = 
// Tail of readCSVGZ: columns 0-2 are xyz, 3 scale, 4 laplacian sign,
// 5 response, everything after that is appended to the descriptor; a row is
// kept only when count > 6, i.e. it carried at least one descriptor value.
// writeCSV(): mirrors that layout back to disk, comma-separated.
// NOTE(review): the `if ( i < points.size() )` guard before the endl is
// always true inside the loop — it looks like `i < points.size() - 1` was
// intended (to suppress the trailing newline); as written every row gets a
// newline, which downstream readers appear to tolerate, so it is left as-is.
// Also note `Point row = points[ i ];` makes an unused deep copy per row.
// Then readCSV(): same parser as readCSVGZ but over an uncompressed stream.
std::stof(cell); 70 | 71 | switch( count ) { 72 | 73 | case 0 : row.coordinates[ 0 ] = value; break; 74 | case 1 : row.coordinates[ 1 ] = value; break; 75 | case 2 : row.coordinates[ 2 ] = value; break; 76 | case 3 : row.scale = value; break; 77 | case 4 : row.laplacianSign = value; break; 78 | case 5 : row.response = value; break; 79 | default : row.desc.push_back( value ); 80 | 81 | } 82 | 83 | count++; 84 | 85 | } 86 | 87 | if ( count > 6 ) points->push_back( row ); 88 | 89 | } 90 | 91 | return points; 92 | } 93 | 94 | void writeCSV( Points &points, const char *fileName) { 95 | 96 | ofstream file; 97 | file.open(fileName, std::ofstream::out | std::ofstream::trunc); 98 | 99 | for (auto i = 0; i != points.size(); i++) { 100 | 101 | Point row = points[ i ]; 102 | 103 | file << points[ i ].coordinates[ 0 ] << ","; 104 | file << points[ i ].coordinates[ 1 ] << ","; 105 | file << points[ i ].coordinates[ 2 ] << ","; 106 | file << points[ i ].scale << ","; 107 | file << points[ i ].laplacianSign << ","; 108 | file << points[ i ].response << ","; 109 | 110 | for ( auto j = 0; j < points[ i ].desc.size(); j++ ) { 111 | 112 | file << points[ i ].desc[ j ]; 113 | 114 | if ( j < points[ i ].desc.size() - 1 ) { 115 | 116 | file << ","; 117 | 118 | } else { 119 | 120 | if ( i < points.size() ) { 121 | 122 | file << std::endl; 123 | 124 | } 125 | 126 | } 127 | 128 | } 129 | 130 | } 131 | 132 | file.close(); 133 | 134 | } 135 | 136 | // reads the keypoint list contained in filename (in CSV format) 137 | Points* readCSV(string filename) { 138 | 139 | std::string line; 140 | Points* points = new Points(); 141 | ifstream file( filename ); 142 | 143 | while(std::getline(file,line)) { 144 | 145 | Point row; 146 | std::stringstream lineStream(line); 147 | std::string cell; 148 | int count = 0; 149 | 150 | while(std::getline(lineStream,cell,',') && (int)cell[0] != 13 ) { 151 | 152 | float value = std::stof(cell); 153 | 154 | case 0 : row.coordinates[ 0 ] 
= value; break; 157 | case 1 : row.coordinates[ 1 ] = value; break; 158 | case 2 : row.coordinates[ 2 ] = value; break; 159 | case 3 : row.scale = value; break; 160 | case 4 : row.laplacianSign = value; break; 161 | case 5 : row.response = value; break; 162 | default : row.desc.push_back( value ); 163 | 164 | } 165 | 166 | count++; 167 | 168 | } 169 | 170 | if ( count > 6 ) points->push_back( row ); 171 | 172 | } 173 | 174 | return points; 175 | 176 | } 177 | 178 | // reads the keypoint list contained in filename (in binary format) 179 | Points* readBinary(string filename) { 180 | 181 | FILE* file=fopen(filename.c_str(),"rb"); 182 | Points* points = new Points(); 183 | 184 | while(!feof(file)) { 185 | 186 | Point row; 187 | float valF; 188 | int unused; 189 | unused = fread(&valF, sizeof(float), 1, file); 190 | row.coordinates[ 0 ] = valF; 191 | unused = fread(&valF, sizeof(float), 1, file); 192 | row.coordinates[ 1 ] = valF; 193 | unused = fread(&valF, sizeof(float), 1, file); 194 | row.coordinates[ 2 ] = valF; 195 | unused = fread(&valF, sizeof(float), 1, file); 196 | row.scale = valF; 197 | unused = fread(&valF, sizeof(float), 1, file); 198 | row.laplacianSign = valF; 199 | unused = fread(&valF, sizeof(float), 1, file); 200 | row.response = valF; 201 | row.desc.resize(48); 202 | unused = fread(row.desc.data(), sizeof(float), 48, file); 203 | points->push_back(row); 204 | 205 | } 206 | 207 | return points; 208 | } 209 | 210 | 211 | # ifdef USE_SSE_FOR_MATCHING 212 | 213 | static inline float _mm256_reduce_add_ps(__m256 x) { 214 | /* ( x3+x7, x2+x6, x1+x5, x0+x4 ) */ 215 | const __m128 x128 = _mm_add_ps(_mm256_extractf128_ps(x, 1), _mm256_castps256_ps128(x)); 216 | /* ( -, -, x1+x3+x5+x7, x0+x2+x4+x6 ) */ 217 | const __m128 x64 = _mm_add_ps(x128, _mm_movehl_ps(x128, x128)); 218 | /* ( -, -, -, x0+x1+x2+x3+x4+x5+x6+x7 ) */ 219 | const __m128 x32 = _mm_add_ss(x64, _mm_shuffle_ps(x64, x64, 0x55)); 220 | /* Conversion to float is a no-op on x86-64 */ 221 | return 
// Tail of the AVX horizontal reduce, then the two norm() variants: both
// return the (non-rooted) sum of squared differences between descriptors.
// The SSE variant processes 8 floats per iteration, so it requires `size`
// to be a multiple of 8 (48 here) — and _mm256_load_ps requires 32-byte
// aligned data; the Descriptor typedef's allocator argument was lost in the
// scrape, presumably an aligned allocator — confirm upstream.
// Then ComputeMatches(points2, points1, ...): for every point of points1,
// scan points2 for candidates, skipping pairs with differing laplacian
// sign, scale ratio beyond 1.3, or (when anatVal != 0) transformed-space
// euclidean distance above anatVal; in matchAll mode every candidate whose
// rooted descriptor distance beats `threshold` is emitted (order of the
// pair depends on `sym`), otherwise nearest/second-nearest bookkeeping
// applies (continues on the next physical line).
_mm_cvtss_f32(x32); 222 | } 223 | 224 | // 75 op 225 | inline float norm(Descriptor& pts1, Descriptor& pts2, int size) { 226 | float result = .0; 227 | float* a = pts1.data(); 228 | float* b = pts2.data(); 229 | 230 | for (int i = 0 ; i < size; i+=8) { 231 | const __m256 x_1Vec = _mm256_load_ps(a+i); 232 | const __m256 y_1Vec = _mm256_load_ps(b+i); 233 | __m256 ans = _mm256_sub_ps(x_1Vec, y_1Vec); // Soustraction 234 | ans = _mm256_mul_ps(ans, ans); // ^2 235 | result += _mm256_reduce_add_ps(ans); 236 | } 237 | 238 | return result; 239 | } 240 | 241 | #else 242 | 243 | inline float norm(Descriptor& pts1, Descriptor& pts2, int size) { 244 | float result = .0; 245 | 246 | for ( int i = 0 ; i < size; i++ ) { 247 | result += ( pts1[ i ] - pts2[ i ] ) * ( pts1[ i ] - pts2[ i ] ); 248 | } 249 | 250 | return result; 251 | } 252 | 253 | #endif 254 | 255 | MatchVect* ComputeMatches(Points &points2, Points &points1, float threshold, float dist2second, bool matchAll, float anatVal, bool sym = false) { 256 | 257 | MatchVect* matches = new MatchVect(); 258 | float d1, d2; 259 | int match = 0; 260 | int end1 = points1.size(); 261 | 262 | for (int i = 0; i < end1 ; i++) { 263 | 264 | d1 = d2 = FLT_MAX; 265 | int end2 = points2.size(); 266 | 267 | for ( int j = 0; j < end2 ; j++) { 268 | 269 | //Laplacian 270 | if (points1[i].laplacianSign != points2[j].laplacianSign) continue; 271 | 272 | //Scale 273 | if ((points1[i].scale/points2[j].scale > 1.3) || 274 | (points2[j].scale/points1[i].scale > 1.3) ) 275 | continue; 276 | 277 | //Anatomical test (euclidian norm after transform) 278 | if (anatVal != 0){ 279 | float x1 = points1[i].transformedCoordinates[0]; 280 | float y1 = points1[i].transformedCoordinates[1]; 281 | float z1 = points1[i].transformedCoordinates[2]; 282 | float x2 = points2[j].transformedCoordinates[0]; 283 | float y2 = points2[j].transformedCoordinates[1]; 284 | float z2 = points2[j].transformedCoordinates[2]; 285 | 286 | float euclNorm = sqrt((x1-x2)*(x1-x2) + 
// Tail of ComputeMatches. IMPORTANT(review): the scrape ATE a chunk of this
// function — `if(distpush_back(...)` below fuses what were originally
// lines ~303-324 (the nearest/second-nearest update `if (dist < d2) ...`,
// the `sqrt(d1) < threshold` and d1/d2 ratio tests, and the accepted-match
// emission). Do NOT attempt to reconstruct it without the upstream source.
// Then compareCSVrow (sorts Points by descending response) and the head of
// main(): defaults (N volumes cap, -sp response threshold, -np max points,
// -nt threads, -d descriptor distance 0.22, -d2 second-best ratio, z-crop
// bounds) and the option-parsing loop.
// NOTE(review): `value = argv[argumentsIndex + 1]` is read before checking
// that a value actually follows the key — a valueless flag given as the
// LAST argument makes this read past argv; confirm and guard upstream.
(y1-y2)*(y1-y2) + (z1-z2)*(z1-z2)); 287 | 288 | if (euclNorm > anatVal){ 289 | continue; 290 | } 291 | } 292 | 293 | float dist = norm(points1[i].desc, points2[j].desc, points1[i].desc.size() ); 294 | 295 | if (matchAll && sqrt(dist) < threshold) { 296 | if (sym) { 297 | matches->push_back(make_pair(i, match)); 298 | } else { 299 | matches->push_back(make_pair(match, i)); 300 | } 301 | } else { 302 | 303 | if(distpush_back(make_pair(i, match)); 325 | } else { 326 | matches->push_back(make_pair(match, i)); 327 | } 328 | } 329 | 330 | } 331 | 332 | } 333 | 334 | return matches; 335 | 336 | } 337 | 338 | bool compareCSVrow (Point i,Point j) { return (i.response>j.response); } 339 | 340 | int main( int argc, char *argv[] ) { 341 | 342 | std::chrono::time_point start, end; 343 | int N = 1000000; //something big enough 344 | float sp = 0; //Seuil pour les points d'interets 345 | int np = 1000000; //nb point d'interet max 346 | int nt = std::thread::hardware_concurrency(); 347 | if ( argc < 2 ) { 348 | std::cout<< "Usage : match pointFiles.txt [options] " << std::endl; 349 | return 1; 350 | } 351 | fs::path full_path = fs::system_complete( fs::path( argv[1] ) ); 352 | float dist = 0.22; 353 | float dist2second = 1; 354 | float zmin = -1e20; 355 | float zmax = 1e20; 356 | bool matchAll = false; 357 | bool writePoints = false; 358 | char* outputFileName = 0; 359 | int argumentsIndex = 2; 360 | float anatVal = 0.0; 361 | bool symFlag = false; 362 | int target = -1; 363 | char *transformPrefix = 0; 364 | 365 | while (argumentsIndex < argc) { 366 | 367 | char* key = argv[argumentsIndex]; 368 | char *value = argv[argumentsIndex + 1]; 369 | 370 | if (strcmp(key, "-n") == 0) { 371 | N = atoi(value); 372 | } 373 | 374 | if (strcmp(key, "-sp") == 0) { 375 | sp = atof(value); 376 | } 377 | 378 | if (strcmp(key, "-np") == 0) { 379 | np = atoi(value); 380 | } 381 | 382 | if (strcmp(key, "-nt") == 0) { 383 | nt = atoi(value); 384 | } 385 | 386 | if (strcmp(key, "-d") == 0) { 387 | dist 
// Rest of main's option parsing (-d2, -zmin/-zmax, -o output, -all, -p
// write per-image point CSVs, -anat, -sym — a valueless flag, hence the
// index decrement so the loop's += 2 nets to += 1 — -targ, -transformPrefix)
// followed by input discovery: a directory argument takes every regular
// file in it (no rigid offsets); a file argument is read line by line, each
// line giving a filename (absolute, or relative + ".csv") and an optional
// rigid x,y,z triple appended to `rigids`. Then the parallel load loop
// head: per image, optionally read transform "<prefix><it>.json", and
// dispatch on extension to readCSV / readBinary / readCSVGZ.
// NOTE(review): on an unknown extension only an error is printed and
// `points` stays UNINITIALIZED yet is used below — this looks like a
// missing early exit (e.g. exit(1) or continue with an empty set); confirm
// against upstream before relying on the error path.
= atof(value); 388 | } 389 | 390 | if (strcmp(key, "-d2") == 0) { 391 | dist2second = atof(value); 392 | } 393 | 394 | if (strcmp(key, "-zmin") == 0) { 395 | zmin = atof(value); 396 | } 397 | 398 | if (strcmp(key, "-zmax") == 0) { 399 | zmax = atof(value); 400 | } 401 | 402 | if (strcmp(key, "-o") == 0) { 403 | outputFileName = value; 404 | } 405 | 406 | if (strcmp(key, "-all") == 0) { 407 | matchAll = true; 408 | } 409 | 410 | if (strcmp(key, "-p") == 0) { 411 | writePoints = true; 412 | } 413 | 414 | if (strcmp(key, "-anat") == 0){ 415 | anatVal = atof(value); 416 | } 417 | if (strcmp(key, "-sym") == 0){ 418 | symFlag = true; 419 | argumentsIndex-=1; 420 | } 421 | 422 | if (strcmp(key, "-targ") == 0){ 423 | target = atoi(value); 424 | } 425 | 426 | if (strcmp(key, "-transformPrefix") == 0){ 427 | transformPrefix = value; 428 | } 429 | 430 | argumentsIndex += 2; 431 | } 432 | 433 | vector< Points* > allPoints; 434 | fs::directory_iterator end_iter; 435 | int i = 0; 436 | vector rigids; 437 | std::vector filenames; 438 | 439 | if (fs::is_directory(full_path)) { 440 | 441 | for ( fs::directory_iterator dir_itr( full_path ); 442 | dir_itr != end_iter; 443 | ++dir_itr ) { 444 | 445 | if ( fs::is_regular_file( dir_itr->status() ) ) { 446 | 447 | i++; 448 | filenames.push_back(dir_itr->path().native()); 449 | 450 | } 451 | 452 | } 453 | 454 | } else if ( is_regular_file( full_path ) ) { 455 | 456 | std::string line; 457 | ifstream file( full_path.native() ); 458 | 459 | while(std::getline(file,line)) { 460 | 461 | Point row; 462 | std::stringstream lineStream(line); 463 | std::string cell; 464 | std::getline(lineStream,cell,','); 465 | 466 | if ( cell.find( "/" ) == 0 ) { 467 | 468 | filenames.push_back( cell ); 469 | cout << cell << endl; 470 | 471 | } else { 472 | 473 | filenames.push_back(full_path.parent_path().native() + "/" + cell + ".csv"); 474 | cout << full_path.parent_path().native() + cell << endl; 475 | 476 | } 477 | 478 | v3 point; 479 | point[ 0 ] = point[ 
1 ] = point[ 2 ] = 0; 480 | 481 | try { 482 | std::getline(lineStream,cell,','); 483 | point[0] = std::stof (cell); 484 | std::getline(lineStream,cell,','); 485 | point[1] = std::stof (cell); 486 | std::getline(lineStream,cell,','); 487 | point[2] = std::stof (cell); 488 | } catch ( ... ) {} 489 | 490 | rigids.push_back(point); 491 | 492 | } 493 | 494 | } else { 495 | 496 | cerr << "Bad argument, first arg must be a valid file or a directory" << endl; 497 | return 1; 498 | 499 | } 500 | 501 | cout << "Found " << filenames.size() << " files, loading : " << fmin(N, filenames.size()) << endl; 502 | start = std::chrono::system_clock::now(); 503 | if (filenames.size() > N) filenames.resize(N); 504 | omp_set_num_threads( nt ); 505 | int nb = filenames.size(); 506 | allPoints.resize(nb); 507 | 508 | #pragma omp parallel shared(filenames, allPoints, cout) 509 | { 510 | //Full list 511 | #pragma omp for schedule(dynamic) 512 | for (auto it = 0 ; it < nb ; ++it) { 513 | 514 | string ext = filenames[it].substr(filenames[it].find_last_of(".") + 1); 515 | //cout << filenames[it] << endl; 516 | vtkGeneralTransform *transform = 0; 517 | if ( transformPrefix ) { 518 | 519 | std::string transformFile( transformPrefix ); 520 | transformFile += std::to_string( it ); 521 | transformFile += ".json"; 522 | std::cout << "Reading transform " << transformFile << std::endl; 523 | transform = readTransform( transformFile.c_str() ); 524 | 525 | } 526 | 527 | Points* points; 528 | if ( ext == "csv") 529 | points = readCSV(filenames[it]); 530 | else if ( ext == "bin") 531 | points = readBinary(filenames[it]); 532 | else if ( ext == "gz") 533 | points = readCSVGZ(filenames[it]); 534 | else { 535 | cerr << "Bad file format : " << ext << endl; 536 | } 537 | 538 | float zT = rigids.size() ? 
rigids[it][2] : 0; 539 | 540 | auto pend = remove_if (points->begin(), points->end(), 541 | [zT, zmin, zmax] (Point &val){ 542 | float z = val.coordinates[2] + zT; 543 | return z < zmin || z > zmax; 544 | }); 545 | 546 | points->erase (pend, points->end()); 547 | 548 | for ( auto point : *points ) { 549 | 550 | if ( transform ) { 551 | 552 | transform->TransformPoint( point.coordinates, point.transformedCoordinates ); 553 | 554 | } else { 555 | for ( int i = 0; i < 3; i++ ) 556 | point.transformedCoordinates[ i ] = point.coordinates[ i ]; 557 | } 558 | 559 | } 560 | 561 | #pragma omp critical 562 | cout << "image " << it << " rigid : " 563 | << rigids[it][0] << ", " << rigids[it][1] << ", " << rigids[it][2] 564 | << " before : " << points->size() << " points, after : " << points->size() << endl << flush; 565 | 566 | allPoints[it] = points; 567 | 568 | } 569 | 570 | } 571 | 572 | end = std::chrono::system_clock::now(); 573 | cout << " : " << std::chrono::duration(end-start).count() << "s" << endl; 574 | start = end; 575 | cout << allPoints[ 0 ][ 0 ][ 0 ].desc.size() << " values per descriptor" << endl; 576 | cout << "Sorting and pruning..." << endl; 577 | nb = allPoints.size(); 578 | 579 | #pragma omp parallel shared(filenames, allPoints) 580 | { 581 | 582 | #pragma omp for schedule(dynamic) 583 | for (auto it = 0 ; it < nb ; ++it) { 584 | 585 | auto rit= remove_if (allPoints[it]->begin(), allPoints[it]->end(), [sp](Point row){ 586 | return row.response < sp; 587 | }); 588 | 589 | allPoints[it]->erase(rit, allPoints[it]->end()); 590 | 591 | 592 | if ( allPoints[it]->size() > np) { 593 | partial_sort(allPoints[it]->begin(), allPoints[it]->begin()+np, allPoints[it]->end(), compareCSVrow); 594 | allPoints[it]->resize(np); 595 | } /*else { 596 | sort(allPoints[it]->begin(), allPoints[it]->end(), compareCSVrow); 597 | }*/ 598 | cout << ". 
(" << allPoints[it]->size() << ")"<< flush; 599 | 600 | if ( writePoints ) { 601 | std::stringstream outfilename; 602 | outfilename << "points" << it << ".csv"; 603 | cout << " writing " << outfilename.str() << endl; 604 | writeCSV( *allPoints[it], outfilename.str().c_str() ); 605 | } 606 | 607 | } 608 | 609 | } 610 | 611 | end = std::chrono::system_clock::now(); 612 | cout << " : " << std::chrono::duration(end-start).count() << "s" << endl; 613 | start = end; 614 | 615 | int sum = 0; 616 | 617 | vector< pair > indices; 618 | for (int i = 0 ; i < allPoints.size()-1 ; i++) { 619 | if (target >= 0){ 620 | if (i != target ){ 621 | indices.push_back( make_pair(i, target) ); 622 | } 623 | } else { 624 | for (int j = i+1 ; j < allPoints.size() ; j++) { 625 | indices.push_back( make_pair(i, j) ); 626 | } 627 | } 628 | } 629 | 630 | vector < vector< MatchVect* > > pairs; 631 | pairs.resize( nb ); 632 | for ( auto i = 0; i < nb; i++ ) { 633 | pairs[ i ].resize(nb); 634 | for ( auto j = 0; j < nb; j++ ) pairs[ i ][ j ] = 0; 635 | } 636 | 637 | cout << "Pairing... " << endl; 638 | #pragma omp parallel shared(allPoints, pairs) 639 | { 640 | #pragma omp for reduction(+:sum) schedule(dynamic) 641 | for (int it = 0 ; it < indices.size() ; it++) { 642 | MatchVect* matches = ComputeMatches(*allPoints[ indices[it].first ], *allPoints[ indices[it].second ], dist, dist2second, matchAll, anatVal); 643 | if (symFlag){ 644 | MatchVect* matchesSym = ComputeMatches(*allPoints[ indices[it].second ], *allPoints[ indices[it].first ], dist, dist2second, matchAll, anatVal, true); 645 | matches->insert(matches->end(), matchesSym->begin(), matchesSym->end()); 646 | } 647 | #pragma omp critical 648 | pairs[ indices[ it ].first ][ indices[ it ].second ] = matches; 649 | sum += matches->size(); 650 | cout << "." 
// End of the pairing loop's progress output, timing, then output-file
// writing. Default name is "out_<stem>_<count>.bin" unless -o was given.
// Header layout: image count as unsigned short, then per image the
// length-prefixed basename, the rigid triple, the point count
// (pointIdType), and per point 3 coords + scale + laplacianSign + response
// as floats.
// NOTE(review): the rigid triple is written as 3 * sizeof(double), but
// v3's std::array template arguments were lost in the scrape — confirm v3
// really is std::array<double, 3>, otherwise this writes past `tmp`.
<< flush; 651 | } 652 | } 653 | 654 | end = std::chrono::system_clock::now(); 655 | cout << " : " << std::chrono::duration(end-start).count() << "s" << endl; 656 | start = end; 657 | 658 | cout << "Nb Match : " << sum << endl; 659 | 660 | std::stringstream outfilename; 661 | 662 | string f = full_path.stem().string(); 663 | 664 | if (outputFileName) { 665 | 666 | string file(outputFileName); 667 | outfilename << file; 668 | 669 | } else { 670 | 671 | outfilename << "out_" << f << "_" << filenames.size() << ".bin"; 672 | 673 | } 674 | 675 | FILE * file = fopen(outfilename.str().c_str(),"wb"); 676 | 677 | if (file == NULL) { 678 | 679 | cout << "write error : " << outfilename.str() << endl; 680 | exit(1); 681 | 682 | } 683 | 684 | unsigned short nbAcq = filenames.size(); 685 | fwrite(&nbAcq, sizeof(unsigned short), 1, file); 686 | 687 | for (auto it = 0 ; it < allPoints.size() ; it++) { 688 | 689 | // Write Filename 690 | size_t found = filenames[it].find_last_of("/\\"); 691 | string currFile = filenames[it].substr(found+1); 692 | unsigned short sizeString = currFile.size(); 693 | fwrite(&sizeString, sizeof(unsigned short), 1, file); 694 | fwrite(currFile.c_str(), sizeof(char), currFile.size(), file); 695 | 696 | // Write Rigid 697 | v3 tmp; 698 | if (rigids.size() > 0) { 699 | 700 | tmp = rigids[it]; 701 | 702 | } else { 703 | 704 | tmp[0] = 0.0; tmp[1] = 0.0; tmp[2] = 0.0; 705 | 706 | } 707 | 708 | fwrite(tmp.data(), sizeof(double), 3, file); 709 | 710 | // Write Points 711 | pointIdType nbPoints = allPoints[it]->size(); 712 | 713 | fwrite(&nbPoints, sizeof(pointIdType), 1, file); 714 | 715 | for (auto rowIt = 0 ; rowIt < allPoints[it]->size() ; rowIt++) { 716 | 717 | auto point = &allPoints[it]->at(rowIt); 718 | fwrite(point->coordinates, sizeof(float), 3, file); 719 | fwrite(&point->scale, sizeof(float), 1, file); 720 | fwrite(&point->laplacianSign, sizeof(float), 1, file); 721 | fwrite(&point->response, sizeof(float), 1, file); 722 | 723 | } 724 | 725 | } 726 | 
727 | for ( auto i = 0; i < nb; i++ ) { 728 | 729 | for ( auto j = 0; j < nb; j++ ) { 730 | 731 | // Write local header 732 | MatchVect* matches = pairs[i][j]; 733 | if ( !matches ) continue; 734 | unsigned int size = matches->size(); 735 | fwrite(&i, sizeof(unsigned short), 1, file); 736 | fwrite(&j, sizeof(unsigned short), 1, file); 737 | fwrite(&size, sizeof(unsigned int), 1, file); 738 | fwrite(matches->data(), sizeof(Match), size, file); 739 | 740 | } 741 | 742 | } 743 | 744 | fclose(file); 745 | cout << "Output file : " << outfilename.str() << endl; 746 | 747 | } 748 | --------------------------------------------------------------------------------