├── MobileRGBDOdometry
│   ├── Structure.framework
│   │   ├── Versions
│   │   │   ├── Current
│   │   │   └── A
│   │   │       ├── Structure
│   │   │       └── Headers
│   │   │           ├── Structure.h
│   │   │           └── StructureSLAM.h
│   │   ├── Headers
│   │   └── Structure
│   ├── MobileOpenCVOdometry
│   │   ├── en.lproj
│   │   │   └── InfoPlist.strings
│   │   ├── ViewController.h
│   │   ├── main.m
│   │   ├── AppDelegate.h
│   │   ├── MobileOpenCVOdometry-Prefix.pch
│   │   ├── Images.xcassets
│   │   │   ├── AppIcon.appiconset
│   │   │   │   └── Contents.json
│   │   │   └── LaunchImage.launchimage
│   │   │       └── Contents.json
│   │   ├── MobileOpenCVOdometry-Info.plist
│   │   ├── AppDelegate.m
│   │   ├── ViewController_iPad.xib
│   │   ├── ViewController_iPhone.xib
│   │   └── ViewController.mm
│   ├── build_opencv_ios_with_contrib.diff
│   ├── README.md
│   └── MobileOpenCVOdometry.xcodeproj
│       └── project.pbxproj
├── DepthColorCalibration
│   └── README.md
├── README.md
├── RGBDTutorialSlides.pdf
└── DepthColorRegistration
    ├── src
    │   ├── CMakeLists.txt
    │   ├── Registration.h
    │   ├── main.cpp
    │   └── Registration.cpp
    ├── README.md
    └── CMakeLists.txt
/MobileRGBDOdometry/Structure.framework/Versions/Current: -------------------------------------------------------------------------------- 1 | A -------------------------------------------------------------------------------- /MobileRGBDOdometry/Structure.framework/Headers: -------------------------------------------------------------------------------- 1 | Versions/Current/Headers -------------------------------------------------------------------------------- /MobileRGBDOdometry/Structure.framework/Structure: -------------------------------------------------------------------------------- 1 | Versions/Current/Structure -------------------------------------------------------------------------------- /DepthColorCalibration/README.md: -------------------------------------------------------------------------------- 1 | # Depth Color Calibration # 2 | 3 | Sample code coming soon... -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # OpenCV RGBD Tutorial – CVPR 2014 # 2 | 3 | The presentation + sample code from the talk.
4 | -------------------------------------------------------------------------------- /RGBDTutorialSlides.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pokeefe/RGBDTutorial-CVPR2014/HEAD/RGBDTutorialSlides.pdf -------------------------------------------------------------------------------- /MobileRGBDOdometry/MobileOpenCVOdometry/en.lproj/InfoPlist.strings: -------------------------------------------------------------------------------- 1 | /* Localized versions of Info.plist keys */ 2 | 3 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/Structure.framework/Versions/A/Structure: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pokeefe/RGBDTutorial-CVPR2014/HEAD/MobileRGBDOdometry/Structure.framework/Versions/A/Structure -------------------------------------------------------------------------------- /DepthColorRegistration/src/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | find_package(OpenCV REQUIRED) 2 | 3 | include_directories(SYSTEM ${OpenCV_INCLUDE_DIRS}) 4 | 5 | add_executable (DepthColorRegistration main.cpp Registration.cpp) 6 | 7 | target_link_libraries(DepthColorRegistration 8 | ${OpenCV_LIBRARIES} 9 | ) 10 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/MobileOpenCVOdometry/ViewController.h: -------------------------------------------------------------------------------- 1 | /* 2 | This file is part of the Structure SDK. 3 | Copyright © 2014 Occipital, Inc. All rights reserved. 4 | http://structure.io 5 | */ 6 | 7 | #import <UIKit/UIKit.h> 8 | #define HAS_LIBCXX 9 | #import <Structure/Structure.h> 10 | 11 | @interface ViewController : UIViewController 12 | 13 | @end 14 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/MobileOpenCVOdometry/main.m: -------------------------------------------------------------------------------- 1 | /* 2 | This file is part of the Structure SDK. 3 | Copyright © 2014 Occipital, Inc. All rights reserved. 4 | http://structure.io 5 | */ 6 | 7 | #import <UIKit/UIKit.h> 8 | 9 | #import "AppDelegate.h" 10 | 11 | 12 | int main(int argc, char *argv[]) 13 | { 14 | @autoreleasepool { 15 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/MobileOpenCVOdometry/AppDelegate.h: -------------------------------------------------------------------------------- 1 | /* 2 | This file is part of the Structure SDK. 3 | Copyright © 2014 Occipital, Inc. All rights reserved. 4 | http://structure.io 5 | */ 6 | 7 | #import <UIKit/UIKit.h> 8 | 9 | #import "ViewController.h" 10 | 11 | @interface AppDelegate : UIResponder <UIApplicationDelegate> 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | @property (strong, nonatomic) ViewController *viewController; 16 | 17 | @end 18 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/MobileOpenCVOdometry/MobileOpenCVOdometry-Prefix.pch: -------------------------------------------------------------------------------- 1 | // 2 | // Prefix header 3 | // 4 | // The contents of this file are implicitly included at the beginning of every source file.
5 | // 6 | 7 | #import <Availability.h> 8 | 9 | #ifndef __IPHONE_6_1 10 | #warning "This project uses features only available in iOS SDK 6.1 and later." 11 | #endif 12 | 13 | #ifdef __OBJC__ 14 | #import <UIKit/UIKit.h> 15 | #import <Foundation/Foundation.h> 16 | #endif 17 | -------------------------------------------------------------------------------- /DepthColorRegistration/README.md: -------------------------------------------------------------------------------- 1 | # Depth Color Registration # 2 | 3 | Registering depth to an external color camera. 4 | 5 | To use this, edit the camera intrinsics and extrinsics in main.cpp so that they correspond to your actual setup. (This isn't needed just to see something happening.) 6 | 7 | Also, RGB camera access and frame synchronization aren't included here, since those will vary wildly depending on your hardware. 8 | 9 | 10 | ## Building ## 11 | 12 | mkdir build 13 | cd build 14 | cmake .. 15 | make 16 | 17 | -------------------------------------------------------------------------------- /DepthColorRegistration/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8) 2 | project(DepthColorRegistration) 3 | 4 | # On Linux, force a GCC recent enough for the C++ features used here (g++ >= 4.7) 5 | if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU") 6 | if(CMAKE_CXX_COMPILER_VERSION VERSION_LESS "4.7") 7 | set (CMAKE_CXX_COMPILER "g++-4.7") 8 | endif() 9 | endif() 10 | 11 | include_directories(${CMAKE_SOURCE_DIR}/src) 12 | 13 | set (EXECUTABLE_OUTPUT_PATH ${CMAKE_BINARY_DIR}/bin) 14 | if (NOT IS_DIRECTORY ${EXECUTABLE_OUTPUT_PATH}) 15 | file (MAKE_DIRECTORY ${EXECUTABLE_OUTPUT_PATH}) 16 | endif () 17 | 18 | add_subdirectory (src) 19 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/build_opencv_ios_with_contrib.diff: -------------------------------------------------------------------------------- 1 | diff --git a/platforms/ios/build_framework.py b/platforms/ios/build_framework.py 2 | index 4d4f7e3..b34922a 100755 3 | --- a/platforms/ios/build_framework.py 4 | +++ b/platforms/ios/build_framework.py 5 | @@ -39,9 +39,10 @@ def build_opencv(srcroot, buildroot, target, arch): 6 | cmakeargs = ("-GXcode " + 7 | "-DCMAKE_BUILD_TYPE=Release " + 8 | "-DCMAKE_TOOLCHAIN_FILE=%s/platforms/ios/cmake/Toolchains/Toolchain-%s_Xcode.cmake " + 9 | + "-DOPENCV_EXTRA_MODULES_PATH=%s/../opencv_contrib/modules " + 10 | "-DBUILD_opencv_world=ON " + 11 | "-DCMAKE_C_FLAGS=\"-Wno-implicit-function-declaration\" " + 12 | - "-DCMAKE_INSTALL_PREFIX=install") % (srcroot, target) 13 | + "-DCMAKE_INSTALL_PREFIX=install") % (srcroot, target, srcroot) 14 | # if cmake cache exists, just rerun cmake to update OpenCV.xproj if necessary 15 | if os.path.isfile(os.path.join(builddir, "CMakeCache.txt")): 16 | os.system("cmake %s ." % (cmakeargs,)) 17 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/README.md: -------------------------------------------------------------------------------- 1 | # Mobile OpenCV RGBD Odometry # 2 | 3 | This sample demonstrates how easy it is to integrate OpenCV's RGBD odometry into a minimal sample project from the Structure SDK. The odometry is part of the RGBD module, which can be found in the [OpenCV contrib repository](https://github.com/Itseez/opencv_contrib). Currently, there is no visualization of the odometry result (it's just printed to the console). You can roll your own VR or AR game, or even do some large-scale mapping.
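If you want to wire the odometry up yourself, the core of the integration boils down to something like the sketch below (a minimal sketch assuming the `rgbd` module from opencv_contrib as built above; `cameraMatrix` and the frame arguments are placeholders for whatever your capture code provides):

    #include <opencv2/rgbd.hpp>  // cv::rgbd::RgbdOdometry, from opencv_contrib
    #include <iostream>

    // Minimal sketch: feed consecutive grayscale + depth frame pairs to
    // RgbdOdometry and print the estimated frame-to-frame motion.
    void printOdometry(const cv::Mat& prevGray, const cv::Mat& prevDepthMeters,
                       const cv::Mat& currGray, const cv::Mat& currDepthMeters,
                       const cv::Mat& cameraMatrix)
    {
        cv::rgbd::RgbdOdometry odometry(cameraMatrix);
        cv::Mat Rt; // 4x4 rigid body transform from the previous to the current frame
        // Images must be CV_8UC1 and depth CV_32FC1 in meters; the masks are optional.
        bool ok = odometry.compute(prevGray, prevDepthMeters, cv::Mat(),
                                   currGray, currDepthMeters, cv::Mat(), Rt);
        if (ok)
            std::cout << "Frame-to-frame motion:" << std::endl << Rt << std::endl;
    }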
4 | 5 | For more information about the Structure Sensor and the Structure SDK, visit [this website](http://structure.io/). 6 | 7 | 8 | ## Building ## 9 | 10 | A prebuilt binary of `opencv2.framework` that includes the RGBD module is not included in this repository. You can download a binary of pre-alpha 3.0 [here.](https://www.dropbox.com/s/l9yu7hzv9j3d5qh/opencv2.framework.zip) 11 | 12 | If you'd like to build `opencv2.framework` yourself with the contrib modules, check out `build_opencv_ios_with_contrib.diff` and apply it to your opencv repo. It assumes that opencv_contrib has been cloned on the same level as the main OpenCV repository. 13 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/MobileOpenCVOdometry/Images.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "29x29", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "40x40", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "60x60", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "ipad", 20 | "size" : "29x29", 21 | "scale" : "1x" 22 | }, 23 | { 24 | "idiom" : "ipad", 25 | "size" : "29x29", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "ipad", 30 | "size" : "40x40", 31 | "scale" : "1x" 32 | }, 33 | { 34 | "idiom" : "ipad", 35 | "size" : "40x40", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "ipad", 40 | "size" : "76x76", 41 | "scale" : "1x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "76x76", 46 | "scale" : "2x" 47 | } 48 | ], 49 | "info" : { 50 | "version" : 1, 51 | "author" : "xcode" 52 | } 53 | } -------------------------------------------------------------------------------- /MobileRGBDOdometry/MobileOpenCVOdometry/Images.xcassets/LaunchImage.launchimage/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "orientation" : "portrait", 5 | "idiom" : "iphone", 6 | "extent" : "full-screen", 7 | "minimum-system-version" : "7.0", 8 | "scale" : "2x" 9 | }, 10 | { 11 | "orientation" : "portrait", 12 | "idiom" : "iphone", 13 | "subtype" : "retina4", 14 | "extent" : "full-screen", 15 | "minimum-system-version" : "7.0", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "orientation" : "portrait", 20 | "idiom" : "ipad", 21 | "extent" : "full-screen", 22 | "minimum-system-version" : "7.0", 23 | "scale" : "1x" 24 | }, 25 | { 26 | "orientation" : "landscape", 27 | "idiom" : "ipad", 28 | "extent" : "full-screen", 29 | "minimum-system-version" : "7.0", 30 | "scale" : "1x" 31 | }, 32 | { 33 | "orientation" : "portrait", 34 | "idiom" : "ipad", 35 | "extent" : "full-screen", 36 | "minimum-system-version" : "7.0", 37 | "scale" : "2x" 38 | }, 39 | { 40 | "orientation" : "landscape", 41 | "idiom" : "ipad", 42 | "extent" : "full-screen", 43 | "minimum-system-version" : "7.0", 44 | "scale" : "2x" 45 | } 46 | ], 47 | "info" : { 48 | "version" : 1, 49 | "author" : "xcode" 50 | } 51 | } -------------------------------------------------------------------------------- /MobileRGBDOdometry/MobileOpenCVOdometry/MobileOpenCVOdometry-Info.plist: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8"?> 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> 3 | <plist version="1.0"> 4 | <dict> 5 | <key>CFBundleDevelopmentRegion</key> 6 | <string>en</string> 7 | <key>CFBundleDisplayName</key> 8 | <string>Odometry</string> 9 | <key>CFBundleExecutable</key> 10 | <string>${EXECUTABLE_NAME}</string> 11 | <key>CFBundleIdentifier</key> 12 | <string>com.occipital.${PRODUCT_NAME:rfc1034identifier}</string> 13 | <key>CFBundleInfoDictionaryVersion</key> 14 |
<string>6.0</string> 15 | <key>CFBundleName</key> 16 | <string>${PRODUCT_NAME}</string> 17 | <key>CFBundlePackageType</key> 18 | <string>APPL</string> 19 | <key>CFBundleShortVersionString</key> 20 | <string>1.0</string> 21 | <key>CFBundleSignature</key> 22 | <string>????</string> 23 | <key>CFBundleVersion</key> 24 | <string>1.0</string> 25 | <key>LSRequiresIPhoneOS</key> 26 | <true/> 27 | <key>UIRequiredDeviceCapabilities</key> 28 | <array> 29 | <string>armv7</string> 30 | </array> 31 | <key>UISupportedExternalAccessoryProtocols</key> 32 | <array> 33 | <string>io.structure.control</string> 34 | <string>io.structure.infrared</string> 35 | <string>io.structure.depth</string> 36 | </array> 37 | <key>UISupportedInterfaceOrientations</key> 38 | <array> 39 | <string>UIInterfaceOrientationLandscapeRight</string> 40 | </array> 41 | <key>UISupportedInterfaceOrientations~ipad</key> 42 | <array> 43 | <string>UIInterfaceOrientationLandscapeRight</string> 44 | </array> 45 | </dict> 46 | </plist> 47 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/MobileOpenCVOdometry/AppDelegate.m: -------------------------------------------------------------------------------- 1 | /* 2 | This file is part of the Structure SDK. 3 | Copyright © 2014 Occipital, Inc. All rights reserved. 4 | http://structure.io 5 | */ 6 | 7 | #import "AppDelegate.h" 8 | 9 | @implementation AppDelegate 10 | 11 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions 12 | { 13 | self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]]; 14 | // Override point for customization after application launch. 15 | if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) { 16 | self.viewController = [[ViewController alloc] initWithNibName:@"ViewController_iPhone" bundle:nil]; 17 | } else { 18 | self.viewController = [[ViewController alloc] initWithNibName:@"ViewController_iPad" bundle:nil]; 19 | } 20 | self.window.rootViewController = self.viewController; 21 | [self.window makeKeyAndVisible]; 22 | return YES; 23 | } 24 | - (void)applicationWillResignActive:(UIApplication *)application 25 | { 26 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 27 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game. 28 | } 29 | 30 | - (void)applicationDidEnterBackground:(UIApplication *)application 31 | { 32 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 33 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 34 | } 35 | 36 | - (void)applicationWillEnterForeground:(UIApplication *)application 37 | { 38 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. 39 | } 40 | 41 | - (void)applicationDidBecomeActive:(UIApplication *)application 42 | { 43 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 44 | } 45 | 46 | - (void)applicationWillTerminate:(UIApplication *)application 47 | { 48 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
49 | } 50 | 51 | @end 52 | -------------------------------------------------------------------------------- /DepthColorRegistration/src/Registration.h: -------------------------------------------------------------------------------- 1 | // 2 | // Registration.h 3 | // 4 | // Software License Agreement (BSD License) 5 | // 6 | // Copyright (c) 2014, Pat O'Keefe 7 | // All rights reserved. 8 | // 9 | // Redistribution and use in source and binary forms, with or without modification, are 10 | // permitted provided that the following conditions are met: 11 | // 12 | // 1. Redistributions of source code must retain the above copyright notice, this list of 13 | // conditions and the following disclaimer. 14 | // 2. Redistributions in binary form must reproduce the above copyright notice, this list 15 | // of conditions and the following disclaimer in the documentation and/or other materials 16 | // provided with the distribution. 17 | // 3. Neither the name of the copyright holder nor the names of its contributors may be 18 | // used to endorse or promote products derived from this software without specific prior 19 | // written permission. 20 | // 21 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY 22 | // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 23 | // MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL 24 | // THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 26 | // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 27 | // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 28 | // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF 29 | // THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | // 31 | 32 | #if CV_VERSION_MAJOR == 3 33 | #include "opencv2/core.hpp" 34 | #else 35 | #include "opencv2/core/core.hpp" 36 | #endif 37 | 38 | namespace oc { 39 | 40 | 41 | /** Class for registering depth data to an external color camera 42 | * Registration is performed by creating a depth cloud, transforming the cloud by 43 | * the rigid body transformation between the cameras, and then projecting the 44 | * transformed points into the RGB camera. 45 | * 46 | * uv_rgb = K_rgb * [R | t] * z * inv(K_ir) * uv_ir 47 | */ 48 | class Registration { 49 | public: 50 | 51 | /** Constructor 52 | * @param unregisteredCameraMatrix the camera matrix of the depth camera 53 | * @param unregisteredDistCoeffs the distortion coefficients of the depth camera. NOTE: CURRENTLY UNUSED 54 | * @param registeredCameraMatrix the camera matrix of the external RGB camera 55 | * @param registeredDistCoeffs the distortion coefficients of the RGB camera 56 | * @param Rt the rigid body transform between the cameras. Used as seen above. 
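* (For example, main.cpp models a pure 4 cm baseline between the cameras by passing Rt = identity with Rt(0,3) = 0.04, i.e. a 4 cm translation along x.)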
57 | */ 58 | Registration(const cv::Matx33f& unregisteredCameraMatrix, 59 | const cv::Vec<float, 5>& unregisteredDistCoeffs, 60 | const cv::Matx33f& registeredCameraMatrix, 61 | const cv::Vec<float, 5>& registeredDistCoeffs, 62 | const cv::Matx44f& Rt); 63 | 64 | /** Performs the registration 65 | * @param unregisteredDepthMillimeters the raw depth from the depth camera in millimeters 66 | * @param outputImagePlaneSize the dimensions of the registered image in pixels 67 | * @param registeredDepth the final registered depth in millimeters 68 | * @param depthDilation whether or not the depth is dilated to avoid holes and occlusion errors (optional) 69 | */ 70 | void registerDepthToColor(const cv::Mat_<uint16_t>& unregisteredDepthMillimeters, 71 | const cv::Size& outputImagePlaneSize, 72 | cv::Mat_<uint16_t>& registeredDepth, 73 | bool depthDilation=false); 74 | 75 | private: 76 | 77 | cv::Matx33f _unregisteredCameraMatrix; 78 | cv::Vec<float, 5> _unregisteredDistCoeffs; 79 | 80 | cv::Matx33f _registeredCameraMatrix; 81 | cv::Vec<float, 5> _registeredDistCoeffs; 82 | 83 | cv::Matx44f _rbtRgb2Depth, _projection; 84 | 85 | bool _has_distortion; 86 | }; 87 | 88 | } 89 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/MobileOpenCVOdometry/ViewController_iPad.xib: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 1792 5 | 12E55 6 | 4488.1 7 | 1187.39 8 | 626.00 9 | 10 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 11 | 3715.3 12 | 13 | 14 | IBProxyObject 15 | IBUIView 16 | 17 | 18 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 19 | 20 | 21 | PluginDependencyRecalculationVersion 22 | 23 | 24 | 25 | 26 | IBFilesOwner 27 | IBIPadFramework 28 | 29 | 30 | IBFirstResponder 31 | IBIPadFramework 32 | 33 | 34 | 35 | 274 36 | {{0, 20}, {1024, 748}} 37 | 38 | 39 | 40 | 3 41 | MQA 42 | 43 | 2 44 | 45 | 46 | 47 | 2 48 | 49 | 50 | 3 51 | 3 52 | 53 | IBIPadFramework 54 | 55 | 56 | 57 | 58 | 59 | 60 | view 61 | 62 | 63 | 64 | 3 65 | 66 | 67 | 68 | 69 | 70 | 0 71 | 72 | 73 | 74 | 75 | 76 | -1 77 | 78 | 79 | File's Owner 80 | 81 | 82 | -2 83 | 84 | 85 | 86 | 87 | 2 88 | 89 | 90 | 91 | 92 | 93 | 94 | ViewController 95 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 96 | UIResponder 97 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 98 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 99 | 100 | 101 | 102 | 103 | 104 | 3 105 | 106 | 107 | 0 108 | IBIPadFramework 109 | YES 110 | 111 | com.apple.InterfaceBuilder.CocoaTouchPlugin.InterfaceBuilder3 112 | 113 | 114 | YES 115 | 3 116 | YES 117 | 3715.3 118 | 119 | 120 | -------------------------------------------------------------------------------- /DepthColorRegistration/src/main.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // main.cpp 3 | // 4 | // Software License Agreement (BSD License) 5 | // 6 | // Copyright (c) 2014, Pat O'Keefe 7 | // All rights reserved. 8 | // 9 | // Redistribution and use in source and binary forms, with or without modification, are 10 | // permitted provided that the following conditions are met: 11 | // 12 | // 1. Redistributions of source code must retain the above copyright notice, this list of 13 | // conditions and the following disclaimer. 14 | // 2. Redistributions in binary form must reproduce the above copyright notice, this list 15 | // of conditions and the following disclaimer in the documentation and/or other materials 16 | // provided with the distribution. 17 | // 3. 
Neither the name of the copyright holder nor the names of its contributors may be 18 | // used to endorse or promote products derived from this software without specific prior 19 | // written permission. 20 | // 21 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY 22 | // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 23 | // MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL 24 | // THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 26 | // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 27 | // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 28 | // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF 29 | // THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | // 31 | 32 | 33 | #include "opencv2/highgui/highgui.hpp" 34 | #include "opencv2/imgproc/imgproc.hpp" 35 | 36 | #include <iostream> 37 | 38 | #include "Registration.h" 39 | 40 | using namespace cv; 41 | using namespace std; 42 | 43 | 44 | int main(int argc, const char * argv[]) { 45 | 46 | //---------------------- 47 | // Open an OpenNI device 48 | //---------------------- 49 | 50 | //TODO: You'll want to open an RGB camera stream here too (the one with which you wish to register the depth) 51 | 52 | cout << "Device opening ..." << endl; 53 | 54 | VideoCapture capture; 55 | capture.open( CAP_OPENNI ); 56 | 57 | if( !capture.isOpened() ) 58 | { 59 | cout << "Cannot open a capture object." << endl; 60 | return -1; 61 | } 62 | 63 | // We don't want registration on, since we're going to do it ourselves. 64 | // Some devices with RGB cameras can perform registration on device 65 | bool modeRes=false; 66 | modeRes = capture.set( CAP_PROP_OPENNI_REGISTRATION, 0 ); 67 | 68 | if (!modeRes) { 69 | cout << "Can't disable registration. That's crazy!\n" << endl; 70 | return -1; 71 | } 72 | 73 | // Display the current configuration 74 | cout << "\nDepth generator output mode:" << endl << 75 | "FRAME_WIDTH " << capture.get( CAP_PROP_FRAME_WIDTH ) << endl << 76 | "FRAME_HEIGHT " << capture.get( CAP_PROP_FRAME_HEIGHT ) << endl << 77 | "FRAME_MAX_DEPTH " << capture.get( CAP_PROP_OPENNI_FRAME_MAX_DEPTH ) << " mm" << endl << 78 | "FPS " << capture.get( CAP_PROP_FPS ) << endl << 79 | "REGISTRATION " << capture.get( CAP_PROP_OPENNI_REGISTRATION ) << endl; 80 | 81 | 82 | //--------------------------------------- 83 | // Specify camera properties and geometry 84 | //-------------------------------------- 85 | 86 | //TODO: Fill in the values for your setup. 87 | 88 | // Depth camera intrinsics 89 | Matx33f unregisteredCameraMatrix = Matx33f::eye(); 90 | unregisteredCameraMatrix(0,0) = 570.0f; 91 | unregisteredCameraMatrix(1,1) = 570.0f; 92 | unregisteredCameraMatrix(0,2) = 320.0f-0.5f; 93 | unregisteredCameraMatrix(1,2) = 240.0f-0.5f; 94 | 95 | // NOTE: The depth distortion coefficients are currently not used by the Registration class.
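// A note on the conventions above: the -0.5f terms place the principal point
// using the pixel-center convention, i.e. the optical center of a 640x480
// image is expressed as (320 - 0.5, 240 - 0.5) = (319.5, 239.5).
// These intrinsics feed the registration equation used throughout this project:
//   uv_rgb = K_rgb * [R | t] * z * inv(K_ir) * uv_ir
// i.e. back-project a depth pixel through inv(K_ir) scaled by its depth z,
// move the resulting 3D point into the RGB camera frame with [R | t], and
// project it back into the image plane with K_rgb.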
96 | Vec<float, 5> unregisteredDistCoeffs(0,0,0,0,0); 97 | 98 | 99 | // RGB camera intrinsics 100 | Matx33f registeredCameraMatrix = Matx33f::eye(); 101 | registeredCameraMatrix(0,0) = 570.0f; 102 | registeredCameraMatrix(1,1) = 570.0f; 103 | registeredCameraMatrix(0,2) = 320.0f-0.5f; 104 | registeredCameraMatrix(1,2) = 240.0f-0.5f; 105 | 106 | Vec<float, 5> registeredDistCoeffs(0,0,0,0,0); 107 | 108 | Size2i registeredImagePlaneSize = Size2i(640, 480); 109 | 110 | // The rigid body transformation between cameras. 111 | // Used as: uv_rgb = K_rgb * [R | t] * z * inv(K_ir) * uv_ir 112 | Matx44f registrationRbt = Matx44f::eye(); 113 | registrationRbt(0,3) = .04; 114 | 115 | 116 | //------------------------------ 117 | // Create our registration class 118 | //------------------------------ 119 | oc::Registration registration(unregisteredCameraMatrix, 120 | unregisteredDistCoeffs, 121 | registeredCameraMatrix, 122 | registeredDistCoeffs, 123 | registrationRbt); 124 | 125 | for (;;) { 126 | 127 | Mat_<uint16_t> depthMap; 128 | 129 | if( !capture.grab() ) 130 | { 131 | cout << "Can't grab depth." << endl; 132 | return -1; 133 | } 134 | else 135 | { 136 | if( capture.retrieve( depthMap, CAP_OPENNI_DEPTH_MAP ) ) 137 | { 138 | 139 | // Actually perform the registration 140 | Mat_<uint16_t> registeredDepth; 141 | bool performDilation = false; 142 | registration.registerDepthToColor(depthMap, 143 | registeredImagePlaneSize, 144 | registeredDepth, 145 | performDilation); 146 | 147 | 148 | // Display the unregistered and registered depth 149 | const float scaleFactor = 0.05f; 150 | { 151 | Mat_<uint8_t> show; 152 | depthMap.convertTo( show, CV_8UC1, scaleFactor ); 153 | imshow( "depth map", show ); 154 | } 155 | { 156 | Mat_<uint8_t> show; 157 | registeredDepth.convertTo( show, CV_8UC1, scaleFactor ); 158 | imshow( "registered map", show ); 159 | } 160 | 161 | } 162 | 163 | } 164 | 165 | if( waitKey( 1 ) >= 0 ) 166 | break; 167 | } 168 | 169 | 170 | 171 | return 0; 172 | } 173 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/MobileOpenCVOdometry/ViewController_iPhone.xib: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 1552 5 | 13A603 6 | 4514 7 | 1265 8 | 695.00 9 | 10 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 11 | 3747 12 | 13 | 14 | IBProxyObject 15 | IBUIView 16 | 17 | 18 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 19 | 20 | 21 | PluginDependencyRecalculationVersion 22 | 23 | 24 | 25 | 26 | IBFilesOwner 27 | IBCocoaTouchFramework 28 | 29 | 30 | IBFirstResponder 31 | IBCocoaTouchFramework 32 | 33 | 34 | 35 | 274 36 | 37 | {{0, 20}, {568, 300}} 38 | 39 | 40 | 41 | 3 42 | MC43NQA 43 | 44 | 2 45 | 46 | 47 | NO 48 | 49 | 50 | 3 51 | 3 52 | 53 | 54 | IBUIScreenMetrics 55 | 56 | YES 57 | 58 | 59 | 60 | 61 | 62 | {320, 568} 63 | {568, 320} 64 | 65 | 66 | IBCocoaTouchFramework 67 | Retina 4-inch Full Screen 68 | 2 69 | 70 | IBCocoaTouchFramework 71 | 72 | 73 | 74 | 75 | 76 | 77 | view 78 | 79 | 80 | 81 | 7 82 | 83 | 84 | 85 | 86 | 87 | 0 88 | 89 | 90 | 91 | 92 | 93 | -1 94 | 95 | 96 | File's Owner 97 | 98 | 99 | -2 100 | 101 | 102 | 103 | 104 | 6 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | ViewController 113 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 114 | UIResponder 115 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 116 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 117 | 118 | 119 | 120 | 121 | 122 | 12 123 | 124 | 125 | 126 | 127 | ViewController 128 | UIViewController 129 | 130 | IBProjectSource 131 | ./Classes/ViewController.h 132 133
| 134 | 135 | 136 | 137 | 0 138 | IBCocoaTouchFramework 139 | YES 140 | 141 | com.apple.InterfaceBuilder.CocoaTouchPlugin.iPhoneOS 142 | 143 | 144 | 145 | com.apple.InterfaceBuilder.CocoaTouchPlugin.InterfaceBuilder3 146 | 147 | 148 | YES 149 | 3 150 | YES 151 | 3747 152 | 153 | 154 | -------------------------------------------------------------------------------- /DepthColorRegistration/src/Registration.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Registration.cpp 3 | // 4 | // Software License Agreement (BSD License) 5 | // 6 | // Copyright (c) 2014, Pat O'Keefe 7 | // All rights reserved. 8 | // 9 | // Redistribution and use in source and binary forms, with or without modification, are 10 | // permitted provided that the following conditions are met: 11 | // 12 | // 1. Redistributions of source code must retain the above copyright notice, this list of 13 | // conditions and the following disclaimer. 14 | // 2. Redistributions in binary form must reproduce the above copyright notice, this list 15 | // of conditions and the following disclaimer in the documentation and/or other materials 16 | // provided with the distribution. 17 | // 3. Neither the name of the copyright holder nor the names of its contributors may be 18 | // used to endorse or promote products derived from this software without specific prior 19 | // written permission. 20 | // 21 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY 22 | // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 23 | // MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL 24 | // THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 26 | // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 27 | // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 28 | // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF 29 | // THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | // 31 | 32 | 33 | #include <vector> 34 | #include <iostream> 35 | #include "Registration.h" 36 | 37 | #if CV_VERSION_MAJOR == 3 38 | #include "opencv2/calib3d.hpp" 39 | #include "opencv2/rgbd.hpp" 40 | #else 41 | #include "opencv2/calib3d/calib3d.hpp" 42 | #include "opencv2/rgbd/rgbd.hpp" 43 | #endif 44 | 45 | using namespace cv; 46 | 47 | namespace oc { 48 | 49 | Registration::Registration(const cv::Matx33f& unregisteredCameraMatrix, 50 | const cv::Vec<float, 5>& unregisteredDistCoeffs, 51 | const cv::Matx33f& registeredCameraMatrix, 52 | const cv::Vec<float, 5>& registeredDistCoeffs, 53 | const cv::Matx44f& Rt) { 54 | 55 | _unregisteredCameraMatrix = unregisteredCameraMatrix; 56 | _unregisteredDistCoeffs = unregisteredDistCoeffs; 57 | 58 | _registeredCameraMatrix = registeredCameraMatrix; 59 | _registeredDistCoeffs = registeredDistCoeffs; 60 | 61 | _rbtRgb2Depth = Rt; 62 | 63 | // Figure out whether we'll have to apply a distortion 64 | _has_distortion = false; 65 | for(unsigned char i = 0; i < 5; ++i) 66 | _has_distortion |= (unregisteredDistCoeffs(i) != 0); 67 | 68 | // A point (i,j,1) will have to be converted to 3d first, by multiplying it by K.inv() 69 | // It will then be transformed by _rbtRgb2Depth 70 | cv::Matx44f K = cv::Matx44f::zeros(); 71 | for(unsigned char j = 0; j < 3; ++j) 72 | for(unsigned char i = 0; i < 3; ++i) 73 | K(j, i) = _unregisteredCameraMatrix(j, i); 74 | K(3, 3) = 1; 75 | 76 | if (_has_distortion) 77 | _projection = _rbtRgb2Depth * K.inv(); 78 | else { 79 | // In case there is no distortion, projecting it is just applying _registeredCameraMatrix 80 | _projection = cv::Matx44f::zeros(); 81 | for(unsigned char j = 0; j < 3; ++j) 82 | for(unsigned char i = 0; i < 3; ++i) 83 | _projection(j, i) = _registeredCameraMatrix(j, i); 84 | _projection(3, 3) = 1; 85 | _projection = _projection * _rbtRgb2Depth * K.inv(); 86 | } 87 | } 88 | 89 | void Registration::registerDepthToColor(const cv::Mat_<uint16_t>& unregisteredDepthMillimeters, 90 | const cv::Size& outputImagePlaneSize, 91 | cv::Mat_<uint16_t>& registeredDepth, 92 | bool depthDilation) 93 | { 94 | 95 | // Create our output Mat, filled with an initial value of 0 96 | registeredDepth = cv::Mat_<uint16_t>::zeros(outputImagePlaneSize); 97 | 98 | cv::Rect registeredDepthBounds(cv::Point(), registeredDepth.size()); 99 | 100 | Mat_<cv::Point3f> transformedCloud; 101 | { 102 | Mat_<cv::Point3f> point_tmp(outputImagePlaneSize); 103 | for(size_t j = 0; j < point_tmp.rows; ++j) { 104 | const uint16_t *depth = unregisteredDepthMillimeters[j]; 105 | 106 | cv::Point3f *point = point_tmp[j]; 107 | for(size_t i = 0; i < point_tmp.cols; ++i, ++depth, ++point) { 108 | float rescaled_depth = float(*depth) / 1000.0; 109 | point->x = i * rescaled_depth; 110 | point->y = j * rescaled_depth; 111 | point->z = rescaled_depth; 112 | } 113 | } 114 | 115 | perspectiveTransform(point_tmp, transformedCloud, _projection); 116 | } 117 | 118 | std::vector<cv::Point2f> outputProjectedPoints(transformedCloud.cols); 119 | 120 | for( int y = 0; y < transformedCloud.rows; y++ ) 121 | { 122 | if (_has_distortion) { 123 | // Project an entire row of points. This has high overhead, so doing this for each point would be slow. 124 | // Doing this for the entire image at once would require more memory.
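// Note that in this branch the cloud is already expressed in the registered
// (RGB) camera's coordinate frame, which is why projectPoints is given zero
// rotation and translation vectors below: it only needs to apply the
// registered intrinsics and distortion coefficients.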
125 | projectPoints(transformedCloud.row(y), 126 | cv::Vec3f(0,0,0), 127 | cv::Vec3f(0,0,0), 128 | _registeredCameraMatrix, 129 | _registeredDistCoeffs, 130 | outputProjectedPoints); 131 | } else { 132 | // With no distortion, we can project the points directly 133 | cv::Point2f *point2d = &outputProjectedPoints[0], 134 | *point2d_end = point2d + outputProjectedPoints.size(); 135 | cv::Point3f *point3d = transformedCloud[y]; 136 | for( ; point2d < point2d_end; ++point2d, ++point3d ) { 137 | point2d->x = point3d->x / point3d->z; 138 | point2d->y = point3d->y / point3d->z; 139 | } 140 | } 141 | cv::Point2f *outputProjectedPoint = &outputProjectedPoints[0]; 142 | cv::Point3f *p = transformedCloud[y], *p_end = p + transformedCloud.cols; 143 | 144 | for( ; p < p_end; ++outputProjectedPoint, ++p ) 145 | { 146 | // Go back to millimeters, since that's what our output will be 147 | float cloudDepthMillimeters = 1e3*p->z; 148 | 149 | // Cast to integer pixel location 150 | Point2i projectedPixelLocation = *outputProjectedPoint; 151 | 152 | // Ensure that the projected point is actually contained in our output image 153 | if (!registeredDepthBounds.contains(projectedPixelLocation)) 154 | continue; 155 | 156 | uint16_t& outputDepthLocation = registeredDepth(projectedPixelLocation.y, projectedPixelLocation.x); 157 | 158 | 159 | // Occlusion check 160 | if ( outputDepthLocation == 0 || (outputDepthLocation > cloudDepthMillimeters) ) { 161 | outputDepthLocation = cloudDepthMillimeters; 162 | } 163 | 164 | 165 | // If desired, dilate this point to avoid holes in the final image 166 | if (depthDilation) { 167 | 168 | // Choosing to dilate in a 2x2 region, where the original projected location is in the bottom right of this 169 | // region. This is what's done on PrimeSense devices, but a more accurate scheme could be used. 170 | Point2i dilatedProjectedLocations[3] = {Point2i(projectedPixelLocation.x - 1, projectedPixelLocation.y ), 171 | Point2i(projectedPixelLocation.x , projectedPixelLocation.y - 1), 172 | Point2i(projectedPixelLocation.x - 1, projectedPixelLocation.y - 1)}; 173 | 174 | 175 | for (int i = 0; i < 3; i++) { 176 | 177 | Point2i& dilatedCoordinates = dilatedProjectedLocations[i]; 178 | 179 | if (!registeredDepthBounds.contains(dilatedCoordinates)) 180 | continue; 181 | 182 | uint16_t& outputDepthLocation = registeredDepth(dilatedCoordinates.y, dilatedCoordinates.x); 183 | 184 | // Occlusion check 185 | if ( outputDepthLocation == 0 || (outputDepthLocation > cloudDepthMillimeters) ) { 186 | outputDepthLocation = cloudDepthMillimeters; 187 | } 188 | 189 | } 190 | 191 | } // depthDilation 192 | 193 | } // iterate over a row 194 | } // iterate over the rows 195 | 196 | } 197 | 198 | 199 | } 200 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/Structure.framework/Versions/A/Headers/Structure.h: -------------------------------------------------------------------------------- 1 | /* 2 | This file is part of the Structure SDK. 3 | Copyright © 2014 Occipital, Inc. All rights reserved.
4 | http://structure.io 5 | */ 6 | 7 | #pragma once 8 | 9 | #include <stdint.h> 10 | #include 11 | #import <Foundation/Foundation.h> 12 | #import <CoreMedia/CoreMedia.h> 13 | 14 | #define ST_API __attribute__((visibility("default"))) 15 | 16 | //------------------------------------------------------------------------------ 17 | #pragma mark - Sensor Controller Types 18 | 19 | /// Sensor Initialization Status 20 | typedef NS_ENUM(NSInteger, STSensorControllerInitStatus) 21 | { 22 | STSensorControllerInitStatusSuccess = 0, 23 | STSensorControllerInitStatusAlreadyInitialized = 1, 24 | STSensorControllerInitStatusSensorNotFound = 2, 25 | STSensorControllerInitStatusSensorIsWakingUp = 3, 26 | STSensorControllerInitStatusOpenFailed = 4, 27 | }; 28 | 29 | /// Streaming Interruption Reason 30 | typedef NS_ENUM(NSInteger, STSensorControllerDidStopStreamingReason) 31 | { 32 | STSensorControllerDidStopStreamingReasonAppBackgrounded = 0 33 | }; 34 | 35 | /// Stream Configuration 36 | typedef NS_ENUM(NSInteger, StructureStreamConfig) 37 | { 38 | CONFIG_QVGA_DEPTH = 0, 39 | CONFIG_QVGA_REGISTERED_DEPTH, 40 | CONFIG_QVGA_DEPTH_AND_IR, 41 | CONFIG_QVGA_IR, 42 | CONFIG_VGA_DEPTH, 43 | CONFIG_VGA_IR, 44 | CONFIG_VGA_DEPTH_AND_IR, 45 | CONFIG_VGA_REGISTERED_DEPTH, 46 | CONFIG_QVGA_DEPTH_60_FPS, 47 | CONFIG_NUMS 48 | }; 49 | 50 | /// Frame Sync Configuration 51 | typedef NS_ENUM(NSInteger, FrameSyncConfig) 52 | { 53 | FRAME_SYNC_OFF = 0, // Default operation 54 | FRAME_SYNC_DEPTH_AND_RGB, 55 | FRAME_SYNC_IR_AND_RGB 56 | }; 57 | 58 | 59 | /// Frame 60 | typedef struct STFrame 61 | { 62 | uint16_t *data; 63 | double timestamp; 64 | int width; 65 | int height; 66 | } STFrame; 67 | 68 | /// Depth Frame 69 | typedef STFrame STDepthFrame; 70 | 71 | /// Infrared Frame 72 | typedef STFrame STIRFrame; 73 | 74 | 75 | /** Sensor Info 76 | 77 | @note 78 | This is an opaque type, for now. 79 | */ 80 | struct STSensorInfo; 81 | 82 | //------------------------------------------------------------------------------ 83 | # pragma mark - Sensor Controller Delegate 84 | 85 | # if !defined(__cplusplus) && !defined (HAS_LIBCXX) 86 | # error "Structure framework requires the C++ runtime. See Structure SDK Reference." 87 | # endif 88 | 89 | /** Sensor Controller Delegate 90 | 91 | The interface that your application-specific class must implement in order to receive sensor controller callbacks. 92 | 93 | @warning When creating a new application implementing a sensor controller delegate, the main `Info.plist` needs to contain an additional key "`Supported external accessory protocols`", with the following array of values: 94 | 95 | - `io.structure.control` 96 | - `io.structure.depth` 97 | - `io.structure.infrared` 98 | 99 | Without this modification to the plist, the app will not be able to connect to the sensor. All sample apps have this key/value array. 100 | 101 | See also <[STSensorController sharedController]> & <[STSensorController delegate]>. 102 | 103 | @note Delegate Registration Example 104 | 105 | [ STSensorController sharedController ].delegate = self; 106 | */ 107 | @protocol STSensorControllerDelegate <NSObject> 108 | 109 | /// @name Connection Status 110 | 111 | /// Notifies the delegate that the controller established a successful connection to the sensor. 112 | - (void)sensorDidConnect; 113 | 114 | /// Notifies the delegate that the sensor was disconnected from the controller. 115 | - (void)sensorDidDisconnect; 116 | 117 | /** Notifies the delegate that the sensor stopped streaming frames to the controller. 118 | 119 | @param reason The reason why the stream was stopped.
See: STSensorControllerDidStopStreamingReason. 120 | */ 121 | - (void)sensorDidStopStreaming:(STSensorControllerDidStopStreamingReason)reason; 122 | 123 | /// @name Power Management 124 | - (void)sensorDidEnterLowPowerMode; 125 | - (void)sensorDidLeaveLowPowerMode; 126 | - (void)sensorBatteryNeedsCharging; // Will be called on main thread. 127 | 128 | @optional 129 | 130 | /// @name Colorless Frames 131 | 132 | /** Notifies the delegate that the sensor made a new depth frame available to the controller. 133 | 134 | @param depthFrame The new depth frame. 135 | */ 136 | - (void)sensorDidOutputDepthFrame:(STDepthFrame *)depthFrame; 137 | 138 | /** Notifies the delegate that the sensor made a new IR frame available to the controller. 139 | 140 | @param irFrame The new IR frame. 141 | */ 142 | - (void)sensorDidOutputIRFrame:(STIRFrame *)irFrame; 143 | 144 | /** @name Color-synchronized Frames 145 | 146 | Frame sync methods will only be used if setFrameSyncConfig: has been configured to sync frames. 147 | Also, data will only be delivered if frameSyncNewColorImage: is called every time a new sample buffer is available. The 148 | driver needs the CMSampleBuffers in order to return them through these methods. 149 | */ 150 | 151 | /** Notifies the delegate that the sensor made a new pair of depth and color frames available to the controller. 152 | 153 | See also: 154 | 155 | - <[STSensorController setFrameSyncConfig:]> 156 | - <[STSensorController frameSyncNewColorImage:]> 157 | 158 | @param depthFrame The new depth frame 159 | @param sampleBuffer The new color buffer 160 | */ 161 | - (void)sensorDidOutputSynchronizedDepthFrame:(STDepthFrame *)depthFrame 162 | andColorFrame:(CMSampleBufferRef)sampleBuffer; 163 | 164 | /** Notifies the delegate that the sensor made a new pair of synchronized IR and color frames available to the controller. 165 | 166 | See also: 167 | 168 | - <[STSensorController setFrameSyncConfig:]> 169 | - <[STSensorController frameSyncNewColorImage:]> 170 | 171 | @param irFrame The new IR frame 172 | @param sampleBuffer The new color buffer 173 | */ 174 | - (void)sensorDidOutputSynchronizedIRFrame:(STIRFrame *)irFrame 175 | andColorFrame:(CMSampleBufferRef)sampleBuffer; 176 | 177 | @end 178 | 179 | //------------------------------------------------------------------------------ 180 | # pragma mark - Sensor Controller 181 | 182 | /** The sensor controller is the central point that manages all the interactions between the sensor and your application-specific delegate class. 183 | 184 | Its only instance is available through the sharedController method. 185 | 186 | Your custom delegate object can be registered using its delegate property. 187 | 188 | See also: 189 | 190 | - <STSensorControllerDelegate> 191 | */ 192 | ST_API 193 | @interface STSensorController : NSObject 194 | 195 | /// @name Controller Setup 196 | 197 | /** 198 | The STSensorController singleton. 199 | 200 | Use it to register your application-specific STSensorControllerDelegate delegate. 201 | */ 202 | + (STSensorController *)sharedController; 203 | 204 | /** 205 | The STSensorControllerDelegate delegate. 206 | 207 | It will receive all notifications from the sensor, as well as raw stream data. 208 | */ 209 | @property(nonatomic, unsafe_unretained) id<STSensorControllerDelegate> delegate; 210 | 211 | /** 212 | Attempt to connect to the Structure Sensor.
213 | 214 | @return Connection has succeeded only if the STSensorControllerInitStatus return value is one of: 215 | 216 | - STSensorControllerInitStatusSuccess 217 | - STSensorControllerInitStatusAlreadyInitialized 218 | 219 | @note Many methods (including startStreamingWithConfig:) will have no effect until this method succeeds at initializing the sensor. 220 | */ 221 | - (STSensorControllerInitStatus)initializeSensorConnection; 222 | 223 | /** 224 | This will begin streaming data from the sensor and delivering it using the delegate methods. 225 | 226 | @param config The stream configuration to use. See: StructureStreamConfig. 227 | */ 228 | - (void)startStreamingWithConfig:(StructureStreamConfig)config; 229 | 230 | /** 231 | Stop streaming data from the sensor. 232 | 233 | After this method is called, there may still be several pending frames delivered to the delegate. 234 | */ 235 | - (void)stopStreaming; 236 | 237 | /** Request that the driver attempt to synchronize depth or IR frames with color frames from AVFoundation. 238 | 239 | When frame sync is active, one of the following methods is used in lieu of [STSensorControllerDelegate sensorDidOutputDepthFrame:], depending on the selected configuration: 240 | 241 | - [STSensorControllerDelegate sensorDidOutputSynchronizedDepthFrame:andColorFrame:] 242 | - [STSensorControllerDelegate sensorDidOutputSynchronizedIRFrame:andColorFrame:] 243 | 244 | You must then repeatedly call frameSyncNewColorImage: from the AVFoundation video capture delegate methods. Otherwise, the sensor controller delegate methods will never deliver any frames. This is simply because synchronized frames cannot be delivered if there are no color frames to synchronize. 245 | 246 | @param config When **not** equal to FRAME_SYNC_OFF, the driver will use the optional synchronized delegate methods to deliver frames. See: FrameSyncConfig. 247 | 248 | @note Frame sync of depth+IR+RGB and 60 FPS depth are not currently supported. 249 | @note For frame sync to be effective, the AVCaptureDevice must be configured to have a min and max frame rate of 30 FPS. 250 | */ 251 | - (void)setFrameSyncConfig:(FrameSyncConfig)config; 252 | 253 | /** Give the driver a color frame that will be used to synchronize shutters between the iOS camera and the IR camera. 254 | 255 | When receiving the CMSampleBufferRef from AVFoundation, you should only call this one method and do no other processing. 256 | When a synchronized frame is found, one of the optional synchronized STSensorController delegate methods will be called, at which point you can then process/render the sampleBuffer. 257 | */ 258 | - (void)frameSyncNewColorImage:(CMSampleBufferRef)sampleBuffer; 259 | 260 | /// @name Sensor Status 261 | 262 | /// Checks whether the controlled sensor is connected. 263 | - (BOOL)isConnected; 264 | 265 | /// Checks whether the controlled sensor is in low-power mode. 266 | - (BOOL)isLowPower; 267 | 268 | /// Returns an integer in 0..100 representing the battery charge. 269 | - (int)getBatteryChargePercentage; 270 | 271 | /// @name Sensor Information 272 | 273 | /// Returns the name of the controlled sensor. 274 | - (NSString *)getName; 275 | 276 | /// Returns the serial number of the controlled sensor. 277 | - (NSString *)getSerialNumber; 278 | 279 | /// Returns the firmware revision of the controlled sensor. 280 | - (NSString *)getFirmwareRevision; 281 | 282 | /// Returns the hardware revision of the controlled sensor.
283 | - (NSString *)getHardwareRevision; 284 | 285 | /** Returns the controlled sensor info as a pointer to an opaque type. 286 | 287 | See also: 288 | 289 | - [STScene initWithContext:frameBufferSize:sensorInfo:freeGLTextureUnit:] 290 | - [STDepthToRgba initWithSensorInfo:] 291 | - [STCubePlacementInitializer initWithCameraInfo:volumeSizeInMeters:] 292 | */ 293 | - (struct STSensorInfo *)getSensorInfo:(StructureStreamConfig)config; 294 | 295 | /// @name Advanced Setup 296 | 297 | /** Enable or disable an optional dilation of depth values that has the effect of filling holes. 298 | 299 | If the streaming mode is changed with startStreamingWithConfig:, this method will need to be called again for it to take effect. 300 | 301 | @param enabled Whether hole filtering is enabled. 302 | 303 | @note The hole filter is enabled by default. 304 | */ 305 | - (void)setHoleFilterEnabled:(BOOL)enabled; 306 | 307 | /** Enable or disable high sensor gain. 308 | 309 | @param enabled When set to YES, the sensor gain will be increased, improving performance on dark or far-away objects at the expense of some bright nearby objects. 310 | 311 | @note High gain is disabled by default. 312 | */ 313 | - (void)setHighGain:(BOOL)enabled; 314 | 315 | /** 316 | Specify a new rigid body transformation between the iOS camera and IR camera. 317 | 318 | Since each device will have a slightly different RBT, this will improve the quality of registered depth. 319 | A stream stop and restart with registration will be required for this to take effect. 320 | The RBT represents the world motion of the IR camera w.r.t. the RGB camera. The coordinate frame is right handed: X right, Y down, Z out. 321 | 322 | @param newRbt This parameter is expected as a pointer to 16 floating point values in _column_ major order. This is the default ordering of Eigen. 323 | 324 | @note Currently the intrinsics assumed in the registration are fixed and set as follows: 325 | 326 | K_RGB_QVGA = [305.73, 0, 159.69; 0, 305.62, 119.86; 0, 0, 1] 327 | K_RGB_DISTORTION = [0.2073, -0.5398, 0, 0, 0] --> k1 k2 p1 p2 k3 328 | K_IR_QVGA = [288.28, 0, 159.26; 0, 288.24, 120.47; 0, 0, 1] 329 | K_IR_DISTORTION = [0, 0, 0, 0, 0] --> k1 k2 p1 p2 k3 330 | 331 | @note The following is an example call of this method using the Eigen C++ library (not required). 332 | Eigen is already column major, so we can just take the address of an Isometry3f, which is internally represented by 16 floats. 333 | 334 | - (void) updateRegistration 335 | { 336 | [ [STSensorController sharedController] stopStreaming ]; 337 | 338 | Eigen::Isometry3f sampleIsometry = Eigen::Isometry3f::Identity(); 339 | Eigen::Vector3f translation = Eigen::Vector3f(0.034, 0, 0.017); 340 | 341 | sampleIsometry.translate(translation); 342 | sampleIsometry.rotate((Eigen::Matrix3f() << 0.99977, -0.0210634, -0.00412405, 343 | 0.0210795, 0.99977, 0.00391278, 344 | 0.00404069, -0.00399881, 0.999984).finished()); 345 | 346 | [ [STSensorController sharedController] setRegistrationRBT: (float*) &sampleIsometry ]; 347 | 348 | [ [STSensorController sharedController] startStreamingWithConfig: CONFIG_QVGA_REGISTERED_DEPTH ]; 349 | } 350 | */ 351 | - (void)setRegistrationRBT:(float *)newRbt; 352 | 353 | @end 354 | 355 | //------------------------------------------------------------------------------ 356 | # pragma mark - STFloatDepthFrame 357 | 358 | /** 359 | Processed depth image with float pixel values in millimeters.
360 | 361 | Raw STDepthFrame objects output by Structure Sensor have 16-bit integer pixels holding internal shift values. STFloatDepthFrame transforms this data into metric float values. 362 | */ 363 | ST_API 364 | @interface STFloatDepthFrame : NSObject 365 | 366 | /** Image width */ 367 | @property (readonly, nonatomic) int width; 368 | 369 | /** Image height */ 370 | @property (readonly, nonatomic) int height; 371 | 372 | /** Capture timestamp in seconds */ 373 | @property (readonly, nonatomic) double timestamp; 374 | 375 | /** Pointer to the beginning of a contiguous chunk of width*height depth pixel values, in millimeters. */ 376 | @property (readonly, nonatomic) const float *depthAsMillimeters; 377 | 378 | /** Recompute metric values from raw Structure depth frame */ 379 | - (void)updateFromDepthFrame:(STDepthFrame *)depthFrame; 380 | 381 | @end 382 | 383 | //------------------------------------------------------------------------------ 384 | # pragma mark - STWirelessLog 385 | 386 | /** 387 | Wireless logging utility. 388 | 389 | This class gives the ability to wirelessly send log messages to a remote console. 390 | 391 | It is very useful when the sensor is occupying the lightning port. 392 | */ 393 | ST_API 394 | @interface STWirelessLog : NSObject 395 | 396 | /** Redirects the standard and error outputs to a TCP connection. 397 | 398 | Messages sent to the stdout and stderr (such as `NSLog`, `std::cout`, `std::cerr`, `printf`) will be sent to the given IPv4 address on the specified port. 399 | 400 | In order to receive these messages on a remote machine, you can, for instance, use the *netcat* command-line utility (available by default on Mac OS X). Simply run in a terminal: `nc -lk <port>` 401 | 402 | @note If the connection fails, the returned error will be non-`nil` and no output will be transmitted. 403 | */ 404 | + (void)broadcastLogsToWirelessConsoleAtAddress:(NSString *)ipv4Address usingPort:(int)port error:(NSError **)error; 405 | 406 | @end 407 | 408 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/Structure.framework/Versions/A/Headers/StructureSLAM.h: -------------------------------------------------------------------------------- 1 | /* 2 | This file is part of the Structure SDK. 3 | Copyright © 2014 Occipital, Inc. All rights reserved. 4 | http://structure.io 5 | */ 6 | 7 | #pragma once 8 | 9 | #import <GLKit/GLKit.h> 10 | #import <CoreMotion/CoreMotion.h> 11 | #import <Structure/Structure.h> 12 | 13 | //------------------------------------------------------------------------------ 14 | #pragma mark - STMesh 15 | 16 | /** Reference to face-vertex triangle mesh data. 17 | 18 | Stores mesh data as a collection of vertices and faces. STMesh objects are references, and access to the underlying data should be protected by locks in case multiple threads may be accessing it. 19 | 20 | Since OpenGL ES only supports 16-bit unsigned shorts for face indices, meshes larger than 65535 faces have to be split into smaller submeshes. STMesh is therefore a reference to a collection of partial meshes, each of them having fewer than 65k faces. 21 | */ 22 | ST_API 23 | @interface STMesh : NSObject 24 | 25 | /// Number of partial meshes. 26 | - (int)numberOfMeshes; 27 | 28 | /// Number of faces of a given submesh. 29 | - (int)numberOfMeshFaces:(int)meshIndex; 30 | 31 | /// Number of lines of a given submesh. 32 | - (int)numberOfMeshLines:(int)meshIndex; 33 | 34 | /// Number of vertices of a given submesh.
35 | - (int)numberOfMeshVertices:(int)meshIndex; 36 | 37 | /** Pointer to a contiguous chunk of `numberOfMeshVertices:meshIndex` `GLKVector3` values representing per-vertex normals (nx, ny, nz). 38 | 39 | @return This method returns NULL if there are no normals. 40 | */ 41 | - (GLKVector3 *)meshNormals:(int)meshIndex; 42 | 43 | /// Pointer to a contiguous chunk of `numberOfMeshVertices:meshIndex` `GLKVector3` values representing vertex coordinates (x, y, z). 44 | - (GLKVector3 *)meshVertices:(int)meshIndex; 45 | 46 | /// Pointer to a contiguous chunk of `(3 * numberOfMeshFaces:meshIndex)` 16-bit `unsigned short` values representing vertex indices. Each face is represented by three vertex indices. 47 | - (unsigned short *)meshFaces:(int)meshIndex; 48 | 49 | /** STMesh can also return a polygon mesh as a sequence of lines. 50 | 51 | @return This method returns a pointer to a contiguous chunk of `(2 * numberOfMeshLines:meshIndex)` 16-bit `unsigned short` values representing vertex indices. Each line is represented by two vertex indices. 52 | */ 53 | - (unsigned short *)meshLines:(int)meshIndex; 54 | 55 | /// Save the mesh as a Wavefront OBJ file. 56 | - (void)writeToObjFile:(NSString *)fileName; 57 | 58 | /// Create a copy of the current mesh 59 | - (id)initWithMesh:(STMesh*)mesh; 60 | 61 | /** Create a decimated (simplified) mesh from the current mesh with a target number of faces. `numFaces` can range from 1 to 65535. If the target number of faces is larger than the current mesh's number of faces, no processing is done. 62 | 63 | @return This method returns a new decimated (simplified) mesh. `nil` is returned on error. 64 | */ 65 | - (STMesh *)meshFromDecimation:(unsigned int)numFaces error:(NSError **)error; 66 | 67 | @end 68 | 69 | //------------------------------------------------------------------------------ 70 | # pragma mark - STScene 71 | 72 | /** Common data shared and updated by the SLAM pipeline. 73 | 74 | An STScene object groups information about the camera and the reconstructed mesh. 75 | SLAM objects will be updating the scene, potentially using background threads. 76 | As a result, special care should be taken when accessing STScene members if an STTracker or STMapper is still active. 77 | In particular, STMesh objects should be properly locked. 78 | */ 79 | ST_API 80 | @interface STScene : NSObject 81 | 82 | /** Mandatory initializer for STScene. 83 | 84 | @param glContext a valid EAGLContext. 85 | @param frameBufferSize size of the active view framebuffer. 86 | @param sensorInfo Structure Sensor information. 87 | @param freeGLTextureUnit a GL_TEXTUREX unit which will be used when SLAM objects need to render meshes to an OpenGL texture. 88 | */ 89 | - (id) initWithContext:(EAGLContext *)glContext 90 | frameBufferSize:(CGSize)frameBufferSize 91 | sensorInfo:(struct STSensorInfo *)sensorInfo 92 | freeGLTextureUnit:(GLenum)textureUnit; 93 | 94 | /** Reference to the current scene mesh. 95 | 96 | This mesh may be modified by a background thread if an instance of STMapper is running, so proper locking is necessary. 97 | */ 98 | - (STMesh *)lockAndGetSceneMesh; 99 | 100 | /// Unlock the mesh 101 | - (void) unlockSceneMesh; 102 | 103 | /** OpenGL projection matrix representing a Structure Sensor virtual camera. 104 | 105 | This matrix can be used to render a scene by simulating the same camera properties as the Structure Sensor depth camera. 106 | */ 107 | - (GLKMatrix4)depthCameraGLProjectionMatrix; 108 | 109 | /** Render the scene mesh from the given viewpoint.
122 | //------------------------------------------------------------------------------ 123 | # pragma mark - STTracker 124 | 125 | /** Track the 3D position of the Structure Sensor. 126 | 127 | STTracker uses sensor information and, optionally, IMU data to estimate how the camera moves over time, in real time. 128 | */ 129 | ST_API 130 | @interface STTracker : NSObject 131 | 132 | /// STScene object storing common SLAM information. 133 | @property (nonatomic, retain) STScene *scene; 134 | 135 | /// Recommended initializer, since STTracker cannot be used until an STScene has been provided. 136 | - (id)initWithScene:(STScene *)scene; 137 | 138 | /// Reset the tracker to its initial state. 139 | - (void)reset; 140 | 141 | /// Set the current camera pose. Tracking will take this as the initial pose. 142 | - (void)setCameraPose:(GLKMatrix4)cameraPose; 143 | 144 | /** Update the camera pose estimate using the given depth frame. 145 | 146 | Returns true on success, false otherwise. 147 | */ 148 | - (BOOL)updateCameraPoseFromDepth:(STFloatDepthFrame *)depthFrame; 149 | 150 | /// Update the current pose estimate using the provided motion data. 151 | - (void)updateCameraPoseFromMotion:(CMDeviceMotion *)motionData; 152 | 153 | /// Return the most recent camera pose estimate. 154 | - (GLKMatrix4)lastCameraPose; 155 | 156 | // The more accurate variant, integrating the given timestamp. 157 | - (GLKMatrix4) lastCameraPoseWtihTimestamp:(double)timestamp; 158 | 159 | /** Tracking mode adjustment. 160 | 161 | STTracker can run in two different modes. STTrackingModeAccurate is best during scanning, but it 162 | will also take more CPU resources. STTrackingModeFast is designed for very fast tracking, and works 163 | best when tracking against a static mesh, for example after a scan has already been done. 164 | */ 165 | enum STTrackingMode { 166 | STTrackingModeAccurate = 0, // best for scanning, but uses more CPU. This is the default. 167 | STTrackingModeFast, // will use less CPU; this mode is best for 30 FPS tracking against a fixed mesh 168 | }; 169 | 170 | - (void)setTrackingMode:(STTrackingMode)mode; 171 | 172 | @end 173 |
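// Editor's example (not part of the original header): a typical per-frame
// tracking loop. Assumes `scene`, `floatDepthFrame`, `rawDepthFrame`, and
// `motionData` already exist; variable names are hypothetical.
/*
STTracker *tracker = [[STTracker alloc] initWithScene:scene];
[tracker setCameraPose:GLKMatrix4Identity];

// On every CoreMotion update:
[tracker updateCameraPoseFromMotion:motionData];

// On every depth frame:
[floatDepthFrame updateFromDepthFrame:rawDepthFrame];
if ([tracker updateCameraPoseFromDepth:floatDepthFrame])
{
    GLKMatrix4 pose = [tracker lastCameraPose];
    // Feed `pose` to STMapper's integrateDepthFrame:cameraPose:.
}
*/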
174 | //------------------------------------------------------------------------------ 175 | # pragma mark - STMapper 176 | 177 | /** Integrate sensor data to reconstruct a 3D model of a scene. 178 | 179 | STMapper will update the scene mesh progressively as new depth frames are fed in. 180 | It works in a background thread, which means that it may update the STScene object at any time. 181 | You need to call the blocking stop method to make sure mapping has fully stopped. 182 | 183 | The mapper works over a fixed cuboid defining the volume of interest in the scene. 184 | This volume can be initialized interactively using STCubePlacementInitializer. 185 | 186 | The volume is defined by its size in the real world, in meters, and is discretized into cells. 187 | The volume resolution specifies the number of cells. As a consequence, the maximal level of detail which can be obtained by STMapper is roughly determined by volumeSizeInMeters / volumeResolution. 188 | In short, the bigger the volume size, the higher the resolution has to be to keep the same level of detail. 189 | */ 190 | ST_API 191 | @interface STMapper : NSObject 192 | 193 | /// The STScene model which will be updated. 194 | @property (nonatomic, retain) STScene *scene; 195 | 196 | /// The rectangular cuboid size in meters. 197 | @property (nonatomic) GLKVector3 volumeSizeInMeters; 198 | 199 | /** Number of cells for each dimension. 200 | 201 | To keep the level of detail isotropic, it is recommended to use a similar aspect ratio to volumeSizeInMeters. 202 | To keep mapping real-time, the recommended value is 128x128x128. 203 | 204 | @note The volume resolution cannot be changed after initialization. 205 | */ 206 | @property (nonatomic, readonly) GLKVector3 volumeResolution; 207 | 208 | /// Initialize with a given scene and volume resolution. 209 | - (id)initWithScene:(STScene *)scene 210 | volumeResolution:(GLKVector3)volumeResolution; 211 | 212 | /** Specify whether the volume cuboid has been initialized on top of a support plane. 213 | 214 | If the mapper is aware that the volume is on top of a support plane, it will adapt the pipeline to be more robust. 215 | */ 216 | - (void)setHasSupportPlane:(BOOL)hasIt; 217 | 218 | /// Stop any processing which may still be happening in background threads. 219 | - (void)stop; 220 | 221 | /// Reset the mapper state. 222 | - (void)reset; 223 | 224 | /// Integrate a new depth frame into the model. 225 | - (void)integrateDepthFrame:(STFloatDepthFrame *)depthFrame 226 | cameraPose:(GLKMatrix4)cameraPose; 227 | 228 | @end 229 |
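// Editor's note (not part of the original header): a quick worked example of the
// detail bound described above. With volumeSizeInMeters = (4, 4, 4) and
// volumeResolution = (128, 128, 128), each cell covers 4.0 m / 128 = 3.125 cm,
// so structure finer than roughly 3 cm cannot be represented by the mapper.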
230 | //------------------------------------------------------------------------------ 231 | # pragma mark - STCubePlacementInitializer 232 | 233 | /** Automatically and interactively place a cubic volume of interest in the scene. 234 | 235 | This class uses a heuristic to help a user select the volume of interest to be scanned in a scene. 236 | If it can determine a supporting surface, e.g. if an object is on a table or lying on the floor, then it will align the base of the cuboid with that plane. 237 | Otherwise it will initialize the cube around the depth of the central area of the depth image. 238 | */ 239 | ST_API 240 | @interface STCubePlacementInitializer : NSObject 241 | 242 | /// Structure Sensor information. Required. 243 | @property (nonatomic) STSensorInfo *cameraInfo; 244 | 245 | /// Width, height and depth of the volume cuboid. 246 | @property (nonatomic) GLKVector3 volumeSizeInMeters; 247 | 248 | /// Most recent estimated cube 3D pose, taking the Structure Sensor as a reference. 249 | @property (nonatomic, readonly) GLKMatrix4 cubePose; 250 | 251 | /// Whether the last cube placement was made with a supporting plane. Useful for STMapper. 252 | @property (nonatomic, readonly) BOOL hasSupportPlane; 253 | 254 | /// Initialize with all the required fields. 255 | - (id)initWithCameraInfo:(STSensorInfo *)cameraInfo 256 | volumeSizeInMeters:(GLKVector3)volumeSize; 257 | 258 | /// Update the current pose estimate from a depth frame and a CoreMotion gravity vector. 259 | - (void)updateCubePose:(STFloatDepthFrame *)frame 260 | gravity:(GLKVector3)gravity; 261 | 262 | @end 263 | 264 | //------------------------------------------------------------------------------ 265 | # pragma mark - STCubeRenderer 266 | 267 | /** Helper class to render a cuboid. 268 | 269 | STCubeRenderer can render a wireframe outline of a cube, and also highlight the part of the scene which fits inside the given cube. 270 | This can be used to better visualize where the current cube is located. 271 | */ 272 | 273 | ST_API 274 | @interface STCubeRenderer : NSObject 275 | 276 | /// The global SLAM scene. 277 | @property (nonatomic, retain) STScene *scene; 278 | 279 | /// Initialize with required properties. 280 | - (id)initWithScene:(STScene *)scene; 281 | 282 | /// A depth frame is required to use renderHighlightedDepth. 283 | - (void)setDepthFrame:(STFloatDepthFrame *)depthFrame; 284 | 285 | /// Whether the cube has a support plane. Rendering will be adjusted in that case. 286 | - (void)setCubeHasSupportPlane:(BOOL)hasSupportPlane; 287 | 288 | /// Specify the cube size and the volume resolution in cells. 289 | - (void)adjustCubeSize:(const GLKVector3)sizeInMeters 290 | volumeResolution:(const GLKVector3)resolution; 291 | 292 | /// Highlight the depth frame area which fits inside the cube. 293 | - (void)renderHighlightedDepth:(GLKMatrix4)cubePose; 294 | 295 | /// Render the cube wireframe outline at the given pose. 296 | - (void)renderCubeOutline:(GLKMatrix4)cubePose 297 | depthTest:(BOOL)useDepthTest; 298 | 299 | @end 300 |
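// Editor's example (not part of the original header): interactive cube placement
// before mapping, combining the two classes above. Assumes `sensorInfo`,
// `floatDepthFrame`, `cubeRenderer`, `mapper`, and a CoreMotion-derived
// `gravity` vector already exist.
/*
STCubePlacementInitializer *placer =
    [[STCubePlacementInitializer alloc] initWithCameraInfo:sensorInfo
                                        volumeSizeInMeters:GLKVector3Make(1, 1, 1)];

// On every depth frame while the user frames the object:
[placer updateCubePose:floatDepthFrame gravity:gravity];
[cubeRenderer setDepthFrame:floatDepthFrame];
[cubeRenderer renderHighlightedDepth:placer.cubePose];
[cubeRenderer renderCubeOutline:placer.cubePose depthTest:NO];

// When the user confirms the placement, hand the result to the mapper:
[mapper setHasSupportPlane:placer.hasSupportPlane];
*/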
301 | //------------------------------------------------------------------------------ 302 | # pragma mark - STNormalFrame 303 | 304 | /** Processed normal frame with a normal vector in each pixel. 305 | 306 | Output class from STNormalEstimator. 307 | */ 308 | ST_API 309 | @interface STNormalFrame : NSObject 310 | 311 | /// Image width. 312 | @property (readonly, nonatomic) int width; 313 | 314 | /// Image height. 315 | @property (readonly, nonatomic) int height; 316 | 317 | /// Pointer to the beginning of a contiguous chunk of (`width` * `height`) normal pixel values. 318 | @property (readonly, nonatomic) const GLKVector3 *normals; 319 | 320 | @end 321 | 322 | //------------------------------------------------------------------------------ 323 | # pragma mark - STNormalEstimator 324 | 325 | /** Helper class to estimate surface normals. 326 | 327 | STNormalEstimator calculates a unit vector representing the surface normal for each depth pixel. 328 | */ 329 | 330 | ST_API 331 | @interface STNormalEstimator : NSObject 332 | 333 | /// Init with required STSensorInfo data. 334 | - (id)initWithSensorInfo:(STSensorInfo *)sensorInfo; 335 | 336 | /// Calculate normals from a depth frame. 337 | - (STNormalFrame *)calculateNormalsWithProcessedFrame:(STFloatDepthFrame *)floatDepthFrame; 338 | 339 | @end 340 | 341 | //------------------------------------------------------------------------------ 342 | # pragma mark - STGLTexture 343 | 344 | /** Helper class to manipulate OpenGL textures. 345 | 346 | This class makes it easier to initialize a GL texture. 347 | It will use the texture cache if the program is running on a device, and regular GL textures in the simulator. 348 | */ 349 | ST_API 350 | @interface STGLTexture : NSObject 351 | 352 | /// OpenGL id of the texture. 353 | @property (nonatomic, readonly) GLint glId; 354 | 355 | /// Whether initWithContext or createWithContext was already called. 356 | @property (nonatomic, readonly) BOOL isInitialized; 357 | 358 | /// Initialize the texture and immediately call createWithContext. 359 | - (id)initWithContext:(EAGLContext *)context 360 | width:(int)width 361 | height:(int)height 362 | sizeBytes:(int)sizeBytes 363 | glFormat:(GLenum)format 364 | glType:(GLenum)type; 365 | 366 | /// Initialize the underlying GL texture. 367 | - (void)createWithContext:(EAGLContext *)context 368 | width:(int)width 369 | height:(int)height 370 | sizeBytes:(int)sizeBytes 371 | glFormat:(GLenum)format 372 | glType:(GLenum)type; 373 | 374 | /// Upload the texture data to the GPU. 375 | - (void)uploadData:(uint8_t *)data; 376 | 377 | /// Bind the texture. Equivalent to `glBindTexture(glId)` in the simulator, but also supports the texture cache. 378 | - (void)bind; 379 | 380 | @end 381 | 382 | //------------------------------------------------------------------------------ 383 | # pragma mark - STGLTextureShaderRGBA 384 | 385 | /// Helper class to render a flat 2D texture with OpenGL ES. 386 | ST_API 387 | @interface STGLTextureShaderRGBA : NSObject 388 | 389 | /// Enable the underlying shader program. 390 | - (void)useShaderProgram; 391 | 392 | /// Render the texture on a fullscreen quad using the given GL_TEXTUREX unit. 393 | - (void)renderTextureWithAlpha:(float)alpha 394 | textureUnit:(GLint)textureUnit; 395 | 396 | @end 397 | 398 | //------------------------------------------------------------------------------ 399 | # pragma mark - STDepthToRgba 400 | 401 | /// Helper class to convert float depth data to RGB values for better visualization. 402 | ST_API 403 | @interface STDepthToRgba : NSObject 404 | 405 | /// Pointer to the RGBA values. 406 | @property (nonatomic, readonly) uint8_t *rgbaBuffer; 407 | 408 | /// Init with required STSensorInfo data. 409 | - (id)initWithSensorInfo:(STSensorInfo *)sensorInfo; 410 | 411 | /// Convert the given depth frame to RGBA. 412 | - (uint8_t *)convertDepthToRgba:(STFloatDepthFrame *)frame; 413 | 414 | @end 415 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/MobileOpenCVOdometry.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$!
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 01FAFAF21964A14700BF040D /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 01FAFAF11964A14700BF040D /* QuartzCore.framework */; }; 11 | 431C6F5318455ABD00EDB38B /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 431C6F5218455ABD00EDB38B /* AVFoundation.framework */; }; 12 | 435F22551968CCD600731151 /* Structure.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 435F22541968CCD600731151 /* Structure.framework */; }; 13 | 435F22571968D35900731151 /* opencv2.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 435F22561968D35900731151 /* opencv2.framework */; }; 14 | 6F1239081862A59D00BD1D7A /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6F1239071862A59D00BD1D7A /* Accelerate.framework */; }; 15 | 6F12390A1862A5D300BD1D7A /* ImageIO.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6F1239091862A5D300BD1D7A /* ImageIO.framework */; }; 16 | 6F2B4EDB1865324D00403B8C /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6F7C0CC217F0EA0500692EC1 /* CoreGraphics.framework */; }; 17 | 6F2B4EDD1865325D00403B8C /* OpenGLES.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6F2B4EDC1865325D00403B8C /* OpenGLES.framework */; }; 18 | 6F2B4EDF1865326B00403B8C /* CoreImage.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6F2B4EDE1865326B00403B8C /* CoreImage.framework */; }; 19 | 6F2B4EE1186532A500403B8C /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6F2B4EE0186532A500403B8C /* CoreMedia.framework */; }; 20 | 6F2B4EE3186532CE00403B8C /* CoreVideo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6F2B4EE2186532CE00403B8C /* CoreVideo.framework */; }; 21 | 6F7C0CC117F0EA0500692EC1 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6F7C0CC017F0EA0500692EC1 /* Foundation.framework */; }; 22 | 6F7C0CC517F0EA0500692EC1 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6F7C0CC417F0EA0500692EC1 /* UIKit.framework */; }; 23 | 6F7C0CC717F0EA0500692EC1 /* ExternalAccessory.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6F7C0CC617F0EA0500692EC1 /* ExternalAccessory.framework */; }; 24 | 6F7C0CCD17F0EA0500692EC1 /* InfoPlist.strings in Resources */ = {isa = PBXBuildFile; fileRef = 6F7C0CCB17F0EA0500692EC1 /* InfoPlist.strings */; }; 25 | 6F7C0CCF17F0EA0500692EC1 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 6F7C0CCE17F0EA0500692EC1 /* main.m */; }; 26 | 6F7C0CD317F0EA0500692EC1 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 6F7C0CD217F0EA0500692EC1 /* AppDelegate.m */; }; 27 | 6F7C0CD717F0EA0500692EC1 /* ViewController_iPhone.xib in Resources */ = {isa = PBXBuildFile; fileRef = 6F7C0CD617F0EA0500692EC1 /* ViewController_iPhone.xib */; }; 28 | 6F7C0CD917F0EA0500692EC1 /* ViewController_iPad.xib in Resources */ = {isa = PBXBuildFile; fileRef = 6F7C0CD817F0EA0500692EC1 /* ViewController_iPad.xib */; }; 29 | 6F7C0CDB17F0EA0500692EC1 /* ViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 6F7C0CDA17F0EA0500692EC1 /* ViewController.mm */; }; 30 | 6F7C0CDE17F0EA0500692EC1 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 6F7C0CDD17F0EA0500692EC1 /* Images.xcassets */; }; 31 | /* End PBXBuildFile section */ 32 | 33 | /* Begin PBXFileReference section */ 34 | 01FAFAF11964A14700BF040D /* 
QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; }; 35 | 431C6F5218455ABD00EDB38B /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; 36 | 435F22541968CCD600731151 /* Structure.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = Structure.framework; sourceTree = "<group>"; }; 37 | 435F22561968D35900731151 /* opencv2.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = opencv2.framework; sourceTree = "<group>"; }; 38 | 6F1239071862A59D00BD1D7A /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; 39 | 6F1239091862A5D300BD1D7A /* ImageIO.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ImageIO.framework; path = System/Library/Frameworks/ImageIO.framework; sourceTree = SDKROOT; }; 40 | 6F2B4EDC1865325D00403B8C /* OpenGLES.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = OpenGLES.framework; path = System/Library/Frameworks/OpenGLES.framework; sourceTree = SDKROOT; }; 41 | 6F2B4EDE1865326B00403B8C /* CoreImage.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreImage.framework; path = System/Library/Frameworks/CoreImage.framework; sourceTree = SDKROOT; }; 42 | 6F2B4EE0186532A500403B8C /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; }; 43 | 6F2B4EE2186532CE00403B8C /* CoreVideo.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreVideo.framework; path = System/Library/Frameworks/CoreVideo.framework; sourceTree = SDKROOT; }; 44 | 6F7C0CBD17F0EA0500692EC1 /* MobileOpenCVOdometry.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = MobileOpenCVOdometry.app; sourceTree = BUILT_PRODUCTS_DIR; }; 45 | 6F7C0CC017F0EA0500692EC1 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; }; 46 | 6F7C0CC217F0EA0500692EC1 /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; }; 47 | 6F7C0CC417F0EA0500692EC1 /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; }; 48 | 6F7C0CC617F0EA0500692EC1 /* ExternalAccessory.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ExternalAccessory.framework; path = System/Library/Frameworks/ExternalAccessory.framework; sourceTree = SDKROOT; }; 49 | 6F7C0CCA17F0EA0500692EC1 /* MobileOpenCVOdometry-Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = "MobileOpenCVOdometry-Info.plist"; sourceTree = "<group>"; }; 50 | 6F7C0CCC17F0EA0500692EC1 /* en */ = {isa =
PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/InfoPlist.strings; sourceTree = "<group>"; }; 51 | 6F7C0CCE17F0EA0500692EC1 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; }; 52 | 6F7C0CD017F0EA0500692EC1 /* MobileOpenCVOdometry-Prefix.pch */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "MobileOpenCVOdometry-Prefix.pch"; sourceTree = "<group>"; }; 53 | 6F7C0CD117F0EA0500692EC1 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; }; 54 | 6F7C0CD217F0EA0500692EC1 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; }; 55 | 6F7C0CD617F0EA0500692EC1 /* ViewController_iPhone.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = ViewController_iPhone.xib; sourceTree = "<group>"; }; 56 | 6F7C0CD817F0EA0500692EC1 /* ViewController_iPad.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = ViewController_iPad.xib; sourceTree = "<group>"; }; 57 | 6F7C0CDA17F0EA0500692EC1 /* ViewController.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = ViewController.mm; sourceTree = "<group>"; }; 58 | 6F7C0CDC17F0EA0500692EC1 /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = "<group>"; }; 59 | 6F7C0CDD17F0EA0500692EC1 /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Images.xcassets; sourceTree = "<group>"; }; 60 | /* End PBXFileReference section */ 61 | 62 | /* Begin PBXFrameworksBuildPhase section */ 63 | 6F7C0CBA17F0EA0500692EC1 /* Frameworks */ = { 64 | isa = PBXFrameworksBuildPhase; 65 | buildActionMask = 2147483647; 66 | files = ( 67 | 6F1239081862A59D00BD1D7A /* Accelerate.framework in Frameworks */, 68 | 01FAFAF21964A14700BF040D /* QuartzCore.framework in Frameworks */, 69 | 6F7C0CC117F0EA0500692EC1 /* Foundation.framework in Frameworks */, 70 | 431C6F5318455ABD00EDB38B /* AVFoundation.framework in Frameworks */, 71 | 435F22551968CCD600731151 /* Structure.framework in Frameworks */, 72 | 6F2B4EDD1865325D00403B8C /* OpenGLES.framework in Frameworks */, 73 | 6F12390A1862A5D300BD1D7A /* ImageIO.framework in Frameworks */, 74 | 6F2B4EE1186532A500403B8C /* CoreMedia.framework in Frameworks */, 75 | 435F22571968D35900731151 /* opencv2.framework in Frameworks */, 76 | 6F2B4EDB1865324D00403B8C /* CoreGraphics.framework in Frameworks */, 77 | 6F2B4EDF1865326B00403B8C /* CoreImage.framework in Frameworks */, 78 | 6F2B4EE3186532CE00403B8C /* CoreVideo.framework in Frameworks */, 79 | 6F7C0CC517F0EA0500692EC1 /* UIKit.framework in Frameworks */, 80 | 6F7C0CC717F0EA0500692EC1 /* ExternalAccessory.framework in Frameworks */, 81 | ); 82 | runOnlyForDeploymentPostprocessing = 0; 83 | }; 84 | /* End PBXFrameworksBuildPhase section */ 85 | 86 | /* Begin PBXGroup section */ 87 | 6F7C0CB417F0EA0500692EC1 = { 88 | isa = PBXGroup; 89 | children = ( 90 | 6F7C0CC817F0EA0500692EC1 /* MobileOpenCVOdometry */, 91 | 6F7C0CBF17F0EA0500692EC1 /* Frameworks */, 92 | 6F7C0CBE17F0EA0500692EC1 /* Products */, 93 | ); 94 | sourceTree = "<group>"; 95 | }; 96 | 6F7C0CBE17F0EA0500692EC1 /* Products */ = { 97 | isa = PBXGroup; 98 | children = ( 99 | 6F7C0CBD17F0EA0500692EC1 /* MobileOpenCVOdometry.app */, 100 | ); 101 | name = Products; 102 | sourceTree = "<group>"; 103 | }; 104 | 6F7C0CBF17F0EA0500692EC1 /* Frameworks */ = { 105 | isa = PBXGroup; 106 |
children = ( 107 | 435F22561968D35900731151 /* opencv2.framework */, 108 | 435F22541968CCD600731151 /* Structure.framework */, 109 | 01FAFAF11964A14700BF040D /* QuartzCore.framework */, 110 | 6F7C0CC017F0EA0500692EC1 /* Foundation.framework */, 111 | 431C6F5218455ABD00EDB38B /* AVFoundation.framework */, 112 | 6F1239071862A59D00BD1D7A /* Accelerate.framework */, 113 | 6F2B4EDC1865325D00403B8C /* OpenGLES.framework */, 114 | 6F1239091862A5D300BD1D7A /* ImageIO.framework */, 115 | 6F2B4EE0186532A500403B8C /* CoreMedia.framework */, 116 | 6F7C0CC217F0EA0500692EC1 /* CoreGraphics.framework */, 117 | 6F2B4EDE1865326B00403B8C /* CoreImage.framework */, 118 | 6F2B4EE2186532CE00403B8C /* CoreVideo.framework */, 119 | 6F7C0CC417F0EA0500692EC1 /* UIKit.framework */, 120 | 6F7C0CC617F0EA0500692EC1 /* ExternalAccessory.framework */, 121 | ); 122 | name = Frameworks; 123 | sourceTree = "<group>"; 124 | }; 125 | 6F7C0CC817F0EA0500692EC1 /* MobileOpenCVOdometry */ = { 126 | isa = PBXGroup; 127 | children = ( 128 | 6F7C0CD117F0EA0500692EC1 /* AppDelegate.h */, 129 | 6F7C0CD217F0EA0500692EC1 /* AppDelegate.m */, 130 | 6F7C0CDC17F0EA0500692EC1 /* ViewController.h */, 131 | 6F7C0CDA17F0EA0500692EC1 /* ViewController.mm */, 132 | 6F7C0CC917F0EA0500692EC1 /* Supporting Files */, 133 | ); 134 | path = MobileOpenCVOdometry; 135 | sourceTree = "<group>"; 136 | }; 137 | 6F7C0CC917F0EA0500692EC1 /* Supporting Files */ = { 138 | isa = PBXGroup; 139 | children = ( 140 | 6F7C0CD617F0EA0500692EC1 /* ViewController_iPhone.xib */, 141 | 6F7C0CD817F0EA0500692EC1 /* ViewController_iPad.xib */, 142 | 6F7C0CDD17F0EA0500692EC1 /* Images.xcassets */, 143 | 6F7C0CCB17F0EA0500692EC1 /* InfoPlist.strings */, 144 | 6F7C0CCA17F0EA0500692EC1 /* MobileOpenCVOdometry-Info.plist */, 145 | 6F7C0CD017F0EA0500692EC1 /* MobileOpenCVOdometry-Prefix.pch */, 146 | 6F7C0CCE17F0EA0500692EC1 /* main.m */, 147 | ); 148 | name = "Supporting Files"; 149 | sourceTree = "<group>"; 150 | }; 151 | /* End PBXGroup section */ 152 | 153 | /* Begin PBXNativeTarget section */ 154 | 6F7C0CBC17F0EA0500692EC1 /* MobileOpenCVOdometry */ = { 155 | isa = PBXNativeTarget; 156 | buildConfigurationList = 6F7C0CF417F0EA0500692EC1 /* Build configuration list for PBXNativeTarget "MobileOpenCVOdometry" */; 157 | buildPhases = ( 158 | 6F7C0CB917F0EA0500692EC1 /* Sources */, 159 | 6F7C0CBA17F0EA0500692EC1 /* Frameworks */, 160 | 6F7C0CBB17F0EA0500692EC1 /* Resources */, 161 | ); 162 | buildRules = ( 163 | ); 164 | dependencies = ( 165 | ); 166 | name = MobileOpenCVOdometry; 167 | productName = DepthViewer; 168 | productReference = 6F7C0CBD17F0EA0500692EC1 /* MobileOpenCVOdometry.app */; 169 | productType = "com.apple.product-type.application"; 170 | }; 171 | /* End PBXNativeTarget section */ 172 | 173 | /* Begin PBXProject section */ 174 | 6F7C0CB517F0EA0500692EC1 /* Project object */ = { 175 | isa = PBXProject; 176 | attributes = { 177 | LastUpgradeCheck = 0510; 178 | ORGANIZATIONNAME = Occipital; 179 | }; 180 | buildConfigurationList = 6F7C0CB817F0EA0500692EC1 /* Build configuration list for PBXProject "MobileOpenCVOdometry" */; 181 | compatibilityVersion = "Xcode 3.2"; 182 | developmentRegion = English; 183 | hasScannedForEncodings = 0; 184 | knownRegions = ( 185 | en, 186 | ); 187 | mainGroup = 6F7C0CB417F0EA0500692EC1; 188 | productRefGroup = 6F7C0CBE17F0EA0500692EC1 /* Products */; 189 | projectDirPath = ""; 190 | projectRoot = ""; 191 | targets = ( 192 | 6F7C0CBC17F0EA0500692EC1 /* MobileOpenCVOdometry */, 193 | ); 194 | }; 195 | /* End PBXProject section */ 196 | 197 | /* Begin
PBXResourcesBuildPhase section */ 198 | 6F7C0CBB17F0EA0500692EC1 /* Resources */ = { 199 | isa = PBXResourcesBuildPhase; 200 | buildActionMask = 2147483647; 201 | files = ( 202 | 6F7C0CCD17F0EA0500692EC1 /* InfoPlist.strings in Resources */, 203 | 6F7C0CDE17F0EA0500692EC1 /* Images.xcassets in Resources */, 204 | 6F7C0CD917F0EA0500692EC1 /* ViewController_iPad.xib in Resources */, 205 | 6F7C0CD717F0EA0500692EC1 /* ViewController_iPhone.xib in Resources */, 206 | ); 207 | runOnlyForDeploymentPostprocessing = 0; 208 | }; 209 | /* End PBXResourcesBuildPhase section */ 210 | 211 | /* Begin PBXSourcesBuildPhase section */ 212 | 6F7C0CB917F0EA0500692EC1 /* Sources */ = { 213 | isa = PBXSourcesBuildPhase; 214 | buildActionMask = 2147483647; 215 | files = ( 216 | 6F7C0CDB17F0EA0500692EC1 /* ViewController.mm in Sources */, 217 | 6F7C0CD317F0EA0500692EC1 /* AppDelegate.m in Sources */, 218 | 6F7C0CCF17F0EA0500692EC1 /* main.m in Sources */, 219 | ); 220 | runOnlyForDeploymentPostprocessing = 0; 221 | }; 222 | /* End PBXSourcesBuildPhase section */ 223 | 224 | /* Begin PBXVariantGroup section */ 225 | 6F7C0CCB17F0EA0500692EC1 /* InfoPlist.strings */ = { 226 | isa = PBXVariantGroup; 227 | children = ( 228 | 6F7C0CCC17F0EA0500692EC1 /* en */, 229 | ); 230 | name = InfoPlist.strings; 231 | sourceTree = "<group>"; 232 | }; 233 | /* End PBXVariantGroup section */ 234 | 235 | /* Begin XCBuildConfiguration section */ 236 | 6F7C0CF217F0EA0500692EC1 /* Debug */ = { 237 | isa = XCBuildConfiguration; 238 | buildSettings = { 239 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 240 | CLANG_CXX_LIBRARY = "libc++"; 241 | CLANG_ENABLE_OBJC_ARC = YES; 242 | CODE_SIGN_IDENTITY = "iPhone Developer"; 243 | COPY_PHASE_STRIP = NO; 244 | GCC_OPTIMIZATION_LEVEL = 0; 245 | IPHONEOS_DEPLOYMENT_TARGET = 7.0; 246 | ONLY_ACTIVE_ARCH = YES; 247 | SDKROOT = iphoneos; 248 | TARGETED_DEVICE_FAMILY = "1,2"; 249 | }; 250 | name = Debug; 251 | }; 252 | 6F7C0CF317F0EA0500692EC1 /* Release */ = { 253 | isa = XCBuildConfiguration; 254 | buildSettings = { 255 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 256 | CLANG_CXX_LIBRARY = "libc++"; 257 | CLANG_ENABLE_OBJC_ARC = YES; 258 | CODE_SIGN_IDENTITY = "iPhone Developer"; 259 | ENABLE_NS_ASSERTIONS = NO; 260 | IPHONEOS_DEPLOYMENT_TARGET = 7.0; 261 | SDKROOT = iphoneos; 262 | TARGETED_DEVICE_FAMILY = "1,2"; 263 | VALIDATE_PRODUCT = YES; 264 | }; 265 | name = Release; 266 | }; 267 | 6F7C0CF517F0EA0500692EC1 /* Debug */ = { 268 | isa = XCBuildConfiguration; 269 | buildSettings = { 270 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 271 | ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage; 272 | FRAMEWORK_SEARCH_PATHS = ( 273 | "$(inherited)", 274 | "$(PROJECT_DIR)", 275 | ); 276 | GCC_PRECOMPILE_PREFIX_HEADER = YES; 277 | GCC_PREFIX_HEADER = "MobileOpenCVOdometry/MobileOpenCVOdometry-Prefix.pch"; 278 | INFOPLIST_FILE = "MobileOpenCVOdometry/MobileOpenCVOdometry-Info.plist"; 279 | PRODUCT_NAME = MobileOpenCVOdometry; 280 | }; 281 | name = Debug; 282 | }; 283 | 6F7C0CF617F0EA0500692EC1 /* Release */ = { 284 | isa = XCBuildConfiguration; 285 | buildSettings = { 286 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 287 | ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage; 288 | FRAMEWORK_SEARCH_PATHS = ( 289 | "$(inherited)", 290 | "$(PROJECT_DIR)", 291 | ); 292 | GCC_PRECOMPILE_PREFIX_HEADER = YES; 293 | GCC_PREFIX_HEADER = "MobileOpenCVOdometry/MobileOpenCVOdometry-Prefix.pch"; 294 | INFOPLIST_FILE = "MobileOpenCVOdometry/MobileOpenCVOdometry-Info.plist"; 295 | PRODUCT_NAME = MobileOpenCVOdometry; 296 |
297 | name = Release; 298 | }; 299 | /* End XCBuildConfiguration section */ 300 | 301 | /* Begin XCConfigurationList section */ 302 | 6F7C0CB817F0EA0500692EC1 /* Build configuration list for PBXProject "MobileOpenCVOdometry" */ = { 303 | isa = XCConfigurationList; 304 | buildConfigurations = ( 305 | 6F7C0CF217F0EA0500692EC1 /* Debug */, 306 | 6F7C0CF317F0EA0500692EC1 /* Release */, 307 | ); 308 | defaultConfigurationIsVisible = 0; 309 | defaultConfigurationName = Release; 310 | }; 311 | 6F7C0CF417F0EA0500692EC1 /* Build configuration list for PBXNativeTarget "MobileOpenCVOdometry" */ = { 312 | isa = XCConfigurationList; 313 | buildConfigurations = ( 314 | 6F7C0CF517F0EA0500692EC1 /* Debug */, 315 | 6F7C0CF617F0EA0500692EC1 /* Release */, 316 | ); 317 | defaultConfigurationIsVisible = 0; 318 | defaultConfigurationName = Release; 319 | }; 320 | /* End XCConfigurationList section */ 321 | }; 322 | rootObject = 6F7C0CB517F0EA0500692EC1 /* Project object */; 323 | } 324 | -------------------------------------------------------------------------------- /MobileRGBDOdometry/MobileOpenCVOdometry/ViewController.mm: -------------------------------------------------------------------------------- 1 | /* 2 | This file is part of the Structure SDK. 3 | Copyright © 2014 Occipital, Inc. All rights reserved. 4 | http://structure.io 5 | */ 6 | 7 | 8 | #import "ViewController.h" 9 | 10 | #import <AVFoundation/AVFoundation.h> 11 | #import <Accelerate/Accelerate.h> 12 | #import <ImageIO/ImageIO.h> 13 | 14 | #include <opencv2/rgbd/rgbd.hpp> 15 | #include <opencv2/imgproc/imgproc.hpp> 16 | #include <iostream> 17 | 18 | #define CONNECT_TEXT @"Please Connect Structure Sensor" 19 | #define CHARGE_TEXT @"Please Charge Structure Sensor" 20 | 21 | 22 | @interface ViewController () { 23 | 24 | STSensorController *_sensorController; 25 | 26 | AVCaptureSession *_session; 27 | 28 | UIImageView *_depthImageView; 29 | UIImageView *_normalsImageView; 30 | UIImageView *_colorImageView; 31 | 32 | uint16_t *_linearizeBuffer; 33 | uint8_t *_coloredDepthBuffer; 34 | uint8_t *_normalsBuffer; 35 | 36 | STFloatDepthFrame *_floatDepthFrame; 37 | STNormalEstimator *_normalsEstimator; 38 | 39 | UILabel* _statusLabel; 40 | 41 | cv::Odometry* odometry; 42 | 43 | cv::Ptr<cv::OdometryFrame> prevOdometryFrame; 44 | cv::Ptr<cv::OdometryFrame> currOdometryFrame; 45 | 46 | std::vector<cv::Mat> allOdometryPoses; 47 | 48 | } 49 | 50 | - (BOOL)connectAndStartStreaming; 51 | - (void)renderDepthFrame:(STDepthFrame*)depthFrame; 52 | - (void)renderNormalsFrame:(STDepthFrame*)normalsFrame; 53 | - (void)renderColorFrame:(CMSampleBufferRef)sampleBuffer; 54 | - (void)startAVCaptureSession; 55 | 56 | - (void)setupOpenCVOdometry; 57 | 58 | @end 59 | 60 | @implementation ViewController 61 | 62 | 63 | - (void)viewDidLoad 64 | { 65 | [super viewDidLoad]; 66 | 67 | _sensorController = [STSensorController sharedController]; 68 | _sensorController.delegate = self; 69 | 70 | // Request that we receive depth frames with synchronized color pairs 71 | [_sensorController setFrameSyncConfig:FRAME_SYNC_DEPTH_AND_RGB]; 72 | 73 | 74 | // Create three image views where we will render our frames 75 | 76 | CGRect depthFrame = self.view.frame; 77 | depthFrame.size.height /= 2; 78 | depthFrame.origin.y = self.view.frame.size.height/2; 79 | depthFrame.origin.x = 1; 80 | depthFrame.origin.x = -self.view.frame.size.width * 0.25; 81 | 82 | CGRect normalsFrame = self.view.frame; 83 | normalsFrame.size.height /= 2; 84 | normalsFrame.origin.y = self.view.frame.size.height/2; 85 | normalsFrame.origin.x = 1; 86 | normalsFrame.origin.x = self.view.frame.size.width * 0.25; 87 | 88 | CGRect colorFrame = self.view.frame; 89 | colorFrame.size.height /= 2; 90 | 91 |
_linearizeBuffer = NULL; 92 | _coloredDepthBuffer = NULL; 93 | _normalsBuffer = NULL; 94 | 95 | _depthImageView = [[UIImageView alloc] initWithFrame:depthFrame]; 96 | _depthImageView.contentMode = UIViewContentModeScaleAspectFit; 97 | [self.view addSubview:_depthImageView]; 98 | 99 | _normalsImageView = [[UIImageView alloc] initWithFrame:normalsFrame]; 100 | _normalsImageView.contentMode = UIViewContentModeScaleAspectFit; 101 | [self.view addSubview:_normalsImageView]; 102 | 103 | _colorImageView = [[UIImageView alloc] initWithFrame:colorFrame]; 104 | _colorImageView.contentMode = UIViewContentModeScaleAspectFit; 105 | [self.view addSubview:_colorImageView]; 106 | 107 | 108 | // When the app enters the foreground, we can choose to restart the stream 109 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(appWillEnterForeground) name:UIApplicationWillEnterForegroundNotification object:nil]; 110 | 111 | #if !TARGET_IPHONE_SIMULATOR 112 | [self startAVCaptureSession]; 113 | #endif 114 | 115 | [self setupOpenCVOdometry]; 116 | 117 | // Sample usage of wireless debugging API 118 | // NSError* error = nil; 119 | // [STWirelessLog broadcastLogsToWirelessConsoleAtAddress:@"10.1.10.44" usingPort:4999 error:&error]; 120 | // 121 | // if (error) 122 | //     NSLog(@"Oh no! Can't start wireless log: %@", [error localizedDescription]); 123 | 124 | } 125 | 126 | - (void)dealloc 127 | { 128 | if (_linearizeBuffer) 129 | free(_linearizeBuffer); 130 | 131 | if (_coloredDepthBuffer) 132 | free(_coloredDepthBuffer); 133 | 134 | if (_normalsBuffer) 135 | free(_normalsBuffer); 136 | } 137 | 138 | 139 | - (void)viewDidAppear:(BOOL)animated 140 | { 141 | static BOOL fromLaunch = true; 142 | if(fromLaunch) 143 | { 144 | 145 | // 146 | // Create a UILabel in the center of our view to display status messages 147 | // 148 | 149 | // We do this here instead of in viewDidLoad so that we get the correctly sized/rotated view bounds 150 | if (!_statusLabel) { 151 | 152 | _statusLabel = [[UILabel alloc] initWithFrame:self.view.bounds]; 153 | _statusLabel.backgroundColor = [[UIColor blackColor] colorWithAlphaComponent:0.7]; 154 | _statusLabel.textAlignment = NSTextAlignmentCenter; 155 | _statusLabel.font = [UIFont systemFontOfSize:35.0]; 156 | 157 | [_statusLabel setText:CONNECT_TEXT]; 158 | [_statusLabel setTextColor:[UIColor whiteColor]]; 159 | [self.view addSubview: _statusLabel]; 160 | } 161 | 162 | [self connectAndStartStreaming]; 163 | fromLaunch = false; 164 | } 165 | } 166 | 167 | 168 | - (void)appWillEnterForeground 169 | { 170 | 171 | BOOL success = [self connectAndStartStreaming]; 172 | 173 | if(!success) 174 | { 175 | // Workaround for direct multitasking between two Structure Apps. 176 | 177 | // HACK ALERT! Try once more after a delay if we failed to reconnect on foregrounding. 178 | // 0.75s was not enough, 0.95s was, but this might depend on the other app using the sensor. 179 | // We need a better solution to this. 180 | [NSTimer scheduledTimerWithTimeInterval:2.0 target:self 181 | selector:@selector(connectAndStartStreaming) userInfo:nil repeats:NO]; 182 | } 183 | 184 | } 185 | 186 | 187 | - (void)didReceiveMemoryWarning 188 | { 189 | [super didReceiveMemoryWarning]; 190 | // Dispose of any resources that can be recreated.
191 | } 192 | 193 | 194 | - (BOOL)connectAndStartStreaming 195 | { 196 | 197 | STSensorControllerInitStatus result = [_sensorController initializeSensorConnection]; 198 | 199 | BOOL didSucceed = (result == STSensorControllerInitStatusSuccess || result == STSensorControllerInitStatusAlreadyInitialized); 200 | 201 | 202 | if (didSucceed) 203 | { 204 | // Now that we're about to stream, hide the status label 205 | [self hideStatusMessage]; 206 | 207 | // Set sensor stream quality 208 | StructureStreamConfig streamConfig = CONFIG_QVGA_DEPTH; 209 | 210 | // After this call, we will start to receive frames through the delegate methods 211 | [_sensorController startStreamingWithConfig:streamConfig]; 212 | 213 | // Allocate the depth (shift) -> depth (millimeters) converter class 214 | _floatDepthFrame = [[STFloatDepthFrame alloc] init]; 215 | 216 | // Allocate the depth -> surface normals converter class 217 | _normalsEstimator = [[STNormalEstimator alloc] initWithSensorInfo:[_sensorController getSensorInfo:streamConfig]]; 218 | } 219 | else 220 | { 221 | if (result == STSensorControllerInitStatusSensorNotFound) 222 | NSLog(@"[Debug] No Structure Sensor found!"); 223 | else if (result == STSensorControllerInitStatusOpenFailed) 224 | NSLog(@"[Error] Structure Sensor open failed."); 225 | else if (result == STSensorControllerInitStatusSensorIsWakingUp) 226 | NSLog(@"[Debug] Structure Sensor is waking from low power."); 227 | else if (result != STSensorControllerInitStatusSuccess) 228 | NSLog(@"[Debug] Structure Sensor failed to init with status %d.", (int)result); 229 | 230 | [self showStatusMessage:CONNECT_TEXT]; 231 | } 232 | 233 | return didSucceed; 234 | 235 | } 236 | 237 | 238 | - (void)showStatusMessage:(NSString *)msg 239 | { 240 | 241 | _statusLabel.hidden = false; 242 | _statusLabel.text = msg; 243 | 244 | } 245 | 246 | - (void)hideStatusMessage 247 | { 248 | _statusLabel.hidden = true; 249 | } 250 | 251 | - (void)setupOpenCVOdometry 252 | { 253 | 254 | prevOdometryFrame = cv::Ptr<cv::OdometryFrame>(new cv::OdometryFrame()); 255 | currOdometryFrame = cv::Ptr<cv::OdometryFrame>(new cv::OdometryFrame()); 256 | 257 | cv::Mat1f cameraMatrix (3,3); cv::setIdentity(cameraMatrix); 258 | 259 | //TODO: Replace with device specific intrinsics 260 | 261 | //QVGA iOS iPad Air 262 | cameraMatrix(0,0) = 288.0f; 263 | cameraMatrix(1,1) = 288.0f; 264 | cameraMatrix(0,2) = 161.5f; 265 | cameraMatrix(1,2) = 121.5f; 266 | 267 | // OpenCV odometry ignores lens distortion 268 | 269 | float minDepth = 0.3f; 270 | float maxDepth = 4.f; 271 | float maxDepthDiff = 0.07f; 272 | 273 | std::vector<int> iterCounts = cv::Mat(cv::Vec3i(7,7,10)); 274 | std::vector<float> minGradientMagnitudes = cv::Mat(cv::Vec3f(10,10,10)); 275 | 276 | float maxPointsPart = cv::RgbdOdometry::DEFAULT_MAX_POINTS_PART(); 277 | 278 | 279 | odometry = new cv::RgbdOdometry (cameraMatrix, minDepth, maxDepth, maxDepthDiff, iterCounts, minGradientMagnitudes, maxPointsPart, 280 | cv::Odometry::RIGID_BODY_MOTION); 281 | 282 | 283 | } 284 |
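// Editor's sketch (not part of the original sample): the TODO above asks for
// device-specific intrinsics. If you have intrinsics calibrated at one
// resolution, the matrix for a decimated stream can be derived rather than
// hard-coded. Hypothetical helper, assuming the common convention that pixel
// centers sit at integer coordinates (hence the 0.5 offsets on cx, cy):
static cv::Mat1f scaleCameraMatrix(const cv::Mat1f& K, float s)
{
    cv::Mat1f Ks = K.clone();
    Ks(0,0) /= s;                            // fx
    Ks(1,1) /= s;                            // fy
    Ks(0,2) = (Ks(0,2) + 0.5f) / s - 0.5f;   // cx
    Ks(1,2) = (Ks(1,2) + 0.5f) / s - 0.5f;   // cy
    return Ks;
}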
285 | void convertBGRASampleBufferToRGB (CMSampleBufferRef sampleBuffer, cv::Mat& dest) 286 | { 287 | 288 | CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 289 | 290 | CVPixelBufferLockBaseAddress(pixelBuffer, 0); 291 | 292 | size_t width = CVPixelBufferGetWidth(pixelBuffer); 293 | size_t height = CVPixelBufferGetHeight(pixelBuffer); 294 | 295 | dest.create((int)height, (int)width, CV_8UC3); 296 | 297 | unsigned char* ptr = (unsigned char*) CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); 298 | 299 | // Use NEON to convert from BGRA to RGB, or use Accelerate when running in the simulator 300 | #ifdef __ARM_NEON__ 301 | 302 | uint8_t* sourcePtr = (uint8_t*)ptr; 303 | uint8_t* destPtr = (uint8_t*)dest.data; 304 | 305 | const int numPixels = (int)(width*height); 306 | 307 | int pixel = 0; 308 | for (; pixel + 16 <= numPixels; pixel += 16) // process full 16-pixel blocks only 309 | { 310 | 311 | uint8x16x4_t sourcePixelsBGRA = vld4q_u8((const unsigned char*)sourcePtr); 312 | uint8x16x3_t sourceRGB; 313 | sourceRGB.val[0] = sourcePixelsBGRA.val[2]; 314 | sourceRGB.val[1] = sourcePixelsBGRA.val[1]; 315 | sourceRGB.val[2] = sourcePixelsBGRA.val[0]; 316 | vst3q_u8((unsigned char *)destPtr, sourceRGB); 317 | 318 | sourcePtr += 16*4; 319 | destPtr += 16*3; 320 | 321 | } 322 | 323 | // Convert any leftover pixels (15 or less would remain, if any) 324 | for (; pixel < numPixels; pixel++) { 325 | uint8_t* sourceBGRAPixel = sourcePtr; 326 | uint8_t* destRGBPixel = destPtr; 327 | 328 | destRGBPixel[0] = sourceBGRAPixel[2]; 329 | destRGBPixel[1] = sourceBGRAPixel[1]; 330 | destRGBPixel[2] = sourceBGRAPixel[0]; 331 | 332 | sourcePtr += 4; 333 | destPtr += 3; 334 | } 335 | 336 | #else 337 | 338 | vImage_Buffer src; 339 | src.width = width; 340 | src.height = height; 341 | src.data = ptr; 342 | src.rowBytes = CVPixelBufferGetBytesPerRow(pixelBuffer); 343 | 344 | vImage_Buffer destImage; 345 | destImage.width = width; 346 | destImage.height = height; 347 | destImage.rowBytes = width*3; 348 | destImage.data = dest.data; 349 | 350 | vImage_Error err; 351 | err = vImageConvert_BGRA8888toRGB888(&src, &destImage, kvImageNoFlags); 352 | if(err != kvImageNoError){ 353 | NSLog(@"Error in Pixel Copy vImage_error %ld", err); 354 | } 355 | 356 | 357 | #endif 358 | 359 | 360 | CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); 361 | } 362 | 363 | 364 | #pragma mark - 365 | #pragma mark Structure SDK Delegate Methods 366 | 367 | - (void)sensorDidDisconnect 368 | { 369 | NSLog(@"Structure Sensor disconnected!"); 370 | [self showStatusMessage:CONNECT_TEXT]; 371 | } 372 | 373 | - (void)sensorDidConnect 374 | { 375 | NSLog(@"Structure Sensor connected!"); 376 | [self connectAndStartStreaming]; 377 | } 378 | 379 | - (void)sensorDidEnterLowPowerMode 380 | { 381 | // Notify the user that the sensor needs to be charged. 382 | [self showStatusMessage:CHARGE_TEXT]; 383 | } 384 | 385 | - (void)sensorDidLeaveLowPowerMode 386 | { 387 | 388 | } 389 | 390 | - (void)sensorBatteryNeedsCharging 391 | { 392 | // Notify the user that the sensor needs to be charged. 393 | [self showStatusMessage:CHARGE_TEXT]; 394 | } 395 | 396 | - (void)sensorDidStopStreaming:(STSensorControllerDidStopStreamingReason)reason 397 | { 398 | //If needed, change any UI elements to account for the stopped stream 399 | } 400 | 401 | - (void)sensorDidOutputDepthFrame:(STDepthFrame *)depthFrame 402 | { 403 | [self renderDepthFrame:depthFrame]; 404 | } 405 | 406 | // This synchronized API will only be called when two frames match. Typically, timestamps are within 1ms of each other.
407 | // Two important things have to happen for this method to be called: 408 | // Tell the SDK we want framesync: [_sensorController setFrameSyncConfig:FRAME_SYNC_DEPTH_AND_RGB]; 409 | // Give the SDK color frames as they come in: [_sensorController frameSyncNewColorImage:sampleBuffer]; 410 | - (void)sensorDidOutputSynchronizedDepthFrame:(STDepthFrame*)depthFrame 411 | andColorFrame:(CMSampleBufferRef)sampleBuffer 412 | { 413 | 414 | 415 | // Fill from STDepthFrame 416 | cv::Mat depth(depthFrame->height, depthFrame->width, CV_16U, depthFrame->data); 417 | 418 | // Scale depth to meters 419 | cv::Mat depthMeters; 420 | depth.convertTo(depthMeters, CV_32FC1, 1.f/1000.f); 421 | 422 | 423 | depthMeters.setTo(std::numeric_limits<float>::quiet_NaN(), depth == 0); 424 | depth = depthMeters; 425 | 426 | 427 | // Fill from CMSampleBuffer 428 | cv::Mat vgaImage; 429 | convertBGRASampleBufferToRGB(sampleBuffer, vgaImage); 430 | 431 | //TODO: The conversion to grayscale and the decimation should all be done at once with NEON instead of separated. 432 | // A convertBGRASampleBufferToDecimatedGrayscale function should be used instead of convertBGRASampleBufferToRGB 433 | // and then these calls. 434 | 435 | cv::Mat vgaGray; 436 | cv::cvtColor(vgaImage, vgaGray, cv::COLOR_RGB2GRAY); // the buffer is RGB after the conversion above 437 | 438 | cv::Mat1b qvgaGray; 439 | cv::resize(vgaGray, qvgaGray, cv::Size(320, 240)); 440 | 441 | currOdometryFrame->image = qvgaGray; 442 | currOdometryFrame->depth = depth; 443 | 444 | // Compute the delta pose between this frame and the last one 445 | cv::Mat deltaRt; 446 | if(!allOdometryPoses.empty()) 447 | { 448 | bool res = odometry->compute(currOdometryFrame, prevOdometryFrame, deltaRt); 449 | 450 | if(!res) 451 | deltaRt = cv::Mat::eye(4,4,CV_64FC1); 452 | } 453 | 454 | if( allOdometryPoses.empty() ) 455 | { 456 | allOdometryPoses.push_back(cv::Mat::eye(4,4,CV_64FC1)); 457 | } 458 | else 459 | { 460 | cv::Mat& prevRt = *allOdometryPoses.rbegin(); 461 | allOdometryPoses.push_back( prevRt * deltaRt ); 462 | std::cout << "Current pose: " << *allOdometryPoses.rbegin() << std::endl; 463 | 464 | } 465 | 466 | if(!prevOdometryFrame.empty()) 467 | prevOdometryFrame->release(); 468 | std::swap(prevOdometryFrame, currOdometryFrame); 469 | 470 | 471 | [self renderDepthFrame:depthFrame]; 472 | [self renderNormalsFrame:depthFrame]; 473 | [self renderColorFrame:sampleBuffer]; 474 | } 475 | 476 |
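// Editor's sketch (not part of the original sample): each deltaRt above is a
// 4x4 rigid-body transform (CV_64FC1), so the product prevRt * deltaRt chains
// per-frame motion into a trajectory rooted at the identity pose of the first
// frame. A hypothetical helper to pull the current position out of the latest
// accumulated pose, e.g. for an on-screen readout:
static cv::Vec3d currentTranslation(const std::vector<cv::Mat>& poses)
{
    const cv::Mat& Rt = poses.back();
    return cv::Vec3d(Rt.at<double>(0,3), Rt.at<double>(1,3), Rt.at<double>(2,3));
}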
477 | #pragma mark - 478 | #pragma mark Rendering 479 | 480 | - (void)populateLinearizeBuffer:(size_t)depthValuesCount 481 | { 482 | _linearizeBuffer = (uint16_t*)malloc(depthValuesCount * sizeof(uint16_t)); 483 | 484 | int maxShiftValue = 2048; 485 | for (int i=0; i < maxShiftValue * 2 ; i++) 486 | { 487 | float v = i/ (float)maxShiftValue; 488 | v = powf(v, 3)* 6; 489 | _linearizeBuffer[i] = v*6*256; 490 | } 491 | 492 | } 493 | 494 | - (void)convertShiftToRGBA:(const uint16_t*)shiftedDepth depthValuesCount:(size_t)depthValuesCount 495 | { 496 | for (size_t i = 0; i < depthValuesCount; i++) 497 | { 498 | // Use a lookup table to make the non-linear shifted depth values vary more linearly with metric depth 499 | int linearizedDepth = _linearizeBuffer[shiftedDepth[i]]; 500 | 501 | // Use the lower byte of the linearized shift value to scale between the base colors 502 | int lowerByte = (linearizedDepth & 0xff); 503 | 504 | // Use the upper byte to choose a base color. 505 | // Base colors range from: (closest) White, Red, Orange, Yellow, Green, Cyan, Blue, Black (farthest) 506 | int upperByte = (linearizedDepth >> 8); 507 | 508 | switch (upperByte) 509 | { 510 | case 0: 511 | _coloredDepthBuffer[4*i+0] = 255; 512 | _coloredDepthBuffer[4*i+1] = 255-lowerByte; 513 | _coloredDepthBuffer[4*i+2] = 255-lowerByte; 514 | _coloredDepthBuffer[4*i+3] = 255; 515 | break; 516 | case 1: 517 | _coloredDepthBuffer[4*i+0] = 255; 518 | _coloredDepthBuffer[4*i+1] = lowerByte; 519 | _coloredDepthBuffer[4*i+2] = 0; 520 | break; 521 | case 2: 522 | _coloredDepthBuffer[4*i+0] = 255-lowerByte; 523 | _coloredDepthBuffer[4*i+1] = 255; 524 | _coloredDepthBuffer[4*i+2] = 0; 525 | break; 526 | case 3: 527 | _coloredDepthBuffer[4*i+0] = 0; 528 | _coloredDepthBuffer[4*i+1] = 255; 529 | _coloredDepthBuffer[4*i+2] = lowerByte; 530 | break; 531 | case 4: 532 | _coloredDepthBuffer[4*i+0] = 0; 533 | _coloredDepthBuffer[4*i+1] = 255-lowerByte; 534 | _coloredDepthBuffer[4*i+2] = 255; 535 | break; 536 | case 5: 537 | _coloredDepthBuffer[4*i+0] = 0; 538 | _coloredDepthBuffer[4*i+1] = 0; 539 | _coloredDepthBuffer[4*i+2] = 255-lowerByte; 540 | break; 541 | default: 542 | _coloredDepthBuffer[4*i+0] = 0; 543 | _coloredDepthBuffer[4*i+1] = 0; 544 | _coloredDepthBuffer[4*i+2] = 0; 545 | break; 546 | } 547 | } 548 | } 549 | 550 | - (void)renderDepthFrame:(STDepthFrame *)depthFrame 551 | { 552 | size_t cols = depthFrame->width; 553 | size_t rows = depthFrame->height; 554 | 555 | if (_linearizeBuffer == NULL || _coloredDepthBuffer == NULL) 556 | { 557 | [self populateLinearizeBuffer:cols * rows]; 558 | _coloredDepthBuffer = (uint8_t*)malloc(cols * rows * 4); 559 | } 560 | 561 | // Conversion of 16-bit non-linear shift depth values to 32-bit RGBA 562 | // 563 | // Adapted from: https://github.com/OpenKinect/libfreenect/blob/master/examples/glview.c 564 | // 565 | [self convertShiftToRGBA:depthFrame->data depthValuesCount:cols * rows]; 566 | 567 | CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 568 | 569 | CGBitmapInfo bitmapInfo; 570 | bitmapInfo = (CGBitmapInfo)kCGImageAlphaNoneSkipLast; 571 | bitmapInfo |= kCGBitmapByteOrder32Big; 572 | 573 | NSData *data = [NSData dataWithBytes:_coloredDepthBuffer length:cols * rows * 4]; 574 | CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)data); //toll-free ARC bridging 575 | 576 | CGImageRef imageRef = CGImageCreate(cols, //width 577 | rows, //height 578 | 8, //bits per component 579 | 8 * 4, //bits per pixel 580 | cols * 4, //bytes per row 581 | colorSpace, //Quartz color space 582 | bitmapInfo, //Bitmap info (alpha channel?, order, etc) 583 | provider, //Source of data for bitmap 584 | NULL, //decode 585 | false, //pixel interpolation 586 | kCGRenderingIntentDefault); //rendering intent 587 | 588 | // Assign CGImage to UIImage 589 | _depthImageView.image = [UIImage imageWithCGImage:imageRef]; 590 | 591 | CGImageRelease(imageRef); 592 | CGDataProviderRelease(provider); 593 | CGColorSpaceRelease(colorSpace); 594 | 595 | } 596 | 597 | - (void) renderNormalsFrame: (STDepthFrame*) depthFrame 598 | { 599 | // Convert depth units from shift to millimeters (stored as floats) 600 | [_floatDepthFrame updateFromDepthFrame:depthFrame]; 601 | 602 | // Estimate surface normal direction from depth float values 603 | STNormalFrame *normalsFrame = [_normalsEstimator calculateNormalsWithProcessedFrame:_floatDepthFrame]; 604 | 605 | size_t cols = normalsFrame.width; 606 | size_t rows = normalsFrame.height; 607 | 608 | // Convert normal unit vectors (ranging from -1 to 1) to RGB (ranging from 0 to 255) 609 | // Z can be slightly positive in some cases too!
610 | if (_normalsBuffer == NULL) 611 | { 612 | _normalsBuffer = (uint8_t*)malloc(cols * rows * 4); 613 | } 614 | for (size_t i = 0; i < cols * rows; i++) 615 | { 616 | _normalsBuffer[4*i+0] = (uint8_t)( ( ( normalsFrame.normals[i].x / 2 ) + 0.5 ) * 255); 617 | _normalsBuffer[4*i+1] = (uint8_t)( ( ( normalsFrame.normals[i].y / 2 ) + 0.5 ) * 255); 618 | _normalsBuffer[4*i+2] = (uint8_t)( ( ( normalsFrame.normals[i].z / 2 ) + 0.5 ) * 255); 619 | } 620 | 621 | CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 622 | 623 | CGBitmapInfo bitmapInfo; 624 | bitmapInfo = (CGBitmapInfo)kCGImageAlphaNoneSkipFirst; 625 | bitmapInfo |= kCGBitmapByteOrder32Little; 626 | 627 | NSData *data = [NSData dataWithBytes:_normalsBuffer length:cols * rows * 4]; 628 | CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)data); 629 | 630 | CGImageRef imageRef = CGImageCreate(cols, 631 | rows, 632 | 8, 633 | 8 * 4, 634 | cols * 4, 635 | colorSpace, 636 | bitmapInfo, 637 | provider, 638 | NULL, 639 | false, 640 | kCGRenderingIntentDefault); 641 | 642 | _normalsImageView.image = [[UIImage alloc] initWithCGImage:imageRef]; 643 | 644 | CGImageRelease(imageRef); 645 | CGDataProviderRelease(provider); 646 | CGColorSpaceRelease(colorSpace); 647 | 648 | } 649 | 650 | - (void)renderColorFrame:(CMSampleBufferRef)sampleBuffer 651 | { 652 | CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 653 | CVPixelBufferLockBaseAddress(pixelBuffer, 0); 654 | 655 | size_t cols = CVPixelBufferGetWidth(pixelBuffer); 656 | size_t rows = CVPixelBufferGetHeight(pixelBuffer); 657 | 658 | CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 659 | 660 | unsigned char *ptr = (unsigned char *) CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); 661 | 662 | NSData *data = [[NSData alloc] initWithBytes:ptr length:rows*cols*4]; 663 | CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); 664 | 665 | CGBitmapInfo bitmapInfo; 666 | bitmapInfo = (CGBitmapInfo)kCGImageAlphaNoneSkipFirst; 667 | bitmapInfo |= kCGBitmapByteOrder32Little; 668 | 669 | CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)data); 670 | 671 | CGImageRef imageRef = CGImageCreate(cols, 672 | rows, 673 | 8, 674 | 8 * 4, 675 | cols*4, 676 | colorSpace, 677 | bitmapInfo, 678 | provider, 679 | NULL, 680 | false, 681 | kCGRenderingIntentDefault); 682 | 683 | _colorImageView.image = [[UIImage alloc] initWithCGImage:imageRef]; 684 | 685 | CGImageRelease(imageRef); 686 | CGDataProviderRelease(provider); 687 | CGColorSpaceRelease(colorSpace); 688 | 689 | } 690 | 691 | 692 | 693 | #pragma mark - AVFoundation 694 | 695 | - (void)startAVCaptureSession 696 | { 697 | NSString *sessionPreset = AVCaptureSessionPreset640x480; 698 | 699 | //-- Set up Capture Session. 700 | _session = [[AVCaptureSession alloc] init]; 701 | [_session beginConfiguration]; 702 | 703 | //-- Set preset session size. 704 | [_session setSessionPreset:sessionPreset]; 705 | 706 | //-- Create a video device and input from that device. Add the input to the capture session.
707 | AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 708 | if(videoDevice == nil) 709 | assert(0); 710 | 711 | NSError *error; 712 | [videoDevice lockForConfiguration:&error]; 713 | 714 | // Auto-focus, auto-exposure, auto-white balance 715 | if ([[[UIDevice currentDevice] systemVersion] compare:@"7.0" options:NSNumericSearch] != NSOrderedAscending) 716 | [videoDevice setAutoFocusRangeRestriction:AVCaptureAutoFocusRangeRestrictionFar]; 717 | 718 | [videoDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; 719 | 720 | [videoDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure]; 721 | [videoDevice setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]; 722 | 723 | [videoDevice unlockForConfiguration]; 724 | 725 | //-- Add the device to the session. 726 | AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error]; 727 | if(error) 728 | assert(0); 729 | 730 | [_session addInput:input]; // After this point, captureSession captureOptions are filled. 731 | 732 | //-- Create the output for the capture session. 733 | AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init]; 734 | 735 | [dataOutput setAlwaysDiscardsLateVideoFrames:YES]; 736 | 737 | //-- Request 32BGRA pixel buffers (the converters above expect BGRA input). 738 | [dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] 739 | forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; 740 | 741 | // Set dispatch to be on the main thread so OpenGL can do things with the data 742 | [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()]; 743 | 744 | [_session addOutput:dataOutput]; 745 | 746 | if ([[[UIDevice currentDevice] systemVersion] compare:@"7.0" options:NSNumericSearch] != NSOrderedAscending) 747 | { 748 | [videoDevice lockForConfiguration:&error]; 749 | [videoDevice setActiveVideoMaxFrameDuration:CMTimeMake(1, 30)]; 750 | [videoDevice setActiveVideoMinFrameDuration:CMTimeMake(1, 30)]; 751 | [videoDevice unlockForConfiguration]; 752 | } 753 | else 754 | { 755 | AVCaptureConnection *conn = [dataOutput connectionWithMediaType:AVMediaTypeVideo]; 756 | 757 | // Deprecated use is OK here because we're using the correct APIs on iOS 7 and above when available. 758 | // If we're running before iOS 7, we still really want 30 fps! 759 | #pragma clang diagnostic push 760 | #pragma clang diagnostic ignored "-Wdeprecated-declarations" 761 | conn.videoMinFrameDuration = CMTimeMake(1, 30); 762 | conn.videoMaxFrameDuration = CMTimeMake(1, 30); 763 | #pragma clang diagnostic pop 764 | 765 | } 766 | [_session commitConfiguration]; 767 | 768 | [_session startRunning]; 769 | 770 | } 771 | 772 | 773 | - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 774 | { 775 | 776 | // Pass into the driver. The sampleBuffer will return later with a synchronized depth or IR pair. 777 | [_sensorController frameSyncNewColorImage:sampleBuffer]; 778 | 779 | // If we weren't using framesync, we could just do the following instead: 780 | // [self renderColorFrame:sampleBuffer]; 781 | 782 | } 783 | 784 | 785 | @end 786 | --------------------------------------------------------------------------------