├── CMakeLists.txt ├── README.md ├── include ├── Openni2 │ ├── Bin │ │ └── OpenNI.ini │ ├── KinectProperties.h │ ├── Linux-x86 │ │ └── OniPlatformLinux-x86.h │ ├── OniCAPI.h │ ├── OniCEnums.h │ ├── OniCProperties.h │ ├── OniCTypes.h │ ├── OniEnums.h │ ├── OniPlatform.h │ ├── OniProperties.h │ ├── OniTest.h │ ├── OniVersion.h │ ├── OpenNI.h │ ├── PS1080.h │ └── PSLink.h ├── kinect2_tracker.hpp └── visualization.hpp ├── launch └── tracker.launch ├── msg ├── bounding_box.msg ├── user_IDs.msg └── user_points.msg ├── package.xml ├── setup_nite.bash └── src └── kinect2_tracker_node.cpp /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8.3) 2 | project(kinect2_tracker) 3 | 4 | # check c++11 / c++0x 5 | include(CheckCXXCompilerFlag) 6 | CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) 7 | CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) 8 | if(COMPILER_SUPPORTS_CXX11) 9 | set(CMAKE_CXX_FLAGS "-std=c++11") 10 | elseif(COMPILER_SUPPORTS_CXX0X) 11 | set(CMAKE_CXX_FLAGS "-std=c++0x") 12 | else() 13 | message(FATAL_ERROR "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. 
Please use a different C++ compiler.") 14 | endif() 15 | 16 | ## Find catkin macros and libraries 17 | ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) 18 | ## is used, also find other catkin packages 19 | find_package(catkin REQUIRED COMPONENTS 20 | cmake_modules 21 | cv_bridge 22 | geometry_msgs 23 | image_transport 24 | kdl_conversions 25 | roscpp 26 | roslib 27 | tf 28 | tf_conversions 29 | std_msgs 30 | visualization_msgs 31 | message_generation 32 | ) 33 | 34 | find_package(Eigen REQUIRED) 35 | find_package(cmake_modules REQUIRED) 36 | 37 | link_directories( 38 | ${catkin_LIBRARY_DIRS} 39 | ) 40 | 41 | include_directories( 42 | include 43 | ${catkin_INCLUDE_DIRS} 44 | ) 45 | 46 | add_message_files( 47 | DIRECTORY 48 | msg 49 | FILES 50 | user_IDs.msg 51 | user_points.msg 52 | bounding_box.msg 53 | ) 54 | 55 | generate_messages( 56 | DEPENDENCIES geometry_msgs std_msgs 57 | ) 58 | 59 | catkin_package( 60 | INCLUDE_DIRS include 61 | LIBRARIES kinect2_tracker 62 | CATKIN_DEPENDS cv_bridge geometry_msgs image_transport kdl_conversions roscpp roslib tf tf_conversions visualization_msgs message_runtime 63 | DEPENDS Eigen 64 | ) 65 | 66 | ########### 67 | ## Build ## 68 | ########### 69 | 70 | ## Specify additional locations of header files 71 | ## Your package locations should be listed before other locations 72 | # include_directories(include) 73 | include_directories( 74 | ${catkin_INCLUDE_DIRS} 75 | ${Eigen_INCLUDE_DIRS} 76 | ) 77 | 78 | ## Declare a C++ library 79 | add_library(kinect2_tracker 80 | src/kinect2_tracker_node.cpp 81 | ) 82 | 83 | ## Add cmake target dependencies of the library 84 | ## as an example, code may need to be generated before libraries 85 | ## either from message generation or dynamic reconfigure 86 | # add_dependencies(kinect2_tracker ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) 87 | 88 | set(NITE2_DIR ~/package_ws/NiTE-Linux-x64-2.2/) 89 | set(NITE2_LIB 
~/package_ws/NiTE-Linux-x64-2.2/Redist/libNiTE2.so) 90 | include_directories(./include/Openni2/) 91 | include_directories(${NITE2_DIR}/Include) 92 | include_directories(./include) 93 | include_directories(${OpenCV_INCLUDE_DIRS}/include) 94 | 95 | add_executable(kinect2_tracker_node src/kinect2_tracker_node.cpp include/kinect2_tracker.hpp include/visualization.hpp) 96 | target_link_libraries(kinect2_tracker_node OpenNI2 ${NITE2_LIB} ${catkin_LIBRARIES} ${Eigen_LIBRARIES}) 97 | 98 | add_dependencies(kinect2_tracker_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # kinect2_tracker 2 | A working ROS wrapper for the KinectOne (v2) using libfreenect2 3 | 4 | ## install 5 | 6 | - [install libfreenect2](https://github.com/OpenKinect/libfreenect2/) 7 | - Make sure to install all the optional stuff, including OpenCL and **OpenNI2** 8 | - When you build the library, do not follow the instructions there, instead run 9 | ```bash 10 | mkdir build && cd build 11 | cmake .. 
-DCMAKE_INSTALL_PREFIX=/usr/ 12 | make 13 | sudo make install 14 | ``` 15 | - [Download NiTE2](http://openni.ru/files/nite/index.html) and put it in `~/package_ws/NiTE-Linux-x64-2.2/` 16 | - Or you can put it in some other random places, but you need to modify `CMakeList.txt` and `setup_nite.bash` 17 | - `source setup_nite.bash` 18 | 19 | To run the program the launch file needs to be used 20 | 21 | ## Run 22 | 23 | ```bash 24 | roslaunch kinect2_tracker tracker.launch 25 | ``` 26 | 27 | ## API 28 | 29 | ### Published 30 | 31 | - `/people_skeleton` : `kinect2_tracker::user_IDs`, id array of the tracked people 32 | - `/people_points`: `kinect2_tracker::user_points`, center of mass for each person 33 | - `/people_points_viz`: `visualization_msgs::Marker`, people points to show in `rviz` 34 | - `tf` transforms for the human skeletons 35 | - Kinect RGB, depth and infrad images 36 | 37 | ### Params 38 | 39 | - `tf_prefix`: The prefirx when publishing tf 40 | - `relative_frame`: The base frame of the Kinect observations 41 | -------------------------------------------------------------------------------- /include/Openni2/Bin/OpenNI.ini: -------------------------------------------------------------------------------- 1 | [Log] 2 | ; 0 - Verbose; 1 - Info; 2 - Warning; 3 - Error. Default - None 3 | Verbosity=3 4 | LogToConsole=0 5 | LogToFile=0 6 | 7 | [Device] 8 | ;Override="" 9 | 10 | [Drivers] 11 | ; Location of the drivers specified by a relative path based on OpenNI's shared library or an absolute path. 12 | ; Path separator "/" can be used to be portable for any platforms. 
13 | ; Default - OpenNI2/Drivers 14 | ;Repository=OpenNI2/Drivers 15 | -------------------------------------------------------------------------------- /include/Openni2/KinectProperties.h: -------------------------------------------------------------------------------- 1 | /***************************************************************************** 2 | * * 3 | * OpenNI 2.x Alpha * 4 | * Copyright (C) 2012 PrimeSense Ltd. * 5 | * * 6 | * This file is part of OpenNI. * 7 | * * 8 | * Licensed under the Apache License, Version 2.0 (the "License"); * 9 | * you may not use this file except in compliance with the License. * 10 | * You may obtain a copy of the License at * 11 | * * 12 | * http://www.apache.org/licenses/LICENSE-2.0 * 13 | * * 14 | * Unless required by applicable law or agreed to in writing, software * 15 | * distributed under the License is distributed on an "AS IS" BASIS, * 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * 17 | * See the License for the specific language governing permissions and * 18 | * limitations under the License. * 19 | * * 20 | *****************************************************************************/ 21 | #ifndef KINECTPROPERTIES_H 22 | #define KINECTPROPERTIES_H 23 | 24 | #include 25 | 26 | /* 27 | * private properties of Microsoft Kinect devices. 28 | * 29 | * @remarks 30 | * properties structure is 0x045eXXYY (045e = Microsoft's USB vendor ID) 31 | * where XX is range and YY is code. 
32 | * range values: 33 | * 00 - common stream properties 34 | * 10 - depth stream properties 35 | * 20 - color stream properties 36 | * E0 - device commands 37 | * F0 - device properties 38 | */ 39 | enum 40 | { 41 | KINECT_PROPERTY_BASE = 0x045e0000, 42 | 43 | /*******************************************************************/ 44 | /* Common stream properties (00-) */ 45 | /*******************************************************************/ 46 | 47 | /*******************************************************************/ 48 | /* Depth stream properties (10-) */ 49 | /*******************************************************************/ 50 | 51 | /** OniBool, set and get. 52 | * Maps to Near Mode in Kinect SDK. 53 | * Also maps to XN_STREAM_PROPERTY_CLOSE_RANGE in PS1080.h. 54 | */ 55 | KINECT_DEPTH_PROPERTY_CLOSE_RANGE = KINECT_PROPERTY_BASE + 0x1001, 56 | 57 | /*******************************************************************/ 58 | /* Color stream properties (20-) */ 59 | /*******************************************************************/ 60 | 61 | /*******************************************************************/ 62 | /* Device commands (E0-) */ 63 | /*******************************************************************/ 64 | 65 | /*******************************************************************/ 66 | /* Device properties (F0-) */ 67 | /*******************************************************************/ 68 | 69 | /* 3D sensing properties (F0-) */ 70 | 71 | /** OniBool, set and get. 72 | * Maps to !NuiGetForceInfraredEmitterOff in Kinect SDK. 73 | * Also maps to XN_MODULE_PROPERTY_EMITTER_STATE. 74 | */ 75 | KINECT_DEVICE_PROPERTY_EMITTER_STATE = KINECT_PROPERTY_BASE + 0xF001, 76 | 77 | /* Non- 3D sensing bonus properties (F8) */ 78 | 79 | /** long, set and get. 80 | * Maps to NuiCameraElevationGetAngle and NuiCameraElevationSetAngle in Kinect SDK. 
81 | */ 82 | KINECT_DEVICE_PROPERTY_CAMERA_ELEVATION = KINECT_PROPERTY_BASE + 0xF801, 83 | 84 | /** KinectVector3f, get only. 85 | * Maps to NuiAccelerometerGetCurrentReading. 86 | */ 87 | KINECT_DEVICE_PROPERTY_ACCELEROMETER = KINECT_PROPERTY_BASE + 0xF802, 88 | 89 | /** String, get only. 90 | * Maps to NuiAudioArrayId. 91 | * Useful to find the mic array on the sensor when developing audio-enabled applications. 92 | */ 93 | KINECT_DEVICE_PROPERTY_AUDIO_ARRAY_ID = KINECT_PROPERTY_BASE + 0xF803, 94 | 95 | }; 96 | 97 | typedef struct 98 | { 99 | float x; 100 | float y; 101 | float z; 102 | } KinectVector3f; 103 | 104 | #endif // KINECTPROPERTIES_H 105 | -------------------------------------------------------------------------------- /include/Openni2/Linux-x86/OniPlatformLinux-x86.h: -------------------------------------------------------------------------------- 1 | /***************************************************************************** 2 | * * 3 | * OpenNI 2.x Alpha * 4 | * Copyright (C) 2012 PrimeSense Ltd. * 5 | * * 6 | * This file is part of OpenNI. * 7 | * * 8 | * Licensed under the Apache License, Version 2.0 (the "License"); * 9 | * you may not use this file except in compliance with the License. * 10 | * You may obtain a copy of the License at * 11 | * * 12 | * http://www.apache.org/licenses/LICENSE-2.0 * 13 | * * 14 | * Unless required by applicable law or agreed to in writing, software * 15 | * distributed under the License is distributed on an "AS IS" BASIS, * 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * 17 | * See the License for the specific language governing permissions and * 18 | * limitations under the License. 
* 19 | * * 20 | *****************************************************************************/ 21 | #ifndef ONIPLATFORMLINUX_X86_H 22 | #define ONIPLATFORMLINUX_X86_H 23 | 24 | //--------------------------------------------------------------------------- 25 | // Prerequisites 26 | //--------------------------------------------------------------------------- 27 | 28 | //--------------------------------------------------------------------------- 29 | // Includes 30 | //--------------------------------------------------------------------------- 31 | #include 32 | #include 33 | #include 34 | #include 35 | #include 36 | #include 37 | #include 38 | 39 | //--------------------------------------------------------------------------- 40 | // Platform Basic Definition 41 | //--------------------------------------------------------------------------- 42 | #define ONI_PLATFORM ONI_PLATFORM_LINUX_X86 43 | #define ONI_PLATFORM_STRING "Linux-x86" 44 | 45 | //--------------------------------------------------------------------------- 46 | // Platform Capabilities 47 | //--------------------------------------------------------------------------- 48 | #define ONI_PLATFORM_ENDIAN_TYPE ONI_PLATFORM_IS_LITTLE_ENDIAN 49 | 50 | #define ONI_PLATFORM_SUPPORTS_DYNAMIC_LIBS 1 51 | 52 | //--------------------------------------------------------------------------- 53 | // Memory 54 | //--------------------------------------------------------------------------- 55 | /** The default memory alignment. */ 56 | #define ONI_DEFAULT_MEM_ALIGN 16 57 | 58 | /** The thread static declarator (using TLS). */ 59 | #define ONI_THREAD_STATIC __thread 60 | 61 | //--------------------------------------------------------------------------- 62 | // Files 63 | //--------------------------------------------------------------------------- 64 | /** The maximum allowed file path size (in bytes). 
*/ 65 | #define ONI_FILE_MAX_PATH 256 66 | 67 | //--------------------------------------------------------------------------- 68 | // Call back 69 | //--------------------------------------------------------------------------- 70 | /** The std call type. */ 71 | #define ONI_STDCALL __stdcall 72 | 73 | /** The call back calling convention. */ 74 | #define ONI_CALLBACK_TYPE 75 | 76 | /** The C and C++ calling convension. */ 77 | #define ONI_C_DECL 78 | 79 | //--------------------------------------------------------------------------- 80 | // Macros 81 | //--------------------------------------------------------------------------- 82 | /** Returns the date and time at compile time. */ 83 | #define ONI_TIMESTAMP __DATE__ " " __TIME__ 84 | 85 | /** Converts n into a pre-processor string. */ 86 | #define ONI_STRINGIFY(n) ONI_STRINGIFY_HELPER(n) 87 | #define ONI_STRINGIFY_HELPER(n) #n 88 | 89 | //--------------------------------------------------------------------------- 90 | // API Export/Import Macros 91 | //--------------------------------------------------------------------------- 92 | /** Indicates an exported shared library function. */ 93 | #define ONI_API_EXPORT __attribute__ ((visibility("default"))) 94 | 95 | /** Indicates an imported shared library function. */ 96 | #define ONI_API_IMPORT 97 | 98 | /** Indicates a deprecated function */ 99 | #define ONI_API_DEPRECATED(msg) __attribute__((warning("This function is deprecated: " msg))) 100 | 101 | #endif // ONIPLATFORMLINUX_X86_H 102 | 103 | -------------------------------------------------------------------------------- /include/Openni2/OniCAPI.h: -------------------------------------------------------------------------------- 1 | /***************************************************************************** 2 | * * 3 | * OpenNI 2.x Alpha * 4 | * Copyright (C) 2012 PrimeSense Ltd. * 5 | * * 6 | * This file is part of OpenNI. 
* 7 | * * 8 | * Licensed under the Apache License, Version 2.0 (the "License"); * 9 | * you may not use this file except in compliance with the License. * 10 | * You may obtain a copy of the License at * 11 | * * 12 | * http://www.apache.org/licenses/LICENSE-2.0 * 13 | * * 14 | * Unless required by applicable law or agreed to in writing, software * 15 | * distributed under the License is distributed on an "AS IS" BASIS, * 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * 17 | * See the License for the specific language governing permissions and * 18 | * limitations under the License. * 19 | * * 20 | *****************************************************************************/ 21 | #ifndef ONICAPI_H 22 | #define ONICAPI_H 23 | 24 | #include "OniPlatform.h" 25 | #include "OniCTypes.h" 26 | #include "OniCProperties.h" 27 | #include "OniVersion.h" 28 | 29 | /******************************************** General APIs */ 30 | 31 | /** Initialize OpenNI2. Use ONI_API_VERSION as the version. */ 32 | ONI_C_API OniStatus oniInitialize(int apiVersion); 33 | /** Shutdown OpenNI2 */ 34 | ONI_C_API void oniShutdown(); 35 | 36 | /** 37 | * Get the list of currently connected device. 38 | * Each device is represented by its OniDeviceInfo. 39 | * pDevices will be allocated inside. 
40 | */ 41 | ONI_C_API OniStatus oniGetDeviceList(OniDeviceInfo** pDevices, int* pNumDevices); 42 | /** Release previously allocated device list */ 43 | ONI_C_API OniStatus oniReleaseDeviceList(OniDeviceInfo* pDevices); 44 | 45 | ONI_C_API OniStatus oniRegisterDeviceCallbacks(OniDeviceCallbacks* pCallbacks, void* pCookie, OniCallbackHandle* pHandle); 46 | ONI_C_API void oniUnregisterDeviceCallbacks(OniCallbackHandle handle); 47 | 48 | /** Wait for any of the streams to have a new frame */ 49 | ONI_C_API OniStatus oniWaitForAnyStream(OniStreamHandle* pStreams, int numStreams, int* pStreamIndex, int timeout); 50 | 51 | /** Get the current version of OpenNI2 */ 52 | ONI_C_API OniVersion oniGetVersion(); 53 | 54 | /** Translate from format to number of bytes per pixel. Will return 0 for formats in which the number of bytes per pixel isn't fixed. */ 55 | ONI_C_API int oniFormatBytesPerPixel(OniPixelFormat format); 56 | 57 | /** Get internal error */ 58 | ONI_C_API const char* oniGetExtendedError(); 59 | 60 | /******************************************** Device APIs */ 61 | 62 | /** Open a device. Uri can be taken from the matching OniDeviceInfo. */ 63 | ONI_C_API OniStatus oniDeviceOpen(const char* uri, OniDeviceHandle* pDevice); 64 | /** Close a device */ 65 | ONI_C_API OniStatus oniDeviceClose(OniDeviceHandle device); 66 | 67 | /** Get the possible configurations available for a specific source, or NULL if the source does not exist. */ 68 | ONI_C_API const OniSensorInfo* oniDeviceGetSensorInfo(OniDeviceHandle device, OniSensorType sensorType); 69 | 70 | /** Get the OniDeviceInfo of a certain device. */ 71 | ONI_C_API OniStatus oniDeviceGetInfo(OniDeviceHandle device, OniDeviceInfo* pInfo); 72 | 73 | /** Create a new stream in the device. The stream will originate from the source. 
*/ 74 | ONI_C_API OniStatus oniDeviceCreateStream(OniDeviceHandle device, OniSensorType sensorType, OniStreamHandle* pStream); 75 | 76 | ONI_C_API OniStatus oniDeviceEnableDepthColorSync(OniDeviceHandle device); 77 | ONI_C_API void oniDeviceDisableDepthColorSync(OniDeviceHandle device); 78 | ONI_C_API OniBool oniDeviceGetDepthColorSyncEnabled(OniDeviceHandle device); 79 | 80 | /** Set property in the device. Use the properties listed in OniTypes.h: ONI_DEVICE_PROPERTY_..., or specific ones supplied by the device. */ 81 | ONI_C_API OniStatus oniDeviceSetProperty(OniDeviceHandle device, int propertyId, const void* data, int dataSize); 82 | /** Get property in the device. Use the properties listed in OniTypes.h: ONI_DEVICE_PROPERTY_..., or specific ones supplied by the device. */ 83 | ONI_C_API OniStatus oniDeviceGetProperty(OniDeviceHandle device, int propertyId, void* data, int* pDataSize); 84 | /** Check if the property is supported by the device. Use the properties listed in OniTypes.h: ONI_DEVICE_PROPERTY_..., or specific ones supplied by the device. */ 85 | ONI_C_API OniBool oniDeviceIsPropertySupported(OniDeviceHandle device, int propertyId); 86 | /** Invoke an internal functionality of the device. */ 87 | ONI_C_API OniStatus oniDeviceInvoke(OniDeviceHandle device, int commandId, void* data, int dataSize); 88 | /** Check if a command is supported, for invoke */ 89 | ONI_C_API OniBool oniDeviceIsCommandSupported(OniDeviceHandle device, int commandId); 90 | 91 | ONI_C_API OniBool oniDeviceIsImageRegistrationModeSupported(OniDeviceHandle device, OniImageRegistrationMode mode); 92 | 93 | /** @internal */ 94 | ONI_C_API OniStatus oniDeviceOpenEx(const char* uri, const char* mode, OniDeviceHandle* pDevice); 95 | 96 | /******************************************** Stream APIs */ 97 | 98 | /** Destroy an existing stream */ 99 | ONI_C_API void oniStreamDestroy(OniStreamHandle stream); 100 | 101 | /** Get the OniSensorInfo of the certain stream. 
*/ 102 | ONI_C_API const OniSensorInfo* oniStreamGetSensorInfo(OniStreamHandle stream); 103 | 104 | /** Start generating data from the stream. */ 105 | ONI_C_API OniStatus oniStreamStart(OniStreamHandle stream); 106 | /** Stop generating data from the stream. */ 107 | ONI_C_API void oniStreamStop(OniStreamHandle stream); 108 | 109 | /** Get the next frame from the stream. This function is blocking until there is a new frame from the stream. For timeout, use oniWaitForStreams() first */ 110 | ONI_C_API OniStatus oniStreamReadFrame(OniStreamHandle stream, OniFrame** pFrame); 111 | 112 | /** Register a callback to when the stream has a new frame. */ 113 | ONI_C_API OniStatus oniStreamRegisterNewFrameCallback(OniStreamHandle stream, OniNewFrameCallback handler, void* pCookie, OniCallbackHandle* pHandle); 114 | /** Unregister a previously registered callback to when the stream has a new frame. */ 115 | ONI_C_API void oniStreamUnregisterNewFrameCallback(OniStreamHandle stream, OniCallbackHandle handle); 116 | 117 | /** Set property in the stream. Use the properties listed in OniTypes.h: ONI_STREAM_PROPERTY_..., or specific ones supplied by the device for its streams. */ 118 | ONI_C_API OniStatus oniStreamSetProperty(OniStreamHandle stream, int propertyId, const void* data, int dataSize); 119 | /** Get property in the stream. Use the properties listed in OniTypes.h: ONI_STREAM_PROPERTY_..., or specific ones supplied by the device for its streams. */ 120 | ONI_C_API OniStatus oniStreamGetProperty(OniStreamHandle stream, int propertyId, void* data, int* pDataSize); 121 | /** Check if the property is supported the stream. Use the properties listed in OniTypes.h: ONI_STREAM_PROPERTY_..., or specific ones supplied by the device for its streams. */ 122 | ONI_C_API OniBool oniStreamIsPropertySupported(OniStreamHandle stream, int propertyId); 123 | /** Invoke an internal functionality of the stream. 
*/ 124 | ONI_C_API OniStatus oniStreamInvoke(OniStreamHandle stream, int commandId, void* data, int dataSize); 125 | /** Check if a command is supported, for invoke */ 126 | ONI_C_API OniBool oniStreamIsCommandSupported(OniStreamHandle stream, int commandId); 127 | /** Sets the stream buffer allocation functions. Note that this function may only be called while stream is not started. */ 128 | ONI_C_API OniStatus oniStreamSetFrameBuffersAllocator(OniStreamHandle stream, OniFrameAllocBufferCallback alloc, OniFrameFreeBufferCallback free, void* pCookie); 129 | 130 | //// 131 | /** Mark another user of the frame. */ 132 | ONI_C_API void oniFrameAddRef(OniFrame* pFrame); 133 | /** Mark that the frame is no longer needed. */ 134 | ONI_C_API void oniFrameRelease(OniFrame* pFrame); 135 | 136 | // ONI_C_API OniStatus oniConvertRealWorldToProjective(OniStreamHandle stream, OniFloatPoint3D* pRealWorldPoint, OniFloatPoint3D* pProjectivePoint); 137 | // ONI_C_API OniStatus oniConvertProjectiveToRealWorld(OniStreamHandle stream, OniFloatPoint3D* pProjectivePoint, OniFloatPoint3D* pRealWorldPoint); 138 | 139 | /** 140 | * Creates a recorder that records to a file. 141 | * @param [in] fileName The name of the file that will contain the recording. 142 | * @param [out] pRecorder Points to the handle to the newly created recorder. 143 | * @retval ONI_STATUS_OK Upon successful completion. 144 | * @retval ONI_STATUS_ERROR Upon any kind of failure. 145 | */ 146 | ONI_C_API OniStatus oniCreateRecorder(const char* fileName, OniRecorderHandle* pRecorder); 147 | 148 | /** 149 | * Attaches a stream to a recorder. The amount of attached streams is virtually 150 | * infinite. You cannot attach a stream after you have started a recording, if 151 | * you do: an error will be returned by oniRecorderAttachStream. 152 | * @param [in] recorder The handle to the recorder. 153 | * @param [in] stream The handle to the stream. 
154 | * @param [in] allowLossyCompression Allows/denies lossy compression 155 | * @retval ONI_STATUS_OK Upon successful completion. 156 | * @retval ONI_STATUS_ERROR Upon any kind of failure. 157 | */ 158 | ONI_C_API OniStatus oniRecorderAttachStream( 159 | OniRecorderHandle recorder, 160 | OniStreamHandle stream, 161 | OniBool allowLossyCompression); 162 | 163 | /** 164 | * Starts recording. There must be at least one stream attached to the recorder, 165 | * if not: oniRecorderStart will return an error. 166 | * @param[in] recorder The handle to the recorder. 167 | * @retval ONI_STATUS_OK Upon successful completion. 168 | * @retval ONI_STATUS_ERROR Upon any kind of failure. 169 | */ 170 | ONI_C_API OniStatus oniRecorderStart(OniRecorderHandle recorder); 171 | 172 | /** 173 | * Stops recording. You can resume recording via oniRecorderStart. 174 | * @param[in] recorder The handle to the recorder. 175 | * @retval ONI_STATUS_OK Upon successful completion. 176 | * @retval ONI_STATUS_ERROR Upon any kind of failure. 177 | */ 178 | ONI_C_API void oniRecorderStop(OniRecorderHandle recorder); 179 | 180 | /** 181 | * Stops recording if needed, and destroys a recorder. 182 | * @param [in,out] recorder The handle to the recorder, the handle will be 183 | * invalidated (nullified) when the function returns. 184 | * @retval ONI_STATUS_OK Upon successful completion. 185 | * @retval ONI_STATUS_ERROR Upon any kind of failure. 
186 | */ 187 | ONI_C_API OniStatus oniRecorderDestroy(OniRecorderHandle* pRecorder); 188 | 189 | ONI_C_API OniStatus oniCoordinateConverterDepthToWorld(OniStreamHandle depthStream, float depthX, float depthY, float depthZ, float* pWorldX, float* pWorldY, float* pWorldZ); 190 | 191 | ONI_C_API OniStatus oniCoordinateConverterWorldToDepth(OniStreamHandle depthStream, float worldX, float worldY, float worldZ, float* pDepthX, float* pDepthY, float* pDepthZ); 192 | 193 | ONI_C_API OniStatus oniCoordinateConverterDepthToColor(OniStreamHandle depthStream, OniStreamHandle colorStream, int depthX, int depthY, OniDepthPixel depthZ, int* pColorX, int* pColorY); 194 | 195 | /******************************************** Log APIs */ 196 | 197 | /** 198 | * Change the log output folder 199 | 200 | * @param const char * strOutputFolder [in] path to the desirebale folder 201 | * 202 | * @retval ONI_STATUS_OK Upon successful completion. 203 | * @retval ONI_STATUS_ERROR Upon any kind of failure. 204 | */ 205 | ONI_C_API OniStatus oniSetLogOutputFolder(const char* strOutputFolder); 206 | 207 | /** 208 | * Get the current log file name 209 | 210 | * @param char * strFileName [out] hold the returned file name 211 | * @param int nBufferSize [in] size of strFileName 212 | * 213 | * @retval ONI_STATUS_OK Upon successful completion. 214 | * @retval ONI_STATUS_ERROR Upon any kind of failure. 215 | */ 216 | ONI_C_API OniStatus oniGetLogFileName(char* strFileName, int nBufferSize); 217 | 218 | /** 219 | * Set the Minimum severity for log produce 220 | 221 | * @param const char * strMask [in] Name of the logger 222 | * 223 | * @retval ONI_STATUS_OK Upon successful completion. 224 | * @retval ONI_STATUS_ERROR Upon any kind of failure. 225 | */ 226 | ONI_C_API OniStatus oniSetLogMinSeverity(int nMinSeverity); 227 | 228 | /** 229 | * Configures if log entries will be printed to console. 230 | 231 | * @param OniBool bConsoleOutput [in] TRUE to print log entries to console, FALSE otherwise. 
232 | * 233 | * @retval ONI_STATUS_OK Upon successful completion. 234 | * @retval ONI_STATUS_ERROR Upon any kind of failure. 235 | */ 236 | ONI_C_API OniStatus oniSetLogConsoleOutput(OniBool bConsoleOutput); 237 | 238 | /** 239 | * Configures if log entries will be printed to a log file. 240 | 241 | * @param OniBool bFileOutput [in] TRUE to print log entries to the file, FALSE otherwise. 242 | * 243 | * @retval ONI_STATUS_OK Upon successful completion. 244 | * @retval ONI_STATUS_ERROR Upon any kind of failure. 245 | */ 246 | ONI_C_API OniStatus oniSetLogFileOutput(OniBool bFileOutput); 247 | 248 | #if ONI_PLATFORM == ONI_PLATFORM_ANDROID_ARM 249 | /** 250 | * Configures if log entries will be printed to the Android log. 251 | 252 | * @param OniBool bAndroidOutput [in] TRUE to print log entries to the Android log, FALSE otherwise. 253 | * 254 | * @retval ONI_STATUS_OK Upon successful completion. 255 | * @retval ONI_STATUS_ERROR Upon any kind of failure. 256 | */ 257 | ONI_C_API OniStatus oniSetLogAndroidOutput(OniBool bAndroidOutput); 258 | #endif 259 | #endif // ONICAPI_H 260 | -------------------------------------------------------------------------------- /include/Openni2/OniCEnums.h: -------------------------------------------------------------------------------- 1 | /***************************************************************************** 2 | * * 3 | * OpenNI 2.x Alpha * 4 | * Copyright (C) 2012 PrimeSense Ltd. * 5 | * * 6 | * This file is part of OpenNI. * 7 | * * 8 | * Licensed under the Apache License, Version 2.0 (the "License"); * 9 | * you may not use this file except in compliance with the License. * 10 | * You may obtain a copy of the License at * 11 | * * 12 | * http://www.apache.org/licenses/LICENSE-2.0 * 13 | * * 14 | * Unless required by applicable law or agreed to in writing, software * 15 | * distributed under the License is distributed on an "AS IS" BASIS, * 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* 17 | * See the License for the specific language governing permissions and * 18 | * limitations under the License. * 19 | * * 20 | *****************************************************************************/ 21 | #ifndef ONICENUMS_H 22 | #define ONICENUMS_H 23 | 24 | /** Possible failure values */ 25 | typedef enum 26 | { 27 | ONI_STATUS_OK = 0, 28 | ONI_STATUS_ERROR = 1, 29 | ONI_STATUS_NOT_IMPLEMENTED = 2, 30 | ONI_STATUS_NOT_SUPPORTED = 3, 31 | ONI_STATUS_BAD_PARAMETER = 4, 32 | ONI_STATUS_OUT_OF_FLOW = 5, 33 | ONI_STATUS_NO_DEVICE = 6, 34 | ONI_STATUS_TIME_OUT = 102, 35 | } OniStatus; 36 | 37 | /** The source of the stream */ 38 | typedef enum 39 | { 40 | ONI_SENSOR_IR = 1, 41 | ONI_SENSOR_COLOR = 2, 42 | ONI_SENSOR_DEPTH = 3, 43 | 44 | } OniSensorType; 45 | 46 | /** All available formats of the output of a stream */ 47 | typedef enum 48 | { 49 | // Depth 50 | ONI_PIXEL_FORMAT_DEPTH_1_MM = 100, 51 | ONI_PIXEL_FORMAT_DEPTH_100_UM = 101, 52 | ONI_PIXEL_FORMAT_SHIFT_9_2 = 102, 53 | ONI_PIXEL_FORMAT_SHIFT_9_3 = 103, 54 | 55 | // Color 56 | ONI_PIXEL_FORMAT_RGB888 = 200, 57 | ONI_PIXEL_FORMAT_YUV422 = 201, 58 | ONI_PIXEL_FORMAT_GRAY8 = 202, 59 | ONI_PIXEL_FORMAT_GRAY16 = 203, 60 | ONI_PIXEL_FORMAT_JPEG = 204, 61 | ONI_PIXEL_FORMAT_YUYV = 205, 62 | } OniPixelFormat; 63 | 64 | typedef enum 65 | { 66 | ONI_DEVICE_STATE_OK = 0, 67 | ONI_DEVICE_STATE_ERROR = 1, 68 | ONI_DEVICE_STATE_NOT_READY = 2, 69 | ONI_DEVICE_STATE_EOF = 3 70 | } OniDeviceState; 71 | 72 | typedef enum 73 | { 74 | ONI_IMAGE_REGISTRATION_OFF = 0, 75 | ONI_IMAGE_REGISTRATION_DEPTH_TO_COLOR = 1, 76 | } OniImageRegistrationMode; 77 | 78 | enum 79 | { 80 | ONI_TIMEOUT_NONE = 0, 81 | ONI_TIMEOUT_FOREVER = -1, 82 | }; 83 | 84 | #endif // ONICENUMS_H 85 | -------------------------------------------------------------------------------- /include/Openni2/OniCProperties.h: -------------------------------------------------------------------------------- 1 | 
/***************************************************************************** 2 | * * 3 | * OpenNI 2.x Alpha * 4 | * Copyright (C) 2012 PrimeSense Ltd. * 5 | * * 6 | * This file is part of OpenNI. * 7 | * * 8 | * Licensed under the Apache License, Version 2.0 (the "License"); * 9 | * you may not use this file except in compliance with the License. * 10 | * You may obtain a copy of the License at * 11 | * * 12 | * http://www.apache.org/licenses/LICENSE-2.0 * 13 | * * 14 | * Unless required by applicable law or agreed to in writing, software * 15 | * distributed under the License is distributed on an "AS IS" BASIS, * 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * 17 | * See the License for the specific language governing permissions and * 18 | * limitations under the License. * 19 | * * 20 | *****************************************************************************/ 21 | #ifndef ONICPROPERTIES_H 22 | #define ONICPROPERTIES_H 23 | 24 | // Device properties 25 | enum 26 | { 27 | ONI_DEVICE_PROPERTY_FIRMWARE_VERSION = 0, // By implementation 28 | ONI_DEVICE_PROPERTY_DRIVER_VERSION = 1, // OniVersion 29 | ONI_DEVICE_PROPERTY_HARDWARE_VERSION = 2, // int 30 | ONI_DEVICE_PROPERTY_SERIAL_NUMBER = 3, // string 31 | ONI_DEVICE_PROPERTY_ERROR_STATE = 4, // ?? 
32 | ONI_DEVICE_PROPERTY_IMAGE_REGISTRATION = 5, // OniImageRegistrationMode 33 | 34 | // Files 35 | ONI_DEVICE_PROPERTY_PLAYBACK_SPEED = 100, // float 36 | ONI_DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED = 101, // OniBool 37 | }; 38 | 39 | // Stream properties 40 | enum 41 | { 42 | ONI_STREAM_PROPERTY_CROPPING = 0, // OniCropping* 43 | ONI_STREAM_PROPERTY_HORIZONTAL_FOV = 1, // float: radians 44 | ONI_STREAM_PROPERTY_VERTICAL_FOV = 2, // float: radians 45 | ONI_STREAM_PROPERTY_VIDEO_MODE = 3, // OniVideoMode* 46 | 47 | ONI_STREAM_PROPERTY_MAX_VALUE = 4, // int 48 | ONI_STREAM_PROPERTY_MIN_VALUE = 5, // int 49 | 50 | ONI_STREAM_PROPERTY_STRIDE = 6, // int 51 | ONI_STREAM_PROPERTY_MIRRORING = 7, // OniBool 52 | 53 | ONI_STREAM_PROPERTY_NUMBER_OF_FRAMES = 8, // int 54 | 55 | // Camera 56 | ONI_STREAM_PROPERTY_AUTO_WHITE_BALANCE = 100, // OniBool 57 | ONI_STREAM_PROPERTY_AUTO_EXPOSURE = 101, // OniBool 58 | ONI_STREAM_PROPERTY_EXPOSURE = 102, // int 59 | ONI_STREAM_PROPERTY_GAIN = 103, // int 60 | }; 61 | 62 | // Device commands (for Invoke) 63 | enum 64 | { 65 | ONI_DEVICE_COMMAND_SEEK = 1, // OniSeek 66 | }; 67 | 68 | #endif // ONICPROPERTIES_H 69 | -------------------------------------------------------------------------------- /include/Openni2/OniCTypes.h: -------------------------------------------------------------------------------- 1 | /***************************************************************************** 2 | * * 3 | * OpenNI 2.x Alpha * 4 | * Copyright (C) 2012 PrimeSense Ltd. * 5 | * * 6 | * This file is part of OpenNI. * 7 | * * 8 | * Licensed under the Apache License, Version 2.0 (the "License"); * 9 | * you may not use this file except in compliance with the License. 
* 10 | * You may obtain a copy of the License at * 11 | * * 12 | * http://www.apache.org/licenses/LICENSE-2.0 * 13 | * * 14 | * Unless required by applicable law or agreed to in writing, software * 15 | * distributed under the License is distributed on an "AS IS" BASIS, * 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * 17 | * See the License for the specific language governing permissions and * 18 | * limitations under the License. * 19 | * * 20 | *****************************************************************************/ 21 | #ifndef ONICTYPES_H 22 | #define ONICTYPES_H 23 | 24 | #include "OniPlatform.h" 25 | #include "OniCEnums.h" 26 | 27 | /** Basic types **/ 28 | typedef int OniBool; 29 | 30 | #ifndef TRUE 31 | #define TRUE 1 32 | #endif //TRUE 33 | #ifndef FALSE 34 | #define FALSE 0 35 | #endif //FALSE 36 | 37 | #define ONI_MAX_STR 256 38 | #define ONI_MAX_SENSORS 10 39 | 40 | struct OniCallbackHandleImpl; 41 | typedef struct OniCallbackHandleImpl* OniCallbackHandle; 42 | 43 | /** Holds an OpenNI version number, which consists of four separate numbers in the format: @c major.minor.maintenance.build. For example: 2.0.0.20. */ 44 | typedef struct 45 | { 46 | /** Major version number, incremented for major API restructuring. */ 47 | int major; 48 | /** Minor version number, incremented when significant new features added. */ 49 | int minor; 50 | /** Maintenance build number, incremented for new releases that primarily provide minor bug fixes. */ 51 | int maintenance; 52 | /** Build number. Incremented for each new API build. Generally not shown on the installer and download site. 
*/ 53 | int build; 54 | } OniVersion; 55 | 56 | typedef int OniHardwareVersion; 57 | 58 | /** Description of the output: format and resolution */ 59 | typedef struct 60 | { 61 | OniPixelFormat pixelFormat; 62 | int resolutionX; 63 | int resolutionY; 64 | int fps; 65 | } OniVideoMode; 66 | 67 | /** List of supported video modes by a specific source */ 68 | typedef struct 69 | { 70 | OniSensorType sensorType; 71 | int numSupportedVideoModes; 72 | OniVideoMode *pSupportedVideoModes; 73 | } OniSensorInfo; 74 | 75 | /** Basic description of a device */ 76 | typedef struct 77 | { 78 | char uri[ONI_MAX_STR]; 79 | char vendor[ONI_MAX_STR]; 80 | char name[ONI_MAX_STR]; 81 | uint16_t usbVendorId; 82 | uint16_t usbProductId; 83 | } OniDeviceInfo; 84 | 85 | struct _OniDevice; 86 | typedef struct _OniDevice* OniDeviceHandle; 87 | 88 | struct _OniStream; 89 | typedef struct _OniStream* OniStreamHandle; 90 | 91 | struct _OniRecorder; 92 | typedef struct _OniRecorder* OniRecorderHandle; 93 | 94 | /** All information of the current frame */ 95 | typedef struct 96 | { 97 | int dataSize; 98 | void* data; 99 | 100 | OniSensorType sensorType; 101 | uint64_t timestamp; 102 | int frameIndex; 103 | 104 | int width; 105 | int height; 106 | 107 | OniVideoMode videoMode; 108 | OniBool croppingEnabled; 109 | int cropOriginX; 110 | int cropOriginY; 111 | 112 | int stride; 113 | } OniFrame; 114 | 115 | typedef void (ONI_CALLBACK_TYPE* OniNewFrameCallback)(OniStreamHandle stream, void* pCookie); 116 | typedef void (ONI_CALLBACK_TYPE* OniGeneralCallback)(void* pCookie); 117 | typedef void (ONI_CALLBACK_TYPE* OniDeviceInfoCallback)(const OniDeviceInfo* pInfo, void* pCookie); 118 | typedef void (ONI_CALLBACK_TYPE* OniDeviceStateCallback)(const OniDeviceInfo* pInfo, OniDeviceState deviceState, void* pCookie); 119 | 120 | typedef void* (ONI_CALLBACK_TYPE* OniFrameAllocBufferCallback)(int size, void* pCookie); 121 | typedef void (ONI_CALLBACK_TYPE* OniFrameFreeBufferCallback)(void* data, void* 
pCookie); 122 | 123 | typedef struct 124 | { 125 | OniDeviceInfoCallback deviceConnected; 126 | OniDeviceInfoCallback deviceDisconnected; 127 | OniDeviceStateCallback deviceStateChanged; 128 | } OniDeviceCallbacks; 129 | 130 | typedef struct 131 | { 132 | int enabled; 133 | int originX; 134 | int originY; 135 | int width; 136 | int height; 137 | } OniCropping; 138 | 139 | // Pixel types 140 | /** 141 | Pixel type used to store depth images. 142 | */ 143 | typedef uint16_t OniDepthPixel; 144 | typedef uint16_t OniIRPixel; 145 | 146 | /** 147 | Pixel type used to store 16-bit grayscale images 148 | */ 149 | typedef uint16_t OniGrayscale16Pixel; 150 | 151 | /** 152 | Pixel type used to store 8-bit grayscale/bayer images 153 | */ 154 | typedef uint8_t OniGrayscale8Pixel; 155 | 156 | #pragma pack (push, 1) 157 | 158 | /** Holds the value of a single color image pixel in 24-bit RGB format. */ 159 | typedef struct 160 | { 161 | /* Red value of this pixel. */ 162 | uint8_t r; 163 | /* Green value of this pixel. */ 164 | uint8_t g; 165 | /* Blue value of this pixel. */ 166 | uint8_t b; 167 | } OniRGB888Pixel; 168 | 169 | /** 170 | Holds the value of two pixels in YUV422 format (Luminance/Chrominance,16-bits/pixel). 171 | The first pixel has the values y1, u, v. 172 | The second pixel has the values y2, u, v. 173 | */ 174 | typedef struct 175 | { 176 | /** First chrominance value for two pixels, stored as blue luminance difference signal. */ 177 | uint8_t u; 178 | /** Overall luminance value of first pixel. */ 179 | uint8_t y1; 180 | /** Second chrominance value for two pixels, stored as red luminance difference signal. */ 181 | uint8_t v; 182 | /** Overall luminance value of second pixel. */ 183 | uint8_t y2; 184 | } OniYUV422DoublePixel; 185 | 186 | /** 187 | Holds the value of two pixels in YUV422 format (Luminance/Chrominance,16-bits/pixel). 188 | The first pixel has the values y1, u, v. 189 | The second pixel has the values y2, u, v. 
190 | */ 191 | typedef struct 192 | { 193 | /** Overall luminance value of first pixel. */ 194 | uint8_t y1; 195 | /** First chrominance value for two pixels, stored as blue luminance difference signal. */ 196 | uint8_t u; 197 | /** Overall luminance value of second pixel. */ 198 | uint8_t y2; 199 | /** Second chrominance value for two pixels, stored as red luminance difference signal. */ 200 | uint8_t v; 201 | } OniYUYVDoublePixel; 202 | 203 | #pragma pack (pop) 204 | 205 | typedef struct 206 | { 207 | int frameIndex; 208 | OniStreamHandle stream; 209 | } OniSeek; 210 | 211 | #endif // ONICTYPES_H 212 | -------------------------------------------------------------------------------- /include/Openni2/OniEnums.h: -------------------------------------------------------------------------------- 1 | /***************************************************************************** 2 | * * 3 | * OpenNI 2.x Alpha * 4 | * Copyright (C) 2012 PrimeSense Ltd. * 5 | * * 6 | * This file is part of OpenNI. * 7 | * * 8 | * Licensed under the Apache License, Version 2.0 (the "License"); * 9 | * you may not use this file except in compliance with the License. * 10 | * You may obtain a copy of the License at * 11 | * * 12 | * http://www.apache.org/licenses/LICENSE-2.0 * 13 | * * 14 | * Unless required by applicable law or agreed to in writing, software * 15 | * distributed under the License is distributed on an "AS IS" BASIS, * 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * 17 | * See the License for the specific language governing permissions and * 18 | * limitations under the License. 
* 19 | * * 20 | *****************************************************************************/ 21 | #ifndef ONIENUMS_H 22 | #define ONIENUMS_H 23 | 24 | namespace openni 25 | { 26 | 27 | /** Possible failure values */ 28 | typedef enum 29 | { 30 | STATUS_OK = 0, 31 | STATUS_ERROR = 1, 32 | STATUS_NOT_IMPLEMENTED = 2, 33 | STATUS_NOT_SUPPORTED = 3, 34 | STATUS_BAD_PARAMETER = 4, 35 | STATUS_OUT_OF_FLOW = 5, 36 | STATUS_NO_DEVICE = 6, 37 | STATUS_TIME_OUT = 102, 38 | } Status; 39 | 40 | /** The source of the stream */ 41 | typedef enum 42 | { 43 | SENSOR_IR = 1, 44 | SENSOR_COLOR = 2, 45 | SENSOR_DEPTH = 3, 46 | 47 | } SensorType; 48 | 49 | /** All available formats of the output of a stream */ 50 | typedef enum 51 | { 52 | // Depth 53 | PIXEL_FORMAT_DEPTH_1_MM = 100, 54 | PIXEL_FORMAT_DEPTH_100_UM = 101, 55 | PIXEL_FORMAT_SHIFT_9_2 = 102, 56 | PIXEL_FORMAT_SHIFT_9_3 = 103, 57 | 58 | // Color 59 | PIXEL_FORMAT_RGB888 = 200, 60 | PIXEL_FORMAT_YUV422 = 201, 61 | PIXEL_FORMAT_GRAY8 = 202, 62 | PIXEL_FORMAT_GRAY16 = 203, 63 | PIXEL_FORMAT_JPEG = 204, 64 | PIXEL_FORMAT_YUYV = 205, 65 | } PixelFormat; 66 | 67 | typedef enum 68 | { 69 | DEVICE_STATE_OK = 0, 70 | DEVICE_STATE_ERROR = 1, 71 | DEVICE_STATE_NOT_READY = 2, 72 | DEVICE_STATE_EOF = 3 73 | } DeviceState; 74 | 75 | typedef enum 76 | { 77 | IMAGE_REGISTRATION_OFF = 0, 78 | IMAGE_REGISTRATION_DEPTH_TO_COLOR = 1, 79 | } ImageRegistrationMode; 80 | 81 | static const int TIMEOUT_NONE = 0; 82 | static const int TIMEOUT_FOREVER = -1; 83 | 84 | } // namespace openni 85 | 86 | #endif // ONIENUMS_H 87 | -------------------------------------------------------------------------------- /include/Openni2/OniPlatform.h: -------------------------------------------------------------------------------- 1 | /***************************************************************************** 2 | * * 3 | * OpenNI 2.x Alpha * 4 | * Copyright (C) 2012 PrimeSense Ltd. * 5 | * * 6 | * This file is part of OpenNI. 
* 7 | * * 8 | * Licensed under the Apache License, Version 2.0 (the "License"); * 9 | * you may not use this file except in compliance with the License. * 10 | * You may obtain a copy of the License at * 11 | * * 12 | * http://www.apache.org/licenses/LICENSE-2.0 * 13 | * * 14 | * Unless required by applicable law or agreed to in writing, software * 15 | * distributed under the License is distributed on an "AS IS" BASIS, * 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * 17 | * See the License for the specific language governing permissions and * 18 | * limitations under the License. * 19 | * * 20 | *****************************************************************************/ 21 | #ifndef ONIPLATFORM_H 22 | #define ONIPLATFORM_H 23 | 24 | // Supported platforms 25 | #define ONI_PLATFORM_WIN32 1 26 | #define ONI_PLATFORM_LINUX_X86 2 27 | #define ONI_PLATFORM_LINUX_ARM 3 28 | #define ONI_PLATFORM_MACOSX 4 29 | #define ONI_PLATFORM_ANDROID_ARM 5 30 | 31 | #if (defined _WIN32) 32 | # ifndef RC_INVOKED 33 | # if _MSC_VER < 1300 34 | # error OpenNI Platform Abstraction Layer - Win32 - Microsoft Visual Studio version below 2003 (7.0) are not supported! 35 | # endif 36 | # endif 37 | # include "Win32/OniPlatformWin32.h" 38 | #elif defined (ANDROID) && defined (__arm__) 39 | # include "Android-Arm/OniPlatformAndroid-Arm.h" 40 | #elif (__linux__ && (i386 || __x86_64__)) 41 | # include "Linux-x86/OniPlatformLinux-x86.h" 42 | #elif (__linux__ && __arm__) 43 | # include "Linux-Arm/OniPlatformLinux-Arm.h" 44 | #elif _ARC 45 | # include "ARC/OniPlaformARC.h" 46 | #elif (__APPLE__) 47 | # include "MacOSX/OniPlatformMacOSX.h" 48 | #else 49 | # error Xiron Platform Abstraction Layer - Unsupported Platform! 
50 | #endif 51 | 52 | #ifdef __cplusplus 53 | # define ONI_C extern "C" 54 | # define ONI_C_API_EXPORT ONI_C ONI_API_EXPORT 55 | # define ONI_C_API_IMPORT ONI_C ONI_API_IMPORT 56 | # define ONI_CPP_API_EXPORT ONI_API_EXPORT 57 | # define ONI_CPP_API_IMPORT ONI_API_IMPORT 58 | #else // __cplusplus 59 | # define ONI_C_API_EXPORT ONI_API_EXPORT 60 | # define ONI_C_API_IMPORT ONI_API_IMPORT 61 | #endif // __cplusplus 62 | 63 | #ifdef OPENNI2_EXPORT 64 | # define ONI_C_API ONI_C_API_EXPORT 65 | # define ONI_CPP_API ONI_CPP_API_EXPORT 66 | #else // OPENNI2_EXPORT 67 | # define ONI_C_API ONI_C_API_IMPORT 68 | # define ONI_CPP_API ONI_CPP_API_IMPORT 69 | #endif // OPENNI2_EXPORT 70 | 71 | 72 | #endif // ONIPLATFORM_H 73 | -------------------------------------------------------------------------------- /include/Openni2/OniProperties.h: -------------------------------------------------------------------------------- 1 | /***************************************************************************** 2 | * * 3 | * OpenNI 2.x Alpha * 4 | * Copyright (C) 2012 PrimeSense Ltd. * 5 | * * 6 | * This file is part of OpenNI. * 7 | * * 8 | * Licensed under the Apache License, Version 2.0 (the "License"); * 9 | * you may not use this file except in compliance with the License. * 10 | * You may obtain a copy of the License at * 11 | * * 12 | * http://www.apache.org/licenses/LICENSE-2.0 * 13 | * * 14 | * Unless required by applicable law or agreed to in writing, software * 15 | * distributed under the License is distributed on an "AS IS" BASIS, * 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * 17 | * See the License for the specific language governing permissions and * 18 | * limitations under the License. 
* 19 | * * 20 | *****************************************************************************/ 21 | #ifndef ONIPROPERTIES_H 22 | #define ONIPROPERTIES_H 23 | 24 | namespace openni 25 | { 26 | 27 | // Device properties 28 | enum 29 | { 30 | DEVICE_PROPERTY_FIRMWARE_VERSION = 0, // string 31 | DEVICE_PROPERTY_DRIVER_VERSION = 1, // OniVersion 32 | DEVICE_PROPERTY_HARDWARE_VERSION = 2, // int 33 | DEVICE_PROPERTY_SERIAL_NUMBER = 3, // string 34 | DEVICE_PROPERTY_ERROR_STATE = 4, // ?? 35 | DEVICE_PROPERTY_IMAGE_REGISTRATION = 5, // OniImageRegistrationMode 36 | 37 | // Files 38 | DEVICE_PROPERTY_PLAYBACK_SPEED = 100, // float 39 | DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED = 101, // OniBool 40 | }; 41 | 42 | // Stream properties 43 | enum 44 | { 45 | STREAM_PROPERTY_CROPPING = 0, // OniCropping* 46 | STREAM_PROPERTY_HORIZONTAL_FOV = 1, // float: radians 47 | STREAM_PROPERTY_VERTICAL_FOV = 2, // float: radians 48 | STREAM_PROPERTY_VIDEO_MODE = 3, // OniVideoMode* 49 | 50 | STREAM_PROPERTY_MAX_VALUE = 4, // int 51 | STREAM_PROPERTY_MIN_VALUE = 5, // int 52 | 53 | STREAM_PROPERTY_STRIDE = 6, // int 54 | STREAM_PROPERTY_MIRRORING = 7, // OniBool 55 | 56 | STREAM_PROPERTY_NUMBER_OF_FRAMES = 8, // int 57 | 58 | // Camera 59 | STREAM_PROPERTY_AUTO_WHITE_BALANCE = 100, // OniBool 60 | STREAM_PROPERTY_AUTO_EXPOSURE = 101, // OniBool 61 | STREAM_PROPERTY_EXPOSURE = 102, // int 62 | STREAM_PROPERTY_GAIN = 103, // int 63 | 64 | }; 65 | 66 | // Device commands (for Invoke) 67 | enum 68 | { 69 | DEVICE_COMMAND_SEEK = 1, // OniSeek 70 | }; 71 | 72 | } // namespace openni 73 | #endif // ONIPROPERTIES_H 74 | -------------------------------------------------------------------------------- /include/Openni2/OniTest.h: -------------------------------------------------------------------------------- 1 | /***************************************************************************** 2 | * * 3 | * OpenNI 2.x Alpha * 4 | * Copyright (C) 2012 PrimeSense Ltd. 
* 5 | * * 6 | * This file is part of OpenNI. * 7 | * * 8 | * Licensed under the Apache License, Version 2.0 (the "License"); * 9 | * you may not use this file except in compliance with the License. * 10 | * You may obtain a copy of the License at * 11 | * * 12 | * http://www.apache.org/licenses/LICENSE-2.0 * 13 | * * 14 | * Unless required by applicable law or agreed to in writing, software * 15 | * distributed under the License is distributed on an "AS IS" BASIS, * 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * 17 | * See the License for the specific language governing permissions and * 18 | * limitations under the License. * 19 | * * 20 | *****************************************************************************/ 21 | #ifndef ONITEST_H 22 | #define ONITEST_H 23 | 24 | #define TEST_DEVICE_NAME "Test" // use with device.open() to create a test device 25 | 26 | /** 27 | * Additional commands for Test device streams 28 | */ 29 | enum 30 | { 31 | TEST_COMMAND_ISSUE_FRAME = 0xFFFF0001, // TestCommandIssueFrame 32 | }; 33 | 34 | typedef struct TestCommandIssueFrame 35 | { 36 | uint64_t timestamp; 37 | void* data; 38 | } TestCommandIssueFrame; 39 | 40 | #endif // ONITEST_H 41 | -------------------------------------------------------------------------------- /include/Openni2/OniVersion.h: -------------------------------------------------------------------------------- 1 | /***************************************************************************** 2 | * * 3 | * OpenNI 2.x Alpha * 4 | * Copyright (C) 2012 PrimeSense Ltd. * 5 | * * 6 | * This file is part of OpenNI. * 7 | * * 8 | * Licensed under the Apache License, Version 2.0 (the "License"); * 9 | * you may not use this file except in compliance with the License. 
* 10 | * You may obtain a copy of the License at * 11 | * * 12 | * http://www.apache.org/licenses/LICENSE-2.0 * 13 | * * 14 | * Unless required by applicable law or agreed to in writing, software * 15 | * distributed under the License is distributed on an "AS IS" BASIS, * 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * 17 | * See the License for the specific language governing permissions and * 18 | * limitations under the License. * 19 | * * 20 | *****************************************************************************/ 21 | #ifndef ONIVERSION_H 22 | #define ONIVERSION_H 23 | 24 | #include "OniPlatform.h" 25 | 26 | #define ONI_VERSION_MAJOR 2 27 | #define ONI_VERSION_MINOR 3 28 | #define ONI_VERSION_MAINTENANCE 0 29 | #define ONI_VERSION_BUILD 15 30 | 31 | /** OpenNI version (in brief string format): "Major.Minor.Maintenance (Build)" */ 32 | #define ONI_BRIEF_VERSION_STRING \ 33 | ONI_STRINGIFY(ONI_VERSION_MAJOR) "." \ 34 | ONI_STRINGIFY(ONI_VERSION_MINOR) "." \ 35 | ONI_STRINGIFY(ONI_VERSION_MAINTENANCE) \ 36 | " (Build " ONI_STRINGIFY(ONI_VERSION_BUILD) ")" 37 | 38 | /** OpenNI version (in numeric format): (OpenNI major version * 100000000 + OpenNI minor version * 1000000 + OpenNI maintenance version * 10000 + OpenNI build version). */ 39 | #define ONI_VERSION (ONI_VERSION_MAJOR*100000000 + ONI_VERSION_MINOR*1000000 + ONI_VERSION_MAINTENANCE*10000 + ONI_VERSION_BUILD) 40 | #define ONI_CREATE_API_VERSION(major, minor) ((major)*1000 + (minor)) 41 | #define ONI_API_VERSION ONI_CREATE_API_VERSION(ONI_VERSION_MAJOR, ONI_VERSION_MINOR) 42 | 43 | /** OpenNI version (in string format): "Major.Minor.Maintenance.Build-Platform (MMM DD YYYY HH:MM:SS)". 
*/ 44 | #define ONI_VERSION_STRING \ 45 | ONI_BRIEF_VERSION_STRING "-" \ 46 | ONI_PLATFORM_STRING " (" ONI_TIMESTAMP ")" 47 | 48 | #endif // ONIVERSION_H 49 | -------------------------------------------------------------------------------- /include/Openni2/OpenNI.h: -------------------------------------------------------------------------------- 1 | /***************************************************************************** 2 | * * 3 | * OpenNI 2.x Alpha * 4 | * Copyright (C) 2012 PrimeSense Ltd. * 5 | * * 6 | * This file is part of OpenNI. * 7 | * * 8 | * Licensed under the Apache License, Version 2.0 (the "License"); * 9 | * you may not use this file except in compliance with the License. * 10 | * You may obtain a copy of the License at * 11 | * * 12 | * http://www.apache.org/licenses/LICENSE-2.0 * 13 | * * 14 | * Unless required by applicable law or agreed to in writing, software * 15 | * distributed under the License is distributed on an "AS IS" BASIS, * 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * 17 | * See the License for the specific language governing permissions and * 18 | * limitations under the License. * 19 | * * 20 | *****************************************************************************/ 21 | #ifndef OPENNI_H 22 | #define OPENNI_H 23 | 24 | #include "OniPlatform.h" 25 | #include "OniProperties.h" 26 | #include "OniEnums.h" 27 | 28 | #include "OniCAPI.h" 29 | #include "OniCProperties.h" 30 | 31 | /** 32 | openni is the namespace of the entire C++ API of OpenNI 33 | */ 34 | namespace openni 35 | { 36 | 37 | /** Pixel type used to store depth images. */ 38 | typedef uint16_t DepthPixel; 39 | 40 | /** Pixel type used to store IR images. */ 41 | typedef uint16_t Grayscale16Pixel; 42 | 43 | // structs 44 | /** Holds an OpenNI version number, which consists of four separate numbers in the format: @c major.minor.maintenance.build. For example: 2.0.0.20. 
*/ 45 | typedef struct 46 | { 47 | /** Major version number, incremented for major API restructuring. */ 48 | int major; 49 | /** Minor version number, incremented when significant new features added. */ 50 | int minor; 51 | /** Maintenance build number, incremented for new releases that primarily provide minor bug fixes. */ 52 | int maintenance; 53 | /** Build number. Incremented for each new API build. Generally not shown on the installer and download site. */ 54 | int build; 55 | } Version; 56 | 57 | /** Holds the value of a single color image pixel in 24-bit RGB format. */ 58 | typedef struct 59 | { 60 | /* Red value of this pixel. */ 61 | uint8_t r; 62 | /* Green value of this pixel. */ 63 | uint8_t g; 64 | /* Blue value of this pixel. */ 65 | uint8_t b; 66 | } RGB888Pixel; 67 | 68 | /** 69 | Holds the value of two pixels in YUV422 format (Luminance/Chrominance,16-bits/pixel). 70 | The first pixel has the values y1, u, v. 71 | The second pixel has the values y2, u, v. 72 | */ 73 | typedef struct 74 | { 75 | /** First chrominance value for two pixels, stored as blue luminance difference signal. */ 76 | uint8_t u; 77 | /** Overall luminance value of first pixel. */ 78 | uint8_t y1; 79 | /** Second chrominance value for two pixels, stored as red luminance difference signal. */ 80 | uint8_t v; 81 | /** Overall luminance value of second pixel. */ 82 | uint8_t y2; 83 | } YUV422DoublePixel; 84 | 85 | /** 86 | Holds the value of two pixels in YUV422 format (Luminance/Chrominance,16-bits/pixel). 87 | The first pixel has the values y1, u, v. 88 | The second pixel has the values y2, u, v. 89 | */ 90 | typedef struct 91 | { 92 | /** Overall luminance value of first pixel. */ 93 | uint8_t y1; 94 | /** First chrominance value for two pixels, stored as blue luminance difference signal. */ 95 | uint8_t u; 96 | /** Overall luminance value of second pixel. */ 97 | uint8_t y2; 98 | /** Second chrominance value for two pixels, stored as red luminance difference signal. 
*/ 99 | uint8_t v; 100 | } YUYVDoublePixel; 101 | 102 | /** This special URI can be passed to @ref Device::open() when the application has no concern for a specific device. */ 103 | class _NullString 104 | { 105 | public: 106 | _NullString() {} 107 | operator const char*() const { return NULL; } 108 | }; 109 | 110 | static const _NullString ANY_DEVICE; 111 | 112 | /** 113 | Provides a simple array class used throughout the API. Wraps a primitive array 114 | of objects, holding the elements and their count. 115 | */ 116 | template 117 | class Array 118 | { 119 | public: 120 | /** 121 | Default constructor. Creates an empty Array and sets the element count to zero. 122 | */ 123 | Array() : m_data(NULL), m_count(0), m_owner(false) {} 124 | 125 | /** 126 | Constructor. Creates new Array from an existing primitive array of known size. 127 | 128 | @tparam [in] T Object type this Array will contain. 129 | @param [in] data Pointer to a primitive array of objects of type T. 130 | @param [in] count Number of elements in the primitive array pointed to by data. 131 | */ 132 | Array(const T* data, int count) : m_owner(false) { _setData(data, count); } 133 | 134 | /** 135 | Destructor. Destroys the Array object. 136 | */ 137 | ~Array() 138 | { 139 | clear(); 140 | } 141 | 142 | /** 143 | Getter function for the Array size. 144 | @returns Current number of elements in the Array. 145 | */ 146 | int getSize() const { return m_count; } 147 | 148 | /** 149 | Implements the array indexing operator for the Array class. 150 | */ 151 | const T& operator[](int index) const {return m_data[index];} 152 | 153 | /** 154 | @internal 155 | Setter function for data. Causes this array to wrap an existing primitive array 156 | of specified type. The optional data ownership flag controls whether the primitive 157 | array this Array wraps will be destroyed when this Array is deconstructed. 158 | @param [in] T Type of objects array will contain. 159 | @param [in] data Pointer to first object in list. 
160 | @param [in] count Number of objects in list. 161 | @param [in] isOwner Optional flag to indicate data ownership 162 | */ 163 | void _setData(const T* data, int count, bool isOwner = false) 164 | { 165 | clear(); 166 | m_count = count; 167 | m_owner = isOwner; 168 | if (!isOwner) 169 | { 170 | m_data = data; 171 | } 172 | else 173 | { 174 | m_data = new T[count]; 175 | memcpy((void*)m_data, data, count*sizeof(T)); 176 | } 177 | } 178 | 179 | private: 180 | Array(const Array&); 181 | Array& operator=(const Array&); 182 | 183 | void clear() 184 | { 185 | if (m_owner && m_data != NULL) 186 | delete []m_data; 187 | m_owner = false; 188 | m_data = NULL; 189 | m_count = 0; 190 | } 191 | 192 | const T* m_data; 193 | int m_count; 194 | bool m_owner; 195 | }; 196 | 197 | // Forward declaration of all 198 | class SensorInfo; 199 | class VideoStream; 200 | class VideoFrameRef; 201 | class Device; 202 | class OpenNI; 203 | class CameraSettings; 204 | class PlaybackControl; 205 | 206 | /** 207 | Encapsulates a group of settings for a @ref VideoStream. Settings stored include 208 | frame rate, resolution, and pixel format. 209 | 210 | This class is used as an input for changing the settings of a @ref VideoStream, 211 | as well as an output for reporting the current settings of that class. It is also used 212 | by @ref SensorInfo to report available video modes of a stream. 213 | 214 | Recommended practice is to use @ref SensorInfo::getSupportedVideoModes() 215 | to obtain a list of valid video modes, and then to use items from that list to pass 216 | new settings to @ref VideoStream. This is much less likely to produce an 217 | invalid video mode than instantiating and manually changing objects of this 218 | class. 219 | */ 220 | class VideoMode : private OniVideoMode 221 | { 222 | public: 223 | /** 224 | Default constructor, creates an empty VideoMode object. Application programs should, in most 225 | cases, use the copy constructor to copy an existing valid video mode. 
This is much less 226 | error prone that creating and attempting to configure a new VideoMode from scratch. 227 | */ 228 | VideoMode() 229 | {} 230 | 231 | /** 232 | Copy constructor, creates a new VideoMode identical to an existing VideoMode. 233 | 234 | @param [in] other Existing VideoMode to copy. 235 | */ 236 | VideoMode(const VideoMode& other) 237 | { 238 | *this = other; 239 | } 240 | 241 | /** 242 | Assignment operator. Sets the pixel format, frame rate, and resolution of this 243 | VideoMode to equal that of a different VideoMode. 244 | 245 | @param [in] other Existing VideoMode to copy settings from. 246 | */ 247 | VideoMode& operator=(const VideoMode& other) 248 | { 249 | setPixelFormat(other.getPixelFormat()); 250 | setResolution(other.getResolutionX(), other.getResolutionY()); 251 | setFps(other.getFps()); 252 | 253 | return *this; 254 | } 255 | 256 | /** 257 | Getter function for the pixel format of this VideoMode. 258 | @returns Current pixel format setting of this VideoMode. 259 | */ 260 | PixelFormat getPixelFormat() const { return (PixelFormat)pixelFormat; } 261 | 262 | /** 263 | Getter function for the X resolution of this VideoMode. 264 | @returns Current horizontal resolution of this VideoMode, in pixels. 265 | */ 266 | int getResolutionX() const { return resolutionX; } 267 | 268 | /** 269 | Getter function for the Y resolution of this VideoMode. 270 | @returns Current vertical resolution of this VideoMode, in pixels. 271 | */ 272 | int getResolutionY() const {return resolutionY;} 273 | 274 | /** 275 | Getter function for the frame rate of this VideoMode. 276 | @returns Current frame rate, measured in frames per second. 277 | */ 278 | int getFps() const { return fps; } 279 | 280 | /** 281 | Setter function for the pixel format of this VideoMode. Application use of this 282 | function is not recommended. Instead, use @ref SensorInfo::getSupportedVideoModes() 283 | to obtain a list of valid video modes. 
284 | @param [in] format Desired new pixel format for this VideoMode. 285 | */ 286 | void setPixelFormat(PixelFormat format) { this->pixelFormat = (OniPixelFormat)format; } 287 | 288 | /** 289 | Setter function for the resolution of this VideoMode. Application use of this 290 | function is not recommended. Instead, use @ref SensorInfo::getSupportedVideoModes() to 291 | obtain a list of valid video modes. 292 | @param [in] resolutionX Desired new horizontal resolution in pixels. 293 | @param [in] resolutionY Desired new vertical resolution in pixels. 294 | */ 295 | void setResolution(int resolutionX, int resolutionY) 296 | { 297 | this->resolutionX = resolutionX; 298 | this->resolutionY = resolutionY; 299 | } 300 | 301 | /** 302 | Setter function for the frame rate. Application use of this function is not recommended. 303 | Instead, use @ref SensorInfo::getSupportedVideoModes() to obtain a list of valid 304 | video modes. 305 | @param [in] fps Desired new frame rate, measured in frames per second. 306 | */ 307 | void setFps(int fps) { this->fps = fps; } 308 | 309 | friend class SensorInfo; 310 | friend class VideoStream; 311 | friend class VideoFrameRef; 312 | }; 313 | 314 | /** 315 | The SensorInfo class encapsulates all info related to a specific sensor in a specific 316 | device. 317 | A @ref Device object holds a SensorInfo object for each sensor it contains. 318 | A @ref VideoStream object holds one SensorInfo object, describing the sensor used to produce that stream. 319 | 320 | A given SensorInfo object will contain the type of the sensor (Depth, IR or Color), and 321 | a list of all video modes that the sensor can support. Each available video mode will have a single 322 | VideoMode object that can be queried to get the details of that mode. 323 | 324 | SensorInfo objects should be the only source of VideoMode objects for the vast majority of 325 | application programs. 
326 | 327 | Application programs will never directly instantiate objects of type SensorInfo. In fact, no 328 | public constructors are provided. SensorInfo objects should be obtained either from a Device or @ref VideoStream, 329 | and in turn be used to provide available video modes for that sensor. 330 | */ 331 | class SensorInfo 332 | { 333 | public: 334 | /** 335 | Provides the sensor type of the sensor this object is associated with. 336 | @returns Type of the sensor. 337 | */ 338 | SensorType getSensorType() const { return (SensorType)m_pInfo->sensorType; } 339 | 340 | /** 341 | Provides a list of video modes that this sensor can support. This function is the 342 | recommended method to be used by applications to obtain @ref VideoMode objects. 343 | 344 | @returns Reference to an array of @ref VideoMode objects, one for each supported 345 | video mode. 346 | */ 347 | const Array& getSupportedVideoModes() const { return m_videoModes; } 348 | 349 | private: 350 | SensorInfo(const SensorInfo&); 351 | SensorInfo& operator=(const SensorInfo&); 352 | 353 | SensorInfo() : m_pInfo(NULL), m_videoModes(NULL, 0) {} 354 | 355 | SensorInfo(const OniSensorInfo* pInfo) : m_pInfo(NULL), m_videoModes(NULL, 0) 356 | { 357 | _setInternal(pInfo); 358 | } 359 | 360 | void _setInternal(const OniSensorInfo* pInfo) 361 | { 362 | m_pInfo = pInfo; 363 | if (pInfo == NULL) 364 | { 365 | m_videoModes._setData(NULL, 0); 366 | } 367 | else 368 | { 369 | m_videoModes._setData(static_cast(pInfo->pSupportedVideoModes), pInfo->numSupportedVideoModes); 370 | } 371 | } 372 | 373 | const OniSensorInfo* m_pInfo; 374 | Array m_videoModes; 375 | 376 | friend class VideoStream; 377 | friend class Device; 378 | }; 379 | 380 | /** 381 | The DeviceInfo class encapsulates info related to a specific device. 
382 | 383 | Applications will generally obtain objects of this type via calls to @ref OpenNI::enumerateDevices() or 384 | @ref openni::Device::getDeviceInfo(), and then use the various accessor functions to obtain specific 385 | information on that device. 386 | 387 | There should be no reason for application code to instantiate this object directly. 388 | */ 389 | class DeviceInfo : private OniDeviceInfo 390 | { 391 | public: 392 | /** 393 | Returns the device URI. URI can be used by @ref Device::open to open a specific device. 394 | The URI string format is determined by the driver. 395 | */ 396 | const char* getUri() const { return uri; } 397 | /** Returns a the vendor name for this device. */ 398 | const char* getVendor() const { return vendor; } 399 | /** Returns the device name for this device. */ 400 | const char* getName() const { return name; } 401 | /** Returns the USB VID code for this device. */ 402 | uint16_t getUsbVendorId() const { return usbVendorId; } 403 | /** Returns the USB PID code for this device. */ 404 | uint16_t getUsbProductId() const { return usbProductId; } 405 | 406 | friend class Device; 407 | friend class OpenNI; 408 | }; 409 | 410 | /** 411 | The @ref VideoFrameRef class encapsulates a single video frame - the output of a @ref VideoStream at a specific time. 412 | The data contained will be a single frame of color, IR, or depth video, along with associated meta data. 413 | 414 | An object of type @ref VideoFrameRef does not actually hold the data of the frame, but only a reference to it. The 415 | reference can be released by destroying the @ref VideoFrameRef object, or by calling the @ref release() method. The 416 | actual data of the frame is freed when the last reference to it is released. 417 | 418 | The usual way to obtain @ref VideoFrameRef objects is by a call to @ref VideoStream.:readFrame(). 419 | 420 | All data references by a @ref VideoFrameRef is stored as a primitive array of pixels. 
Each pixel will be 421 | of a type according to the configured pixel format (see @ref VideoMode). 422 | */ 423 | class VideoFrameRef 424 | { 425 | public: 426 | /** 427 | Default constructor. Creates a new empty @ref VideoFrameRef object. 428 | This object will be invalid until initialized by a call to @ref VideoStream::readFrame(). 429 | */ 430 | VideoFrameRef() 431 | { 432 | m_pFrame = NULL; 433 | } 434 | 435 | /** 436 | Destroy this object and release the reference to the frame. 437 | */ 438 | ~VideoFrameRef() 439 | { 440 | release(); 441 | } 442 | 443 | /** 444 | Copy constructor. Creates a new @ref VideoFrameRef object. The newly created 445 | object will reference the same frame current object references. 446 | @param [in] other Another @ref VideoFrameRef object. 447 | */ 448 | VideoFrameRef(const VideoFrameRef& other) : m_pFrame(NULL) 449 | { 450 | _setFrame(other.m_pFrame); 451 | } 452 | 453 | /** 454 | Make this @ref VideoFrameRef object reference the same frame that the @c other frame references. 455 | If this object referenced another frame before calling this method, the previous frame will be released. 456 | @param [in] other Another @ref VideoFrameRef object. 457 | */ 458 | VideoFrameRef& operator=(const VideoFrameRef& other) 459 | { 460 | _setFrame(other.m_pFrame); 461 | return *this; 462 | } 463 | 464 | /** 465 | Getter function for the size of the data contained by this object. Useful primarily 466 | when allocating buffers. 467 | @returns Current size of data pointed to by this object, measured in bytes. 468 | */ 469 | inline int getDataSize() const 470 | { 471 | return m_pFrame->dataSize; 472 | } 473 | 474 | /** 475 | Getter function for the array of data pointed to by this object. 476 | @returns Pointer to the actual frame data array. Type of data 477 | pointed to can be determined according to the pixel format (can be obtained by calling @ref getVideoMode()). 
478 | */ 479 | inline const void* getData() const 480 | { 481 | return m_pFrame->data; 482 | } 483 | 484 | /** 485 | Getter function for the sensor type used to produce this frame. Used to determine whether 486 | this is an IR, Color or Depth frame. See the @ref SensorType enumeration for all possible return 487 | values from this function. 488 | @returns The type of sensor used to produce this frame. 489 | */ 490 | inline SensorType getSensorType() const 491 | { 492 | return (SensorType)m_pFrame->sensorType; 493 | } 494 | 495 | /** 496 | Returns a reference to the @ref VideoMode object assigned to this frame. This object describes 497 | the video mode the sensor was configured to when the frame was produced and can be used 498 | to determine the pixel format and resolution of the data. It will also provide the frame rate 499 | that the sensor was running at when it recorded this frame. 500 | @returns Reference to the @ref VideoMode assigned to this frame. 501 | */ 502 | inline const VideoMode& getVideoMode() const 503 | { 504 | return static_cast(m_pFrame->videoMode); 505 | } 506 | 507 | /** 508 | Provides a timestamp for the frame. The 'zero' point for this stamp 509 | is implementation specific, but all streams from the same device are guaranteed to use the same zero. 510 | This value can therefore be used to compute time deltas between frames from the same device, 511 | regardless of whether they are from the same stream. 512 | @returns Timestamp of frame, measured in microseconds from an arbitrary zero 513 | */ 514 | inline uint64_t getTimestamp() const 515 | { 516 | return m_pFrame->timestamp; 517 | } 518 | 519 | /** 520 | Frames are provided sequential frame ID numbers by the sensor that produced them. If frame 521 | synchronization has been enabled for a device via @ref Device::setDepthColorSyncEnabled(), then frame 522 | numbers for corresponding frames of depth and color are guaranteed to match. 
523 | 524 | If frame synchronization is not enabled, then there is no guarantee of matching frame indexes between 525 | @ref VideoStream "VideoStreams". In the latter case, applications should use timestamps instead of frame indexes to 526 | align frames in time. 527 | @returns Index number for this frame. 528 | */ 529 | inline int getFrameIndex() const 530 | { 531 | return m_pFrame->frameIndex; 532 | } 533 | 534 | /** 535 | Gives the current width of this frame, measured in pixels. If cropping is enabled, this will be 536 | the width of the cropping window. If cropping is not enabled, then this will simply be equal to 537 | the X resolution of the @ref VideoMode used to produce this frame. 538 | @returns Width of this frame in pixels. 539 | */ 540 | inline int getWidth() const 541 | { 542 | return m_pFrame->width; 543 | } 544 | 545 | /** 546 | Gives the current height of this frame, measured in pixels. If cropping is enabled, this will 547 | be the length of the cropping window. If cropping is not enabled, then this will simply be equal 548 | to the Y resolution of the @ref VideoMode used to produce this frame. 549 | */ 550 | inline int getHeight() const 551 | { 552 | return m_pFrame->height; 553 | } 554 | 555 | /** 556 | Indicates whether cropping was enabled when the frame was produced. 557 | @return true if cropping is enabled, false otherwise 558 | */ 559 | inline bool getCroppingEnabled() const 560 | { 561 | return m_pFrame->croppingEnabled == TRUE; 562 | } 563 | 564 | /** 565 | Indicates the X coordinate of the upper left corner of the crop window. 566 | @return Distance of crop origin from left side of image, in pixels. 567 | */ 568 | inline int getCropOriginX() const 569 | { 570 | return m_pFrame->cropOriginX; 571 | } 572 | 573 | /** 574 | Indicates the Y coordinate of the upper left corner of the crop window. 575 | @return Distance of crop origin from top of image, in pixels. 
576 | */ 577 | inline int getCropOriginY() const 578 | { 579 | return m_pFrame->cropOriginY; 580 | } 581 | 582 | /** 583 | Gives the length of one row of pixels, measured in bytes. Primarily useful 584 | for indexing the array which contains the data. 585 | @returns Stride of the array which contains the image for this frame, in bytes 586 | */ 587 | inline int getStrideInBytes() const 588 | { 589 | return m_pFrame->stride; 590 | } 591 | 592 | /** 593 | Check if this object references an actual frame. 594 | */ 595 | inline bool isValid() const 596 | { 597 | return m_pFrame != NULL; 598 | } 599 | 600 | /** 601 | Release the reference to the frame. Once this method is called, the object becomes invalid, and no method 602 | should be called other than the assignment operator, or passing this object to a @ref VideoStream::readFrame() call. 603 | */ 604 | void release() 605 | { 606 | if (m_pFrame != NULL) 607 | { 608 | oniFrameRelease(m_pFrame); 609 | m_pFrame = NULL; 610 | } 611 | } 612 | 613 | /** @internal */ 614 | void _setFrame(OniFrame* pFrame) 615 | { 616 | setReference(pFrame); 617 | if (pFrame != NULL) 618 | { 619 | oniFrameAddRef(pFrame); 620 | } 621 | } 622 | 623 | /** @internal */ 624 | OniFrame* _getFrame() 625 | { 626 | return m_pFrame; 627 | } 628 | 629 | private: 630 | friend class VideoStream; 631 | inline void setReference(OniFrame* pFrame) 632 | { 633 | // Initial - don't addref. This is the reference from OpenNI 634 | release(); 635 | m_pFrame = pFrame; 636 | } 637 | 638 | OniFrame* m_pFrame; // const!!? 639 | }; 640 | 641 | /** 642 | The @ref VideoStream object encapsulates a single video stream from a device. Once created, it is used to start data flow 643 | from the device, and to read individual frames of data. This is the central class used to obtain data in OpenNI. 
It 644 | provides the ability to manually read data in a polling loop, as well as providing events and a Listener class that can be 645 | used to implement event-driven data acquisition. 646 | 647 | Aside from the video data frames themselves, the class offers a number of functions used for obtaining information about a 648 | @ref VideoStream. Field of view, available video modes, and minimum and maximum valid pixel values can all be obtained. 649 | 650 | In addition to obtaining data, the @ref VideoStream object is used to set all configuration properties that apply to a specific 651 | stream (rather than to an entire device). In particular, it is used to control cropping, mirroring, and video modes. 652 | 653 | A pointer to a valid, initialized device that provides the desired stream type is required to create a stream. 654 | 655 | Several video streams can be created to stream data from the same sensor. This is useful if several components of an application 656 | need to read frames separately. 657 | 658 | While some device might allow different streams 659 | from the same sensor to have different configurations, most devices will have a single configuration for the sensor, 660 | shared by all streams. 661 | */ 662 | class VideoStream 663 | { 664 | public: 665 | /** 666 | The @ref VideoStream::NewFrameListener class is provided to allow the implementation of event driven frame reading. To use 667 | it, create a class that inherits from it and implement override the onNewFrame() method. Then, register 668 | your created class with an active @ref VideoStream using the @ref VideoStream::addNewFrameListener() function. Once this is done, the 669 | event handler function you implemented will be called whenever a new frame becomes available. You may call 670 | @ref VideoStream::readFrame() from within the event handler. 671 | */ 672 | class NewFrameListener 673 | { 674 | public: 675 | /** 676 | Default constructor. 
677 | */ 678 | NewFrameListener() : m_callbackHandle(NULL) 679 | { 680 | } 681 | 682 | virtual ~NewFrameListener() 683 | { 684 | } 685 | 686 | /** 687 | Derived classes should implement this function to handle new frames. 688 | */ 689 | virtual void onNewFrame(VideoStream&) = 0; 690 | 691 | private: 692 | friend class VideoStream; 693 | 694 | static void ONI_CALLBACK_TYPE callback(OniStreamHandle streamHandle, void* pCookie) 695 | { 696 | NewFrameListener* pListener = (NewFrameListener*)pCookie; 697 | VideoStream stream; 698 | stream._setHandle(streamHandle); 699 | pListener->onNewFrame(stream); 700 | stream._setHandle(NULL); 701 | } 702 | OniCallbackHandle m_callbackHandle; 703 | }; 704 | 705 | class FrameAllocator 706 | { 707 | public: 708 | virtual ~FrameAllocator() {} 709 | virtual void* allocateFrameBuffer(int size) = 0; 710 | virtual void freeFrameBuffer(void* data) = 0; 711 | 712 | private: 713 | friend class VideoStream; 714 | 715 | static void* ONI_CALLBACK_TYPE allocateFrameBufferCallback(int size, void* pCookie) 716 | { 717 | FrameAllocator* pThis = (FrameAllocator*)pCookie; 718 | return pThis->allocateFrameBuffer(size); 719 | } 720 | 721 | static void ONI_CALLBACK_TYPE freeFrameBufferCallback(void* data, void* pCookie) 722 | { 723 | FrameAllocator* pThis = (FrameAllocator*)pCookie; 724 | pThis->freeFrameBuffer(data); 725 | } 726 | }; 727 | 728 | /** 729 | Default constructor. Creates a new, non-valid @ref VideoStream object. The object created will be invalid until its create() function 730 | is called with a valid Device. 731 | */ 732 | VideoStream() : m_stream(NULL), m_sensorInfo(), m_pCameraSettings(NULL), m_isOwner(true) 733 | {} 734 | 735 | /** 736 | Handle constructor. Creates a VideoStream object based on the given initialized handle. 
737 | This object will not destroy the underlying handle when @ref destroy() or destructor is called 738 | */ 739 | explicit VideoStream(OniStreamHandle handle) : m_stream(NULL), m_sensorInfo(), m_pCameraSettings(NULL), m_isOwner(false) 740 | { 741 | _setHandle(handle); 742 | } 743 | 744 | /** 745 | Destructor. The destructor calls the destroy() function, but it is considered a best practice for applications to 746 | call destroy() manually on any @ref VideoStream that they run create() on. 747 | */ 748 | ~VideoStream() 749 | { 750 | destroy(); 751 | } 752 | 753 | /** 754 | Checks to see if this object has been properly initialized and currently points to a valid stream. 755 | @returns true if this object has been previously initialized, false otherwise. 756 | */ 757 | bool isValid() const 758 | { 759 | return m_stream != NULL; 760 | } 761 | 762 | /** 763 | Creates a stream of frames from a specific sensor type of a specific device. You must supply a reference to a 764 | Device that supplies the sensor type requested. You can use @ref Device::hasSensor() to check whether a 765 | given sensor is available on your target device before calling create(). 766 | 767 | @param [in] device A reference to the @ref Device you want to create the stream on. 768 | @param [in] sensorType The type of sensor the stream should produce data from. 769 | @returns Status code indicating success or failure for this operation. 770 | */ 771 | inline Status create(const Device& device, SensorType sensorType); 772 | 773 | /** 774 | Destroy this stream. This function is currently called automatically by the destructor, but it is 775 | considered a best practice for applications to manually call this function on any @ref VideoStream that they 776 | call create() for. 777 | */ 778 | inline void destroy(); 779 | 780 | /** 781 | Provides the @ref SensorInfo object associated with the sensor that is producing this @ref VideoStream. 
Note that 782 | this function will return NULL if the stream has not yet been initialized with the create() function. 783 | 784 | @ref SensorInfo is useful primarily as a means of learning which video modes are valid for this VideoStream. 785 | 786 | @returns Reference to the SensorInfo object associated with the sensor providing this stream. 787 | */ 788 | const SensorInfo& getSensorInfo() const 789 | { 790 | return m_sensorInfo; 791 | } 792 | 793 | /** 794 | Starts data generation from this video stream. 795 | */ 796 | Status start() 797 | { 798 | if (!isValid()) 799 | { 800 | return STATUS_ERROR; 801 | } 802 | 803 | return (Status)oniStreamStart(m_stream); 804 | } 805 | 806 | /** 807 | Stops data generation from this video stream. 808 | */ 809 | void stop() 810 | { 811 | if (!isValid()) 812 | { 813 | return; 814 | } 815 | 816 | oniStreamStop(m_stream); 817 | } 818 | 819 | /** 820 | Read the next frame from this video stream, delivered as a @ref VideoFrameRef. This is the primary 821 | method for manually obtaining frames of video data. 822 | If no new frame is available, the call will block until one is available. 823 | To avoid blocking, use @ref VideoStream::Listener to implement an event driven architecture. Another 824 | alternative is to use @ref OpenNI::waitForAnyStream() to wait for new frames from several streams. 825 | 826 | @param [out] pFrame Pointer to a @ref VideoFrameRef object to hold the reference to the new frame. 827 | @returns Status code to indicated success or failure of this function. 828 | */ 829 | Status readFrame(VideoFrameRef* pFrame) 830 | { 831 | if (!isValid()) 832 | { 833 | return STATUS_ERROR; 834 | } 835 | 836 | OniFrame* pOniFrame; 837 | Status rc = (Status)oniStreamReadFrame(m_stream, &pOniFrame); 838 | 839 | pFrame->setReference(pOniFrame); 840 | return rc; 841 | } 842 | 843 | /** 844 | Adds a new Listener to receive this VideoStream onNewFrame event. 
See @ref VideoStream::NewFrameListener for 845 | more information on implementing an event driven frame reading architecture. An instance of a listener can be added to only one source. 846 | 847 | @param [in] pListener Pointer to a @ref VideoStream::NewFrameListener object (or a derivative) that will respond to this event. 848 | @returns Status code indicating success or failure of the operation. 849 | */ 850 | Status addNewFrameListener(NewFrameListener* pListener) 851 | { 852 | if (!isValid()) 853 | { 854 | return STATUS_ERROR; 855 | } 856 | 857 | return (Status)oniStreamRegisterNewFrameCallback(m_stream, pListener->callback, pListener, &pListener->m_callbackHandle); 858 | } 859 | 860 | /** 861 | Removes a Listener from this video stream list. The listener removed will no longer receive new frame events from this stream. 862 | @param [in] pListener Pointer to the listener object to be removed. 863 | */ 864 | void removeNewFrameListener(NewFrameListener* pListener) 865 | { 866 | if (!isValid()) 867 | { 868 | return; 869 | } 870 | 871 | oniStreamUnregisterNewFrameCallback(m_stream, pListener->m_callbackHandle); 872 | pListener->m_callbackHandle = NULL; 873 | } 874 | 875 | /** 876 | Sets the frame buffers allocator for this video stream. 877 | @param [in] pAllocator Pointer to the frame buffers allocator object. Pass NULL to return to default frame allocator. 878 | @returns ONI_STATUS_OUT_OF_FLOW The frame buffers allocator cannot be set while stream is streaming. 
879 | */ 880 | Status setFrameBuffersAllocator(FrameAllocator* pAllocator) 881 | { 882 | if (!isValid()) 883 | { 884 | return STATUS_ERROR; 885 | } 886 | 887 | if (pAllocator == NULL) 888 | { 889 | return (Status)oniStreamSetFrameBuffersAllocator(m_stream, NULL, NULL, NULL); 890 | } 891 | else 892 | { 893 | return (Status)oniStreamSetFrameBuffersAllocator(m_stream, pAllocator->allocateFrameBufferCallback, pAllocator->freeFrameBufferCallback, pAllocator); 894 | } 895 | } 896 | 897 | /** 898 | @internal 899 | Get an internal handle. This handle can be used via the C API. 900 | */ 901 | OniStreamHandle _getHandle() const 902 | { 903 | return m_stream; 904 | } 905 | 906 | /** 907 | Gets an object through which several camera settings can be configured. 908 | @returns NULL if the stream doesn't support camera settings. 909 | */ 910 | CameraSettings* getCameraSettings() {return m_pCameraSettings;} 911 | 912 | /** 913 | General function for obtaining the value of stream specific properties. 914 | There are convenience functions available for all commonly used properties, so it is not 915 | expected that applications will make direct use of the getProperty function very often. 916 | 917 | @param [in] propertyId The numerical ID of the property to be queried. 918 | @param [out] data Place to store the value of the property. 919 | @param [in,out] dataSize IN: Size of the buffer passed in the @c data argument. OUT: the actual written size. 920 | @returns Status code indicating success or failure of this operation. 921 | */ 922 | Status getProperty(int propertyId, void* data, int* dataSize) const 923 | { 924 | if (!isValid()) 925 | { 926 | return STATUS_ERROR; 927 | } 928 | 929 | return (Status)oniStreamGetProperty(m_stream, propertyId, data, dataSize); 930 | } 931 | 932 | /** 933 | General function for setting the value of stream specific properties. 
934 | There are convenience functions available for all commonly used properties, so it is not 935 | expected that applications will make direct use of the setProperty function very often. 936 | 937 | @param [in] propertyId The numerical ID of the property to be set. 938 | @param [in] data Place to store the data to be written to the property. 939 | @param [in] dataSize Size of the data to be written to the property. 940 | @returns Status code indicating success or failure of this operation. 941 | */ 942 | Status setProperty(int propertyId, const void* data, int dataSize) 943 | { 944 | if (!isValid()) 945 | { 946 | return STATUS_ERROR; 947 | } 948 | 949 | return (Status)oniStreamSetProperty(m_stream, propertyId, data, dataSize); 950 | } 951 | 952 | /** 953 | Get the current video mode information for this video stream. 954 | This includes its resolution, fps and stream format. 955 | 956 | @returns Current video mode information for this video stream. 957 | */ 958 | VideoMode getVideoMode() const 959 | { 960 | VideoMode videoMode; 961 | getProperty(STREAM_PROPERTY_VIDEO_MODE, static_cast(&videoMode)); 962 | return videoMode; 963 | } 964 | 965 | /** 966 | Changes the current video mode of this stream. Recommended practice is to use @ref Device::getSensorInfo(), and 967 | then @ref SensorInfo::getSupportedVideoModes() to obtain a list of valid video mode settings for this stream. Then, 968 | pass a valid @ref VideoMode to @ref setVideoMode to ensure correct operation. 969 | 970 | @param [in] videoMode Desired new video mode for this stream. 971 | returns Status code indicating success or failure of this operation. 972 | */ 973 | Status setVideoMode(const VideoMode& videoMode) 974 | { 975 | return setProperty(STREAM_PROPERTY_VIDEO_MODE, static_cast(videoMode)); 976 | } 977 | 978 | /** 979 | Provides the maximum possible value for pixels obtained by this stream. This is most useful for 980 | getting the maximum possible value of depth streams. 
981 | @returns Maximum possible pixel value. 982 | */ 983 | int getMaxPixelValue() const 984 | { 985 | int maxValue; 986 | Status rc = getProperty(STREAM_PROPERTY_MAX_VALUE, &maxValue); 987 | if (rc != STATUS_OK) 988 | { 989 | return 0; 990 | } 991 | return maxValue; 992 | } 993 | 994 | /** 995 | Provides the smallest possible value for pixels obtains by this VideoStream. This is most useful 996 | for getting the minimum possible value that will be reported by a depth stream. 997 | @returns Minimum possible pixel value that can come from this stream. 998 | */ 999 | int getMinPixelValue() const 1000 | { 1001 | int minValue; 1002 | Status rc = getProperty(STREAM_PROPERTY_MIN_VALUE, &minValue); 1003 | if (rc != STATUS_OK) 1004 | { 1005 | return 0; 1006 | } 1007 | return minValue; 1008 | } 1009 | 1010 | /** 1011 | Checks whether this stream supports cropping. 1012 | @returns true if the stream supports cropping, false if it does not. 1013 | */ 1014 | bool isCroppingSupported() const 1015 | { 1016 | return isPropertySupported(STREAM_PROPERTY_CROPPING); 1017 | } 1018 | 1019 | /** 1020 | Obtains the current cropping settings for this stream. 1021 | @param [out] pOriginX X coordinate of the upper left corner of the cropping window 1022 | @param [out] pOriginY Y coordinate of the upper left corner of the cropping window 1023 | @param [out] pWidth Horizontal width of the cropping window, in pixels 1024 | @param [out] pHeight Vertical width of the cropping window, in pixels 1025 | returns true if cropping is currently enabled, false if it is not. 
1026 | */ 1027 | bool getCropping(int* pOriginX, int* pOriginY, int* pWidth, int* pHeight) const 1028 | { 1029 | OniCropping cropping; 1030 | bool enabled = false; 1031 | 1032 | Status rc = getProperty(STREAM_PROPERTY_CROPPING, &cropping); 1033 | 1034 | if (rc == STATUS_OK) 1035 | { 1036 | *pOriginX = cropping.originX; 1037 | *pOriginY = cropping.originY; 1038 | *pWidth = cropping.width; 1039 | *pHeight = cropping.height; 1040 | enabled = (cropping.enabled == TRUE); 1041 | } 1042 | 1043 | return enabled; 1044 | } 1045 | 1046 | /** 1047 | Changes the cropping settings for this stream. You can use the @ref isCroppingSupported() 1048 | function to make sure cropping is supported before calling this function. 1049 | @param [in] originX New X coordinate of the upper left corner of the cropping window. 1050 | @param [in] originY New Y coordinate of the upper left corner of the cropping window. 1051 | @param [in] width New horizontal width for the cropping window, in pixels. 1052 | @param [in] height New vertical height for the cropping window, in pixels. 1053 | @returns Status code indicating success or failure of this operation. 1054 | */ 1055 | Status setCropping(int originX, int originY, int width, int height) 1056 | { 1057 | OniCropping cropping; 1058 | cropping.enabled = true; 1059 | cropping.originX = originX; 1060 | cropping.originY = originY; 1061 | cropping.width = width; 1062 | cropping.height = height; 1063 | return setProperty(STREAM_PROPERTY_CROPPING, cropping); 1064 | } 1065 | 1066 | /** 1067 | Disables cropping. 1068 | @returns Status code indicating success or failure of this operation. 1069 | */ 1070 | Status resetCropping() 1071 | { 1072 | OniCropping cropping; 1073 | cropping.enabled = false; 1074 | return setProperty(STREAM_PROPERTY_CROPPING, cropping); 1075 | } 1076 | 1077 | /** 1078 | Check whether mirroring is currently turned on for this stream. 1079 | @returns true if mirroring is currently enabled, false otherwise. 
1080 | */ 1081 | bool getMirroringEnabled() const 1082 | { 1083 | OniBool enabled; 1084 | Status rc = getProperty(STREAM_PROPERTY_MIRRORING, &enabled); 1085 | if (rc != STATUS_OK) 1086 | { 1087 | return false; 1088 | } 1089 | return enabled == TRUE; 1090 | } 1091 | 1092 | /** 1093 | Enable or disable mirroring for this stream. 1094 | @param [in] isEnabled true to enable mirroring, false to disable it. 1095 | @returns Status code indicating the success or failure of this operation. 1096 | */ 1097 | Status setMirroringEnabled(bool isEnabled) 1098 | { 1099 | return setProperty(STREAM_PROPERTY_MIRRORING, isEnabled ? TRUE : FALSE); 1100 | } 1101 | 1102 | /** 1103 | Gets the horizontal field of view of frames received from this stream. 1104 | @returns Horizontal field of view, in radians. 1105 | */ 1106 | float getHorizontalFieldOfView() const 1107 | { 1108 | float horizontal = 0; 1109 | getProperty(STREAM_PROPERTY_HORIZONTAL_FOV, &horizontal); 1110 | return horizontal; 1111 | } 1112 | 1113 | /** 1114 | Gets the vertical field of view of frames received from this stream. 1115 | @returns Vertical field of view, in radians. 1116 | */ 1117 | float getVerticalFieldOfView() const 1118 | { 1119 | float vertical = 0; 1120 | getProperty(STREAM_PROPERTY_VERTICAL_FOV, &vertical); 1121 | return vertical; 1122 | } 1123 | 1124 | /** 1125 | Function for setting a value of a stream property using an arbitrary input type. 1126 | There are convenience functions available for all commonly used properties, so it is not 1127 | expected that applications will make direct use of this function very often. 1128 | @tparam [in] T Data type of the value to be passed to the property. 1129 | @param [in] propertyId The numerical ID of the property to be set. 1130 | @param [in] value Data to be sent to the property. 1131 | @returns Status code indicating success or failure of this operation. 
1132 | */ 1133 | template 1134 | Status setProperty(int propertyId, const T& value) 1135 | { 1136 | return setProperty(propertyId, &value, sizeof(T)); 1137 | } 1138 | 1139 | /** 1140 | Function for getting the value from a property using an arbitrary output type. 1141 | There are convenience functions available for all commonly used properties, so it is not 1142 | expected that applications will make direct use of this function very often. 1143 | @tparam [in] T Data type of the value to be read. 1144 | @param [in] propertyId The numerical ID of the property to be read. 1145 | @param [in, out] value Pointer to a place to store the value read from the property. 1146 | @returns Status code indicating success or failure of this operation. 1147 | */ 1148 | template 1149 | Status getProperty(int propertyId, T* value) const 1150 | { 1151 | int size = sizeof(T); 1152 | return getProperty(propertyId, value, &size); 1153 | } 1154 | 1155 | /** 1156 | Checks if a specific property is supported by the video stream. 1157 | @param [in] propertyId Property to be checked. 1158 | @returns true if the property is supported, false otherwise. 1159 | */ 1160 | bool isPropertySupported(int propertyId) const 1161 | { 1162 | if (!isValid()) 1163 | { 1164 | return false; 1165 | } 1166 | 1167 | return oniStreamIsPropertySupported(m_stream, propertyId) == TRUE; 1168 | } 1169 | 1170 | /** 1171 | Invokes a command that takes an arbitrary data type as its input. It is not expected that 1172 | application code will need this function frequently, as all commonly used properties have 1173 | higher level functions provided. 1174 | @param [in] commandId Numerical code of the property to be invoked. 1175 | @param [in] data Data to be passed to the property. 1176 | @param [in] dataSize size of the buffer passed in @c data. 1177 | @returns Status code indicating success or failure of this operation. 
1178 | */ 1179 | Status invoke(int commandId, void* data, int dataSize) 1180 | { 1181 | if (!isValid()) 1182 | { 1183 | return STATUS_ERROR; 1184 | } 1185 | 1186 | return (Status)oniStreamInvoke(m_stream, commandId, data, dataSize); 1187 | } 1188 | 1189 | /** 1190 | Invokes a command that takes an arbitrary data type as its input. It is not expected that 1191 | application code will need this function frequently, as all commonly used properties have 1192 | higher level functions provided. 1193 | @tparam [in] T Type of data to be passed to the property. 1194 | @param [in] commandId Numerical code of the property to be invoked. 1195 | @param [in] value Data to be passed to the property. 1196 | @returns Status code indicating success or failure of this operation. 1197 | */ 1198 | template 1199 | Status invoke(int commandId, T& value) 1200 | { 1201 | return invoke(commandId, &value, sizeof(T)); 1202 | } 1203 | 1204 | /** 1205 | Checks if a specific command is supported by the video stream. 1206 | @param [in] commandId Command to be checked. 1207 | @returns true if the command is supported, false otherwise. 
1208 | */ 1209 | bool isCommandSupported(int commandId) const 1210 | { 1211 | if (!isValid()) 1212 | { 1213 | return false; 1214 | } 1215 | 1216 | return (Status)oniStreamIsCommandSupported(m_stream, commandId) == TRUE; 1217 | } 1218 | 1219 | private: 1220 | friend class Device; 1221 | 1222 | void _setHandle(OniStreamHandle stream) 1223 | { 1224 | m_sensorInfo._setInternal(NULL); 1225 | m_stream = stream; 1226 | 1227 | if (stream != NULL) 1228 | { 1229 | m_sensorInfo._setInternal(oniStreamGetSensorInfo(m_stream)); 1230 | } 1231 | } 1232 | 1233 | private: 1234 | VideoStream(const VideoStream& other); 1235 | VideoStream& operator=(const VideoStream& other); 1236 | 1237 | OniStreamHandle m_stream; 1238 | SensorInfo m_sensorInfo; 1239 | CameraSettings* m_pCameraSettings; 1240 | bool m_isOwner; 1241 | }; 1242 | 1243 | /** 1244 | The Device object abstracts a specific device; either a single hardware device, or a file 1245 | device holding a recording from a hardware device. It offers the ability to connect to 1246 | the device, and obtain information about its configuration and the data streams it can offer. 1247 | 1248 | It provides the means to query and change all configuration parameters that apply to the 1249 | device as a whole. This includes enabling depth/color image registration and frame 1250 | synchronization. 1251 | 1252 | Devices are used when creating and initializing @ref VideoStream "VideoStreams" -- you will need a valid pointer to 1253 | a Device in order to use the VideoStream.create() function. This, along with configuration, is 1254 | the primary use of this class for application developers. 1255 | 1256 | Before devices can be created, @ref OpenNI::initialize() must have been run to make the device drivers 1257 | on the system available to the API. 1258 | */ 1259 | class Device 1260 | { 1261 | public: 1262 | /** 1263 | Default constructor. Creates a new empty Device object. 
This object will be invalid until it is initialized by 1264 | calling its open() function. 1265 | */ 1266 | Device() : m_pPlaybackControl(NULL), m_device(NULL), m_isOwner(true) 1267 | { 1268 | clearSensors(); 1269 | } 1270 | 1271 | /** 1272 | Handle constructor. Creates a Device object based on the given initialized handle. 1273 | This object will not destroy the underlying handle when @ref close() or destructor is called 1274 | */ 1275 | explicit Device(OniDeviceHandle handle) : m_pPlaybackControl(NULL), m_device(NULL), m_isOwner(false) 1276 | { 1277 | _setHandle(handle); 1278 | } 1279 | 1280 | /** 1281 | The destructor calls the @ref close() function, but it is considered a best practice for applications to 1282 | call @ref close() manually on any @ref Device that they run @ref open() on. 1283 | */ 1284 | ~Device() 1285 | { 1286 | if (m_device != NULL) 1287 | { 1288 | close(); 1289 | } 1290 | } 1291 | 1292 | /** 1293 | Opens a device. This can either open a device chosen arbitrarily from all devices 1294 | on the system, or open a specific device selected by passing this function the device URI. 1295 | 1296 | To open any device, simply pass the constant@ref ANY_DEVICE to this function. If multiple 1297 | devices are connected to the system, then one of them will be opened. This procedure is most 1298 | useful when it is known that exactly one device is (or can be) connected to the system. In that case, 1299 | requesting a list of all devices and iterating through it would be a waste of effort. 1300 | 1301 | If multiple devices are (or may be) connected to a system, then a URI will be required to select 1302 | a specific device to open. There are two ways to obtain a URI: from a DeviceConnected event, or 1303 | by calling @ref OpenNI::enumerateDevices(). 
1304 | 1305 | In the case of a DeviceConnected event, the @ref OpenNI::Listener will be provided with a DeviceInfo object 1306 | as an argument to its @ref OpenNI::Listener::onDeviceConnected "onDeviceConnected()" function. 1307 | The DeviceInfo.getUri() function can then be used to obtain the URI. 1308 | 1309 | If the application is not using event handlers, then it can also call the static function 1310 | @ref OpenNI::enumerateDevices(). This will return an array of @ref DeviceInfo objects, one for each device 1311 | currently available to the system. The application can then iterate through this list and 1312 | select the desired device. The URI is again obtained via the @ref DeviceInfo::getUri() function. 1313 | 1314 | Standard codes of type Status are returned indicating whether opening was successful. 1315 | 1316 | @param [in] uri String containing the URI of the device to be opened, or @ref ANY_DEVICE. 1317 | @returns Status code with the outcome of the open operation. 1318 | 1319 | @remark For opening a recording file, pass the file path as a uri. 1320 | */ 1321 | inline Status open(const char* uri); 1322 | 1323 | /** 1324 | Closes the device. This properly closes any files or shuts down hardware, as appropriate. This 1325 | function is currently called by the destructor if not called manually by application code, but it 1326 | is considered a best practice to manually close any device that was opened. 1327 | */ 1328 | inline void close(); 1329 | 1330 | /** 1331 | Provides information about this device in the form of a DeviceInfo object. This object can 1332 | be used to access the URI of the device, as well as various USB descriptor strings that might 1333 | be useful to an application. 1334 | 1335 | Note that valid device info will not be available if this device has not yet been opened. If you are 1336 | trying to obtain a URI to open a device, use OpenNI::enumerateDevices() instead. 
1337 | @returns DeviceInfo object for this Device 1338 | */ 1339 | const DeviceInfo& getDeviceInfo() const 1340 | { 1341 | return m_deviceInfo; 1342 | } 1343 | 1344 | /** 1345 | This function checks to see if one of the specific sensor types defined in @ref SensorType is 1346 | available on this device. This allows an application to, for example, query for the presence 1347 | of a depth sensor, or color sensor. 1348 | @param [in] sensorType of sensor to query for 1349 | @returns true if the Device supports the sensor queried, false otherwise. 1350 | */ 1351 | bool hasSensor(SensorType sensorType) 1352 | { 1353 | int i; 1354 | for (i = 0; (i < ONI_MAX_SENSORS) && (m_aSensorInfo[i].m_pInfo != NULL); ++i) 1355 | { 1356 | if (m_aSensorInfo[i].getSensorType() == sensorType) 1357 | { 1358 | return true; 1359 | } 1360 | } 1361 | 1362 | if (i == ONI_MAX_SENSORS) 1363 | { 1364 | return false; 1365 | } 1366 | 1367 | const OniSensorInfo* pInfo = oniDeviceGetSensorInfo(m_device, (OniSensorType)sensorType); 1368 | 1369 | if (pInfo == NULL) 1370 | { 1371 | return false; 1372 | } 1373 | 1374 | m_aSensorInfo[i]._setInternal(pInfo); 1375 | 1376 | return true; 1377 | } 1378 | 1379 | /** 1380 | Get the @ref SensorInfo for a specific sensor type on this device. The @ref SensorInfo 1381 | is useful primarily for determining which video modes are supported by the sensor. 1382 | @param [in] sensorType of sensor to get information about. 1383 | @returns SensorInfo object corresponding to the sensor type specified, or NULL if such a sensor 1384 | is not available from this device. 1385 | */ 1386 | const SensorInfo* getSensorInfo(SensorType sensorType) 1387 | { 1388 | int i; 1389 | for (i = 0; (i < ONI_MAX_SENSORS) && (m_aSensorInfo[i].m_pInfo != NULL); ++i) 1390 | { 1391 | if (m_aSensorInfo[i].getSensorType() == sensorType) 1392 | { 1393 | return &m_aSensorInfo[i]; 1394 | } 1395 | } 1396 | 1397 | // not found. 
check to see we have additional space 1398 | if (i == ONI_MAX_SENSORS) 1399 | { 1400 | return NULL; 1401 | } 1402 | 1403 | const OniSensorInfo* pInfo = oniDeviceGetSensorInfo(m_device, (OniSensorType)sensorType); 1404 | if (pInfo == NULL) 1405 | { 1406 | return NULL; 1407 | } 1408 | 1409 | m_aSensorInfo[i]._setInternal(pInfo); 1410 | return &m_aSensorInfo[i]; 1411 | } 1412 | 1413 | /** 1414 | @internal 1415 | Get an internal handle. This handle can be used via the C API. 1416 | */ 1417 | OniDeviceHandle _getHandle() const 1418 | { 1419 | return m_device; 1420 | } 1421 | 1422 | /** 1423 | Gets an object through which playback of a file device can be controlled. 1424 | @returns NULL if this device is not a file device. 1425 | */ 1426 | PlaybackControl* getPlaybackControl() {return m_pPlaybackControl;} 1427 | 1428 | /** 1429 | Get the value of a general property of the device. 1430 | There are convenience functions for all the commonly used properties, such as 1431 | image registration and frame synchronization. It is expected for this reason 1432 | that this function will rarely be directly used by applications. 1433 | 1434 | @param [in] propertyId Numerical ID of the property you would like to check. 1435 | @param [out] data Place to store the value of the property. 1436 | @param [in,out] dataSize IN: Size of the buffer passed in the @c data argument. OUT: the actual written size. 1437 | @returns Status code indicating results of this operation. 1438 | */ 1439 | Status getProperty(int propertyId, void* data, int* dataSize) const 1440 | { 1441 | return (Status)oniDeviceGetProperty(m_device, propertyId, data, dataSize); 1442 | } 1443 | 1444 | /** 1445 | Sets the value of a general property of the device. 1446 | There are convenience functions for all the commonly used properties, such as 1447 | image registration and frame synchronization. It is expected for this reason 1448 | that this function will rarely be directly used by applications. 
1449 | 1450 | @param [in] propertyId The numerical ID of the property to be set. 1451 | @param [in] data Place to store the data to be written to the property. 1452 | @param [in] dataSize Size of the data to be written to the property. 1453 | @returns Status code indicating results of this operation. 1454 | */ 1455 | Status setProperty(int propertyId, const void* data, int dataSize) 1456 | { 1457 | return (Status)oniDeviceSetProperty(m_device, propertyId, data, dataSize); 1458 | } 1459 | 1460 | /** 1461 | Checks to see if this device can support registration of color video and depth video. 1462 | Image registration is used to properly superimpose two images from cameras located at different 1463 | points in space. Please see the OpenNi 2.0 Programmer's Guide for more information about 1464 | registration. 1465 | @returns true if image registration is supported by this device, false otherwise. 1466 | */ 1467 | bool isImageRegistrationModeSupported(ImageRegistrationMode mode) const 1468 | { 1469 | return (oniDeviceIsImageRegistrationModeSupported(m_device, (OniImageRegistrationMode)mode) == TRUE); 1470 | } 1471 | 1472 | /** 1473 | Gets the current image registration mode of this device. 1474 | Image registration is used to properly superimpose two images from cameras located at different 1475 | points in space. Please see the OpenNi 2.0 Programmer's Guide for more information about 1476 | registration. 1477 | @returns Current image registration mode. See @ref ImageRegistrationMode for possible return values. 1478 | */ 1479 | ImageRegistrationMode getImageRegistrationMode() const 1480 | { 1481 | ImageRegistrationMode mode; 1482 | Status rc = getProperty(DEVICE_PROPERTY_IMAGE_REGISTRATION, &mode); 1483 | if (rc != STATUS_OK) 1484 | { 1485 | return IMAGE_REGISTRATION_OFF; 1486 | } 1487 | return mode; 1488 | } 1489 | 1490 | /** 1491 | Sets the image registration on this device. 
1492 | Image registration is used to properly superimpose two images from cameras located at different 1493 | points in space. Please see the OpenNi 2.0 Programmer's Guide for more information about 1494 | registration. 1495 | 1496 | See @ref ImageRegistrationMode for a list of valid settings to pass to this function. 1497 | 1498 | It is a good practice to first check if the mode is supported by calling @ref isImageRegistrationModeSupported(). 1499 | 1500 | @param [in] mode Desired new value for the image registration mode. 1501 | @returns Status code for the operation. 1502 | */ 1503 | Status setImageRegistrationMode(ImageRegistrationMode mode) 1504 | { 1505 | return setProperty(DEVICE_PROPERTY_IMAGE_REGISTRATION, mode); 1506 | } 1507 | 1508 | /** 1509 | Checks whether this Device object is currently connected to an actual file or hardware device. 1510 | @returns true if the Device is connected, false otherwise. 1511 | */ 1512 | bool isValid() const 1513 | { 1514 | return m_device != NULL; 1515 | } 1516 | 1517 | /** 1518 | Checks whether this device is a file device (i.e. a recording). 1519 | @returns true if this is a file device, false otherwise. 1520 | */ 1521 | bool isFile() const 1522 | { 1523 | return isPropertySupported(DEVICE_PROPERTY_PLAYBACK_SPEED) && 1524 | isPropertySupported(DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED) && 1525 | isCommandSupported(DEVICE_COMMAND_SEEK); 1526 | } 1527 | 1528 | /** 1529 | Used to turn the depth/color frame synchronization feature on and off. When frame synchronization 1530 | is enabled, the device will deliver depth and image frames that are separated in time 1531 | by some maximum value. When disabled, the phase difference between depth and image frame 1532 | generation cannot be guaranteed. 
1533 | @param [in] isEnabled Set to TRUE to enable synchronization, FALSE to disable it 1534 | @returns Status code indicating success or failure of this operation 1535 | */ 1536 | Status setDepthColorSyncEnabled(bool isEnabled) 1537 | { 1538 | Status rc = STATUS_OK; 1539 | 1540 | if (isEnabled) 1541 | { 1542 | rc = (Status)oniDeviceEnableDepthColorSync(m_device); 1543 | } 1544 | else 1545 | { 1546 | oniDeviceDisableDepthColorSync(m_device); 1547 | } 1548 | 1549 | return rc; 1550 | } 1551 | 1552 | bool getDepthColorSyncEnabled() 1553 | { 1554 | return oniDeviceGetDepthColorSyncEnabled(m_device) == TRUE; 1555 | } 1556 | 1557 | /** 1558 | Sets a property that takes an arbitrary data type as its input. It is not expected that 1559 | application code will need this function frequently, as all commonly used properties have 1560 | higher level functions provided. 1561 | 1562 | @tparam T Type of data to be passed to the property. 1563 | @param [in] propertyId The numerical ID of the property to be set. 1564 | @param [in] value Place to store the data to be written to the property. 1565 | @returns Status code indicating success or failure of this operation. 1566 | */ 1567 | template 1568 | Status setProperty(int propertyId, const T& value) 1569 | { 1570 | return setProperty(propertyId, &value, sizeof(T)); 1571 | } 1572 | 1573 | /** 1574 | Checks a property that provides an arbitrary data type as its output. It is not expected that 1575 | application code will need this function frequently, as all commonly used properties have 1576 | higher level functions provided. 1577 | @tparam [in] T Data type of the value to be read. 1578 | @param [in] propertyId The numerical ID of the property to be read. 1579 | @param [in, out] value Pointer to a place to store the value read from the property. 1580 | @returns Status code indicating success or failure of this operation. 
1581 | */ 1582 | template 1583 | Status getProperty(int propertyId, T* value) const 1584 | { 1585 | int size = sizeof(T); 1586 | return getProperty(propertyId, value, &size); 1587 | } 1588 | 1589 | /** 1590 | Checks if a specific property is supported by the device. 1591 | @param [in] propertyId Property to be checked. 1592 | @returns true if the property is supported, false otherwise. 1593 | */ 1594 | bool isPropertySupported(int propertyId) const 1595 | { 1596 | return oniDeviceIsPropertySupported(m_device, propertyId) == TRUE; 1597 | } 1598 | 1599 | /** 1600 | Invokes a command that takes an arbitrary data type as its input. It is not expected that 1601 | application code will need this function frequently, as all commonly used properties have 1602 | higher level functions provided. 1603 | @param [in] commandId Numerical code of the property to be invoked. 1604 | @param [in] data Data to be passed to the property. 1605 | @param [in] dataSize size of the buffer passed in @c data. 1606 | @returns Status code indicating success or failure of this operation. 1607 | */ 1608 | Status invoke(int commandId, void* data, int dataSize) 1609 | { 1610 | return (Status)oniDeviceInvoke(m_device, commandId, data, dataSize); 1611 | } 1612 | 1613 | /** 1614 | Invokes a command that takes an arbitrary data type as its input. It is not expected that 1615 | application code will need this function frequently, as all commonly used properties have 1616 | higher level functions provided. 1617 | @tparam [in] T Type of data to be passed to the property. 1618 | @param [in] propertyId Numerical code of the property to be invoked. 1619 | @param [in] value Data to be passed to the property. 1620 | @returns Status code indicating success or failure of this operation. 1621 | */ 1622 | template 1623 | Status invoke(int propertyId, T& value) 1624 | { 1625 | return invoke(propertyId, &value, sizeof(T)); 1626 | } 1627 | 1628 | /** 1629 | Checks if a specific command is supported by the device. 
1630 | @param [in] commandId Command to be checked. 1631 | @returns true if the command is supported, false otherwise. 1632 | */ 1633 | bool isCommandSupported(int commandId) const 1634 | { 1635 | return oniDeviceIsCommandSupported(m_device, commandId) == TRUE; 1636 | } 1637 | 1638 | /** @internal **/ 1639 | inline Status _openEx(const char* uri, const char* mode); 1640 | 1641 | private: 1642 | Device(const Device&); 1643 | Device& operator=(const Device&); 1644 | 1645 | void clearSensors() 1646 | { 1647 | for (int i = 0; i < ONI_MAX_SENSORS; ++i) 1648 | { 1649 | m_aSensorInfo[i]._setInternal(NULL); 1650 | } 1651 | } 1652 | 1653 | inline Status _setHandle(OniDeviceHandle deviceHandle); 1654 | 1655 | private: 1656 | PlaybackControl* m_pPlaybackControl; 1657 | 1658 | OniDeviceHandle m_device; 1659 | DeviceInfo m_deviceInfo; 1660 | SensorInfo m_aSensorInfo[ONI_MAX_SENSORS]; 1661 | 1662 | bool m_isOwner; 1663 | }; 1664 | 1665 | /** 1666 | * The PlaybackControl class provides access to a series of specific to playing back 1667 | * a recording from a file device. 1668 | * 1669 | * When playing a stream back from a recording instead of playing from a live device, 1670 | * it is possible to vary playback speed, change the current time location (ie 1671 | * fast forward / rewind / seek), specify whether the playback should be repeated at the end 1672 | * of the recording, and query the total size of the recording. 1673 | * 1674 | * Since none of these functions make sense in the context of a physical device, they are 1675 | * split out into a seperate playback control class. To use, simply create your file device, 1676 | * create a PlaybackControl, and then attach the PlaybackControl to the file device. 1677 | */ 1678 | class PlaybackControl 1679 | { 1680 | public: 1681 | 1682 | /** 1683 | * Deconstructor. Destroys a PlaybackControl class. 
The deconstructor presently detaches 1684 | * from its recording automatically, but it is considered a best practice for applications to 1685 | * manually detach from any stream that was attached to. 1686 | */ 1687 | ~PlaybackControl() 1688 | { 1689 | detach(); 1690 | } 1691 | 1692 | /** 1693 | * Getter function for the current playback speed of this device. 1694 | * 1695 | * This value is expressed as a multiple of the speed the original 1696 | * recording was taken at. For example, if the original recording was at 30fps, and 1697 | * playback speed is set to 0.5, then the recording will play at 15fps. If playback speed 1698 | * is set to 2.0, then the recording would playback at 60fps. 1699 | * 1700 | * In addition, there are two "special" values. A playback speed of 0.0 indicates that the 1701 | * playback should occur as fast as the system is capable of returning frames. This is 1702 | * most useful when testing algorithms on large datasets, as it enables playback to be 1703 | * done at a much higher rate than would otherwise be possible. 1704 | * 1705 | * A value of -1 indicates that speed is "manual". In this mode, new frames will only 1706 | * become available when an application manually reads them. If used in a polling loop, 1707 | * this setting also enables systems to read and process frames limited only by 1708 | * available processing speeds. 1709 | * 1710 | * @returns Current playback speed of the device, measured as ratio of recording speed. 1711 | */ 1712 | float getSpeed() const 1713 | { 1714 | if (!isValid()) 1715 | { 1716 | return 0.0f; 1717 | } 1718 | float speed; 1719 | Status rc = m_pDevice->getProperty(DEVICE_PROPERTY_PLAYBACK_SPEED, &speed); 1720 | if (rc != STATUS_OK) 1721 | { 1722 | return 1.0f; 1723 | } 1724 | return speed; 1725 | } 1726 | /** 1727 | * Setter function for the playback speed of the device. For a full explanation of 1728 | * what this value means @see PlaybackControl::getSpeed(). 
1729 | * 1730 | * @param [in] speed Desired new value of playback speed, as ratio of original recording. 1731 | * @returns Status code indicating success or failure of this operation. 1732 | */ 1733 | Status setSpeed(float speed) 1734 | { 1735 | if (!isValid()) 1736 | { 1737 | return STATUS_NO_DEVICE; 1738 | } 1739 | return m_pDevice->setProperty(DEVICE_PROPERTY_PLAYBACK_SPEED, speed); 1740 | } 1741 | 1742 | /** 1743 | * Gets the current repeat setting of the file device. 1744 | * 1745 | * @returns true if repeat is enabled, false if not enabled. 1746 | */ 1747 | bool getRepeatEnabled() const 1748 | { 1749 | if (!isValid()) 1750 | { 1751 | return false; 1752 | } 1753 | 1754 | OniBool repeat; 1755 | Status rc = m_pDevice->getProperty(DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED, &repeat); 1756 | if (rc != STATUS_OK) 1757 | { 1758 | return false; 1759 | } 1760 | 1761 | return repeat == TRUE; 1762 | } 1763 | 1764 | /** 1765 | * Changes the current repeat mode of the device. If repeat mode is turned on, then the recording will 1766 | * begin playback again at the beginning after the last frame is read. If turned off, no more frames 1767 | * will become available after last frame is read. 1768 | * 1769 | * @param [in] repeat New value for repeat -- true to enable, false to disable 1770 | * @returns Status code indicating success or failure of this operations. 1771 | */ 1772 | Status setRepeatEnabled(bool repeat) 1773 | { 1774 | if (!isValid()) 1775 | { 1776 | return STATUS_NO_DEVICE; 1777 | } 1778 | 1779 | return m_pDevice->setProperty(DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED, repeat ? TRUE : FALSE); 1780 | } 1781 | 1782 | /** 1783 | * Seeks within a VideoStream to a given FrameID. Note that when this function is called on one 1784 | * stream, all other streams will also be changed to the corresponding place in the recording. 
The FrameIDs 1785 | * of different streams may not match, since FrameIDs may differ for streams that are not synchronized, but 1786 | * the recording will set all streams to the same moment in time. 1787 | * 1788 | * @param [in] stream Stream for which the frameIndex value is valid. 1789 | * @param [in] frameIndex Frame index to move playback to 1790 | * @returns Status code indicating success or failure of this operation 1791 | */ 1792 | Status seek(const VideoStream& stream, int frameIndex) 1793 | { 1794 | if (!isValid()) 1795 | { 1796 | return STATUS_NO_DEVICE; 1797 | } 1798 | OniSeek seek; 1799 | seek.frameIndex = frameIndex; 1800 | seek.stream = stream._getHandle(); 1801 | return m_pDevice->invoke(DEVICE_COMMAND_SEEK, seek); 1802 | } 1803 | 1804 | /** 1805 | * Provides the a count of frames that this recording contains for a given stream. This is useful 1806 | * both to determine the length of the recording, and to ensure that a valid Frame Index is set when using 1807 | * the @ref PlaybackControl::seek() function. 
1808 | * 1809 | * @param [in] stream The video stream to count frames for 1810 | * @returns Number of frames in provided @ref VideoStream, or 0 if the stream is not part of the recording 1811 | */ 1812 | int getNumberOfFrames(const VideoStream& stream) const 1813 | { 1814 | int numOfFrames = -1; 1815 | Status rc = stream.getProperty(STREAM_PROPERTY_NUMBER_OF_FRAMES, &numOfFrames); 1816 | if (rc != STATUS_OK) 1817 | { 1818 | return 0; 1819 | } 1820 | return numOfFrames; 1821 | } 1822 | 1823 | bool isValid() const 1824 | { 1825 | return m_pDevice != NULL; 1826 | } 1827 | private: 1828 | Status attach(Device* device) 1829 | { 1830 | if (!device->isValid() || !device->isFile()) 1831 | { 1832 | return STATUS_ERROR; 1833 | } 1834 | 1835 | detach(); 1836 | m_pDevice = device; 1837 | 1838 | return STATUS_OK; 1839 | } 1840 | void detach() 1841 | { 1842 | m_pDevice = NULL; 1843 | } 1844 | 1845 | friend class Device; 1846 | PlaybackControl(Device* pDevice) : m_pDevice(NULL) 1847 | { 1848 | if (pDevice != NULL) 1849 | { 1850 | attach(pDevice); 1851 | } 1852 | } 1853 | 1854 | Device* m_pDevice; 1855 | }; 1856 | 1857 | class CameraSettings 1858 | { 1859 | public: 1860 | // setters 1861 | Status setAutoExposureEnabled(bool enabled) 1862 | { 1863 | return setProperty(STREAM_PROPERTY_AUTO_EXPOSURE, enabled ? TRUE : FALSE); 1864 | } 1865 | Status setAutoWhiteBalanceEnabled(bool enabled) 1866 | { 1867 | return setProperty(STREAM_PROPERTY_AUTO_WHITE_BALANCE, enabled ? 
TRUE : FALSE); 1868 | } 1869 | 1870 | bool getAutoExposureEnabled() const 1871 | { 1872 | OniBool enabled = FALSE; 1873 | 1874 | Status rc = getProperty(STREAM_PROPERTY_AUTO_EXPOSURE, &enabled); 1875 | return rc == STATUS_OK && enabled == TRUE; 1876 | } 1877 | bool getAutoWhiteBalanceEnabled() const 1878 | { 1879 | OniBool enabled = FALSE; 1880 | 1881 | Status rc = getProperty(STREAM_PROPERTY_AUTO_WHITE_BALANCE, &enabled); 1882 | return rc == STATUS_OK && enabled == TRUE; 1883 | } 1884 | 1885 | Status setGain(int gain) 1886 | { 1887 | return setProperty(STREAM_PROPERTY_GAIN, gain); 1888 | } 1889 | Status setExposure(int exposure) 1890 | { 1891 | return setProperty(STREAM_PROPERTY_EXPOSURE, exposure); 1892 | } 1893 | int getGain() 1894 | { 1895 | int gain; 1896 | Status rc = getProperty(STREAM_PROPERTY_GAIN, &gain); 1897 | if (rc != STATUS_OK) 1898 | { 1899 | return 100; 1900 | } 1901 | return gain; 1902 | } 1903 | int getExposure() 1904 | { 1905 | int exposure; 1906 | Status rc = getProperty(STREAM_PROPERTY_EXPOSURE, &exposure); 1907 | if (rc != STATUS_OK) 1908 | { 1909 | return 0; 1910 | } 1911 | return exposure; 1912 | } 1913 | 1914 | bool isValid() const {return m_pStream != NULL;} 1915 | private: 1916 | template 1917 | Status getProperty(int propertyId, T* value) const 1918 | { 1919 | if (!isValid()) return STATUS_NOT_SUPPORTED; 1920 | 1921 | return m_pStream->getProperty(propertyId, value); 1922 | } 1923 | template 1924 | Status setProperty(int propertyId, const T& value) 1925 | { 1926 | if (!isValid()) return STATUS_NOT_SUPPORTED; 1927 | 1928 | return m_pStream->setProperty(propertyId, value); 1929 | } 1930 | 1931 | friend class VideoStream; 1932 | CameraSettings(VideoStream* pStream) 1933 | { 1934 | m_pStream = pStream; 1935 | } 1936 | 1937 | VideoStream* m_pStream; 1938 | }; 1939 | 1940 | 1941 | /** 1942 | * The OpenNI class is a static entry point to the library. 
It is used by every OpenNI 2.0 1943 | * application to initialize the SDK and drivers to enable creation of valid device objects. 1944 | * 1945 | * It also defines a listener class and events that enable for event driven notification of 1946 | * device connection, device disconnection, and device configuration changes. 1947 | * 1948 | * In addition, it gives access to SDK version information and provides a function that allows 1949 | * you to wait for data to become available on any one of a list of streams (as opposed to 1950 | * waiting for data on one specific stream with functions provided by the VideoStream class) 1951 | * 1952 | */ 1953 | class OpenNI 1954 | { 1955 | public: 1956 | 1957 | /** 1958 | * The OpenNI::DeviceConnectedListener class provides a means of registering for, and responding to 1959 | * when a device is connected. 1960 | * 1961 | * onDeviceConnected is called whenever a new device is connected to the system (ie this event 1962 | * would be triggered when a new sensor is manually plugged into the host system running the 1963 | * application) 1964 | * 1965 | * To use this class, you should write a new class that inherits from it, and override the 1966 | * onDeviceConnected method. Once you instantiate your class, use the 1967 | * OpenNI::addDeviceConnectedListener() function to add your listener object to OpenNI's list of listeners. Your 1968 | * handler function will then be called whenever the event occurs. A OpenNI::removeDeviceConnectedListener() 1969 | * function is also provided, if you want to have your class stop listening to these events for any 1970 | * reason. 
1971 | */ 1972 | class DeviceConnectedListener 1973 | { 1974 | public: 1975 | DeviceConnectedListener() 1976 | { 1977 | m_deviceConnectedCallbacks.deviceConnected = deviceConnectedCallback; 1978 | m_deviceConnectedCallbacks.deviceDisconnected = NULL; 1979 | m_deviceConnectedCallbacks.deviceStateChanged = NULL; 1980 | m_deviceConnectedCallbacksHandle = NULL; 1981 | } 1982 | 1983 | virtual ~DeviceConnectedListener() 1984 | { 1985 | } 1986 | 1987 | /** 1988 | * Callback function for the onDeviceConnected event. This function will be 1989 | * called whenever this event occurs. When this happens, a pointer to the @ref DeviceInfo 1990 | * object for the newly connected device will be supplied. Note that once a 1991 | * device is removed, if it was opened by a @ref Device object, that object can no longer be 1992 | * used to access the device, even if it was reconnected. Once a device was reconnected, 1993 | * @ref Device::open() should be called again in order to use this device. 1994 | * 1995 | * If you wish to open the new device as it is connected, simply query the provided DeviceInfo 1996 | * object to obtain the URI of the device, and pass this URI to the Device.Open() function. 1997 | */ 1998 | virtual void onDeviceConnected(const DeviceInfo*) = 0; 1999 | private: 2000 | static void ONI_CALLBACK_TYPE deviceConnectedCallback(const OniDeviceInfo* pInfo, void* pCookie) 2001 | { 2002 | DeviceConnectedListener* pListener = (DeviceConnectedListener*)pCookie; 2003 | pListener->onDeviceConnected(static_cast(pInfo)); 2004 | } 2005 | 2006 | friend class OpenNI; 2007 | OniDeviceCallbacks m_deviceConnectedCallbacks; 2008 | OniCallbackHandle m_deviceConnectedCallbacksHandle; 2009 | 2010 | }; 2011 | /** 2012 | * The OpenNI::DeviceDisconnectedListener class provides a means of registering for, and responding to 2013 | * when a device is disconnected. 2014 | * 2015 | * onDeviceDisconnected is called when a device is removed from the system. 
Note that once a 2016 | * device is removed, if it was opened by a @ref Device object, that object can no longer be 2017 | * used to access the device, even if it was reconnected. Once a device was reconnected, 2018 | * @ref Device::open() should be called again in order to use this device. 2019 | * 2020 | * To use this class, you should write a new class that inherits from it, and override the 2021 | * onDeviceDisconnected method. Once you instantiate your class, use the 2022 | * OpenNI::addDeviceDisconnectedListener() function to add your listener object to OpenNI's list of listeners. Your 2023 | * handler function will then be called whenever the event occurs. A OpenNI::removeDeviceDisconnectedListener() 2024 | * function is also provided, if you want to have your class stop listening to these events for any 2025 | * reason. 2026 | */ 2027 | class DeviceDisconnectedListener 2028 | { 2029 | public: 2030 | DeviceDisconnectedListener() 2031 | { 2032 | m_deviceDisconnectedCallbacks.deviceConnected = NULL; 2033 | m_deviceDisconnectedCallbacks.deviceDisconnected = deviceDisconnectedCallback; 2034 | m_deviceDisconnectedCallbacks.deviceStateChanged = NULL; 2035 | m_deviceDisconnectedCallbacksHandle = NULL; 2036 | } 2037 | 2038 | virtual ~DeviceDisconnectedListener() 2039 | { 2040 | } 2041 | 2042 | /** 2043 | * Callback function for the onDeviceDisconnected event. This function will be 2044 | * called whenever this event occurs. When this happens, a pointer to the DeviceInfo 2045 | * object for the newly disconnected device will be supplied. Note that once a 2046 | * device is removed, if it was opened by a @ref Device object, that object can no longer be 2047 | * used to access the device, even if it was reconnected. Once a device was reconnected, 2048 | * @ref Device::open() should be called again in order to use this device. 
2049 | */ 2050 | virtual void onDeviceDisconnected(const DeviceInfo*) = 0; 2051 | private: 2052 | static void ONI_CALLBACK_TYPE deviceDisconnectedCallback(const OniDeviceInfo* pInfo, void* pCookie) 2053 | { 2054 | DeviceDisconnectedListener* pListener = (DeviceDisconnectedListener*)pCookie; 2055 | pListener->onDeviceDisconnected(static_cast(pInfo)); 2056 | } 2057 | 2058 | friend class OpenNI; 2059 | OniDeviceCallbacks m_deviceDisconnectedCallbacks; 2060 | OniCallbackHandle m_deviceDisconnectedCallbacksHandle; 2061 | }; 2062 | /** 2063 | * The OpenNI::DeviceStateChangedListener class provides a means of registering for, and responding to 2064 | * when a device's state is changed. 2065 | * 2066 | * onDeviceStateChanged is triggered whenever the state of a connected device is changed. 2067 | * 2068 | * To use this class, you should write a new class that inherits from it, and override the 2069 | * onDeviceStateChanged method. Once you instantiate your class, use the 2070 | * OpenNI::addDeviceStateChangedListener() function to add your listener object to OpenNI's list of listeners. Your 2071 | * handler function will then be called whenever the event occurs. A OpenNI::removeDeviceStateChangedListener() 2072 | * function is also provided, if you want to have your class stop listening to these events for any 2073 | * reason. 2074 | */ 2075 | class DeviceStateChangedListener 2076 | { 2077 | public: 2078 | DeviceStateChangedListener() 2079 | { 2080 | m_deviceStateChangedCallbacks.deviceConnected = NULL; 2081 | m_deviceStateChangedCallbacks.deviceDisconnected = NULL; 2082 | m_deviceStateChangedCallbacks.deviceStateChanged = deviceStateChangedCallback; 2083 | m_deviceStateChangedCallbacksHandle = NULL; 2084 | } 2085 | 2086 | virtual ~DeviceStateChangedListener() 2087 | { 2088 | } 2089 | 2090 | /** 2091 | * Callback function for the onDeviceStateChanged event. This function will be 2092 | * called whenever this event occurs. 
When this happens, a pointer to a DeviceInfo 2093 | * object for the affected device will be supplied, as well as the new DeviceState 2094 | * value of that device. 2095 | */ 2096 | virtual void onDeviceStateChanged(const DeviceInfo*, DeviceState) = 0; 2097 | private: 2098 | static void ONI_CALLBACK_TYPE deviceStateChangedCallback(const OniDeviceInfo* pInfo, OniDeviceState state, void* pCookie) 2099 | { 2100 | DeviceStateChangedListener* pListener = (DeviceStateChangedListener*)pCookie; 2101 | pListener->onDeviceStateChanged(static_cast(pInfo), DeviceState(state)); 2102 | } 2103 | 2104 | friend class OpenNI; 2105 | OniDeviceCallbacks m_deviceStateChangedCallbacks; 2106 | OniCallbackHandle m_deviceStateChangedCallbacksHandle; 2107 | }; 2108 | 2109 | /** 2110 | Initialize the library. 2111 | This will load all available drivers, and see which devices are available 2112 | It is forbidden to call any other method in OpenNI before calling @ref initialize(). 2113 | */ 2114 | static Status initialize() 2115 | { 2116 | return (Status)oniInitialize(ONI_API_VERSION); // provide version of API, to make sure proper struct sizes are used 2117 | } 2118 | 2119 | /** 2120 | Stop using the library. Unload all drivers, close all streams and devices. 2121 | Once @ref shutdown was called, no other calls to OpenNI is allowed. 2122 | */ 2123 | static void shutdown() 2124 | { 2125 | oniShutdown(); 2126 | } 2127 | 2128 | /** 2129 | * Returns the version of OpenNI 2130 | */ 2131 | static Version getVersion() 2132 | { 2133 | OniVersion oniVersion = oniGetVersion(); 2134 | Version version; 2135 | version.major = oniVersion.major; 2136 | version.minor = oniVersion.minor; 2137 | version.maintenance = oniVersion.maintenance; 2138 | version.build = oniVersion.build; 2139 | return version; 2140 | } 2141 | 2142 | /** 2143 | * Retrieves the calling thread's last extended error information. The last extended error information is maintained 2144 | * on a per-thread basis. 
Multiple threads do not overwrite each other's last extended error information. 2145 | * 2146 | * The extended error information is cleared on every call to an OpenNI method, so you should call this method 2147 | * immediately after a call to an OpenNI method which have failed. 2148 | */ 2149 | static const char* getExtendedError() 2150 | { 2151 | return oniGetExtendedError(); 2152 | } 2153 | 2154 | /** 2155 | Fills up an array of @ref DeviceInfo objects with devices that are available. 2156 | @param [in,out] deviceInfoList An array to be filled with devices. 2157 | */ 2158 | static void enumerateDevices(Array* deviceInfoList) 2159 | { 2160 | OniDeviceInfo* m_pDeviceInfos; 2161 | int m_deviceInfoCount; 2162 | oniGetDeviceList(&m_pDeviceInfos, &m_deviceInfoCount); 2163 | deviceInfoList->_setData((DeviceInfo*)m_pDeviceInfos, m_deviceInfoCount, true); 2164 | oniReleaseDeviceList(m_pDeviceInfos); 2165 | } 2166 | 2167 | /** 2168 | Wait for a new frame from any of the streams provided. The function blocks until any of the streams 2169 | has a new frame available, or the timeout has passed. 2170 | @param [in] pStreams An array of streams to wait for. 2171 | @param [in] streamCount The number of streams in @c pStreams 2172 | @param [out] pReadyStreamIndex The index of the first stream that has new frame available. 2173 | @param [in] timeout [Optional] A timeout before returning if no stream has new data. Default value is @ref TIMEOUT_FOREVER. 
*/
	static Status waitForAnyStream(VideoStream** pStreams, int streamCount, int* pReadyStreamIndex, int timeout = TIMEOUT_FOREVER)
	{
		// Fixed-size scratch buffer: the C API needs raw OniStreamHandles, and the
		// wrapper caps the wait set at 50 streams.
		static const int ONI_MAX_STREAMS = 50;
		OniStreamHandle streams[ONI_MAX_STREAMS];

		if (streamCount > ONI_MAX_STREAMS)
		{
			printf("Too many streams for wait: %d > %d\n", streamCount, ONI_MAX_STREAMS);
			return STATUS_BAD_PARAMETER;
		}

		// -1 marks "no stream ready" in case the wait fails or times out.
		*pReadyStreamIndex = -1;
		for (int i = 0; i < streamCount; ++i)
		{
			// NULL entries in the caller's array are forwarded as NULL handles.
			if (pStreams[i] != NULL)
			{
				streams[i] = pStreams[i]->_getHandle();
			}
			else
			{
				streams[i] = NULL;
			}
		}
		Status rc = (Status)oniWaitForAnyStream(streams, streamCount, pReadyStreamIndex, timeout);

		return rc;
	}

	/**
	 * Add a listener to the list of objects that receive the event when a device is connected. See the
	 * @ref OpenNI::DeviceConnectedListener class for details on utilizing the events provided by OpenNI.
	 *
	 * @param pListener Pointer to the Listener to be added to the list
	 * @returns Status code indicating success or failure of this operation.
	 */
	static Status addDeviceConnectedListener(DeviceConnectedListener* pListener)
	{
		// A non-NULL handle means this listener is already registered; refuse double registration.
		if (pListener->m_deviceConnectedCallbacksHandle != NULL)
		{
			return STATUS_ERROR;
		}
		return (Status)oniRegisterDeviceCallbacks(&pListener->m_deviceConnectedCallbacks, pListener, &pListener->m_deviceConnectedCallbacksHandle);
	}
	/**
	 * Add a listener to the list of objects that receive the event when a device is disconnected. See the
	 * @ref OpenNI::DeviceDisconnectedListener class for details on utilizing the events provided by OpenNI.
	 *
	 * @param pListener Pointer to the Listener to be added to the list
	 * @returns Status code indicating success or failure of this operation.
	 */
	static Status addDeviceDisconnectedListener(DeviceDisconnectedListener* pListener)
	{
		// A non-NULL handle means this listener is already registered; refuse double registration.
		if (pListener->m_deviceDisconnectedCallbacksHandle != NULL)
		{
			return STATUS_ERROR;
		}
		return (Status)oniRegisterDeviceCallbacks(&pListener->m_deviceDisconnectedCallbacks, pListener, &pListener->m_deviceDisconnectedCallbacksHandle);
	}
	/**
	 * Add a listener to the list of objects that receive the event when a device's state changes. See the
	 * @ref OpenNI::DeviceStateChangedListener class for details on utilizing the events provided by OpenNI.
	 *
	 * @param pListener Pointer to the Listener to be added to the list
	 * @returns Status code indicating success or failure of this operation.
	 */
	static Status addDeviceStateChangedListener(DeviceStateChangedListener* pListener)
	{
		// A non-NULL handle means this listener is already registered; refuse double registration.
		if (pListener->m_deviceStateChangedCallbacksHandle != NULL)
		{
			return STATUS_ERROR;
		}
		return (Status)oniRegisterDeviceCallbacks(&pListener->m_deviceStateChangedCallbacks, pListener, &pListener->m_deviceStateChangedCallbacksHandle);
	}
	/**
	 * Remove a listener from the list of objects that receive the event when a device is connected. See
	 * the @ref OpenNI::DeviceConnectedListener class for details on utilizing the events provided by OpenNI.
	 *
	 * @param pListener Pointer to the Listener to be removed from the list
	 * @returns Status code indicating the success or failure of this operation.
	 */
	static void removeDeviceConnectedListener(DeviceConnectedListener* pListener)
	{
		oniUnregisterDeviceCallbacks(pListener->m_deviceConnectedCallbacksHandle);
		// Resetting the handle allows the listener to be registered again later.
		pListener->m_deviceConnectedCallbacksHandle = NULL;
	}
	/**
	 * Remove a listener from the list of objects that receive the event when a device is disconnected.
See
	 * the @ref OpenNI::DeviceDisconnectedListener class for details on utilizing the events provided by OpenNI.
	 *
	 * @param pListener Pointer to the Listener to be removed from the list
	 * @returns Status code indicating the success or failure of this operation.
	 */
	static void removeDeviceDisconnectedListener(DeviceDisconnectedListener* pListener)
	{
		oniUnregisterDeviceCallbacks(pListener->m_deviceDisconnectedCallbacksHandle);
		// Resetting the handle allows the listener to be registered again later.
		pListener->m_deviceDisconnectedCallbacksHandle = NULL;
	}
	/**
	 * Remove a listener from the list of objects that receive the event when a device's state changes. See
	 * the @ref OpenNI::DeviceStateChangedListener class for details on utilizing the events provided by OpenNI.
	 *
	 * @param pListener Pointer to the Listener to be removed from the list
	 * @returns Status code indicating the success or failure of this operation.
	 */
	static void removeDeviceStateChangedListener(DeviceStateChangedListener* pListener)
	{
		oniUnregisterDeviceCallbacks(pListener->m_deviceStateChangedCallbacksHandle);
		// Resetting the handle allows the listener to be registered again later.
		pListener->m_deviceStateChangedCallbacksHandle = NULL;
	}

	/**
	 * Change the log output folder

	 * @param const char * strLogOutputFolder [in] log required folder
	 *
	 * @retval STATUS_OK Upon successful completion.
	 * @retval STATUS_ERROR Upon any kind of failure.
	 */
	static Status setLogOutputFolder(const char *strLogOutputFolder)
	{
		return (Status)oniSetLogOutputFolder(strLogOutputFolder);
	}

	/**
	 * Get current log file name

	 * @param char * strFileName [out] returned file name buffer
	 * @param int nBufferSize [in] Buffer size
	 *
	 * @retval STATUS_OK Upon successful completion.
	 * @retval STATUS_ERROR Upon any kind of failure.
	 */
	static Status getLogFileName(char *strFileName, int nBufferSize)
	{
		return (Status)oniGetLogFileName(strFileName, nBufferSize);
	}

	/**
	 * Set minimum severity for log produce

	 * @param int nMinSeverity [in] Logger severity
	 *
	 * @retval STATUS_OK Upon successful completion.
	 * @retval STATUS_ERROR Upon any kind of failure.
	 */
	static Status setLogMinSeverity(int nMinSeverity)
	{
		return(Status) oniSetLogMinSeverity(nMinSeverity);
	}

	/**
	 * Configures if log entries will be printed to console.

	 * @param const OniBool bConsoleOutput [in] TRUE to print log entries to console, FALSE otherwise.
	 *
	 * @retval STATUS_OK Upon successful completion.
	 * @retval STATUS_ERROR Upon any kind of failure.
	 */
	static Status setLogConsoleOutput(bool bConsoleOutput)
	{
		return (Status)oniSetLogConsoleOutput(bConsoleOutput);
	}

	/**
	 * Configures if log entries will be printed to file.

	 * @param const OniBool bFileOutput [in] TRUE to print log entries to file, FALSE otherwise.
	 *
	 * @retval STATUS_OK Upon successful completion.
	 * @retval STATUS_ERROR Upon any kind of failure.
	 */
	static Status setLogFileOutput(bool bFileOutput)
	{
		return (Status)oniSetLogFileOutput(bFileOutput);
	}

#if ONI_PLATFORM == ONI_PLATFORM_ANDROID_ARM
	/**
	 * Configures if log entries will be printed to the Android log.

	 * @param OniBool bAndroidOutput bAndroidOutput [in] TRUE to print log entries to the Android log, FALSE otherwise.
	 *
	 * @retval STATUS_OK Upon successful completion.
	 * @retval STATUS_ERROR Upon any kind of failure.
	 */

	static Status setLogAndroidOutput(bool bAndroidOutput)
	{
		return (Status)oniSetLogAndroidOutput(bAndroidOutput);
	}
#endif

private:
	// OpenNI is a static-only facade; construction is disallowed.
	OpenNI()
	{
	}
};

/**
The CoordinateConverter class converts points between the different coordinate systems.

Depth and World coordinate systems

OpenNI applications commonly use two different coordinate systems to represent depth. These two systems are referred to as Depth
and World representation.

Depth coordinates are the native data representation. In this system, the frame is a map (two dimensional array), and each pixel is
assigned a depth value. This depth value represents the distance between the camera plane and whatever object is in the given
pixel. The X and Y coordinates are simply the location in the map, where the origin is the top-left corner of the field of view.

World coordinates superimpose a more familiar 3D Cartesian coordinate system on the world, with the camera lens at the origin.
In this system, every point is specified by 3 points -- x, y and z. The x axis of this system is along a line that passes
through the infrared projector and CMOS imager of the camera. The y axis is parallel to the front face of the camera, and
perpendicular to the x axis (it will also be perpendicular to the ground if the camera is upright and level). The z axis
runs into the scene, perpendicular to both the x and y axis. From the perspective of the camera, an object moving from
left to right is moving along the increasing x axis. An object moving up is moving along the increasing y axis, and an object
moving away from the camera is moving along the increasing z axis.

Mathematically, the Depth coordinate system is the projection of the scene on the CMOS.
If the sensor's angular field of view and
resolution are known, then an angular size can be calculated for each pixel. This is how the conversion algorithms work. The
dependence of this calculation on FoV and resolution is the reason that a @ref VideoStream pointer must be provided to these
functions. The @ref VideoStream pointer is used to determine parameters for the specific points to be converted.

Since Depth coordinates are projective, the apparent size of objects in depth coordinates (measured in pixels)
will increase as an object moves closer to the sensor. The size of objects in the World coordinate system is independent of
distance from the sensor.

Note that converting from Depth to World coordinates is relatively expensive computationally. It is generally not practical to convert
the entire raw depth map to World coordinates. A better approach is to have your computer vision algorithm work in Depth
coordinates for as long as possible, and only converting a few specific points to World coordinates right before output.

Note that when converting from Depth to World or vice versa, the Z value remains the same.
*/
class CoordinateConverter
{
public:
	/**
	Converts a single point from the World coordinate system to the Depth coordinate system.
	@param [in] depthStream Reference to an openni::VideoStream that will be used to determine the format of the Depth coordinates
	@param [in] worldX The X coordinate of the point to be converted, measured in millimeters in World coordinates
	@param [in] worldY The Y coordinate of the point to be converted, measured in millimeters in World coordinates
	@param [in] worldZ The Z coordinate of the point to be converted, measured in millimeters in World coordinates
	@param [out] pDepthX Pointer to a place to store the X coordinate of the output value, measured in pixels with 0 at far left of image
	@param [out] pDepthY Pointer to a place to store the Y coordinate of the output value, measured in pixels with 0 at top of image
	@param [out] pDepthZ Pointer to a place to store the Z(depth) coordinate of the output value, measured in the @ref PixelFormat of depthStream
	*/
	static Status convertWorldToDepth(const VideoStream& depthStream, float worldX, float worldY, float worldZ, int* pDepthX, int* pDepthY, DepthPixel* pDepthZ)
	{
		float depthX, depthY, depthZ;
		Status rc = (Status)oniCoordinateConverterWorldToDepth(depthStream._getHandle(), worldX, worldY, worldZ, &depthX, &depthY, &depthZ);
		// Truncate the floating point results into the caller's integer/DepthPixel outputs.
		*pDepthX = (int)depthX;
		*pDepthY = (int)depthY;
		*pDepthZ = (DepthPixel)depthZ;
		return rc;
	}

	/**
	Converts a single point from the World coordinate system to a floating point representation of the Depth coordinate system
	@param [in] depthStream Reference to an openni::VideoStream that will be used to determine the format of the Depth coordinates
	@param [in] worldX The X coordinate of the point to be converted, measured in millimeters in World coordinates
	@param [in] worldY The Y coordinate of the point to be converted, measured in millimeters in World coordinates
	@param [in] worldZ The Z coordinate of the point to be converted, measured in millimeters in World coordinates
	@param [out] pDepthX Pointer to a place to store the X coordinate of the output value, measured in pixels with 0.0 at far left of the image
	@param [out] pDepthY Pointer to a place to store the Y coordinate of the output value, measured in pixels with 0.0 at the top of the image
	@param [out] pDepthZ Pointer to a place to store the Z(depth) coordinate of the output value, measured in millimeters with 0.0 at the camera lens
	*/
	static Status convertWorldToDepth(const VideoStream& depthStream, float worldX, float worldY, float worldZ, float* pDepthX, float* pDepthY, float* pDepthZ)
	{
		return (Status)oniCoordinateConverterWorldToDepth(depthStream._getHandle(), worldX, worldY, worldZ, pDepthX, pDepthY, pDepthZ);
	}

	/**
	Converts a single point from the Depth coordinate system to the World coordinate system.
	@param [in] depthStream Reference to an openni::VideoStream that will be used to determine the format of the Depth coordinates
	@param [in] depthX The X coordinate of the point to be converted, measured in pixels with 0 at the far left of the image
	@param [in] depthY The Y coordinate of the point to be converted, measured in pixels with 0 at the top of the image
	@param [in] depthZ the Z(depth) coordinate of the point to be converted, measured in the @ref PixelFormat of depthStream
	@param [out] pWorldX Pointer to a place to store the X coordinate of the output value, measured in millimeters in World coordinates
	@param [out] pWorldY Pointer to a place to store the Y coordinate of the output value, measured in millimeters in World coordinates
	@param [out] pWorldZ Pointer to a place to store the Z coordinate of the output value, measured in millimeters in World coordinates
	*/
	static Status convertDepthToWorld(const VideoStream& depthStream, int depthX, int depthY, DepthPixel depthZ, float* pWorldX, float* pWorldY, float* pWorldZ)
	{
		// The C API is float-based; promote the integer pixel coordinates before the call.
		return (Status)oniCoordinateConverterDepthToWorld(depthStream._getHandle(), float(depthX), float(depthY), float(depthZ), pWorldX, pWorldY, pWorldZ);
	}

	/**
	Converts a single point from a floating point representation of the Depth coordinate system to the World coordinate system.
	@param [in] depthStream Reference to an openni::VideoStream that will be used to determine the format of the Depth coordinates
	@param [in] depthX The X coordinate of the point to be converted, measured in pixels with 0.0 at the far left of the image
	@param [in] depthY The Y coordinate of the point to be converted, measured in pixels with 0.0 at the top of the image
	@param [in] depthZ Z(depth) coordinate of the point to be converted, measured in the @ref PixelFormat of depthStream
	@param [out] pWorldX Pointer to a place to store the X coordinate of the output value, measured in millimeters in World coordinates
	@param [out] pWorldY Pointer to a place to store the Y coordinate of the output value, measured in millimeters in World coordinates
	@param [out] pWorldZ Pointer to a place to store the Z coordinate of the output value, measured in millimeters in World coordinates
	*/
	static Status convertDepthToWorld(const VideoStream& depthStream, float depthX, float depthY, float depthZ, float* pWorldX, float* pWorldY, float* pWorldZ)
	{
		return (Status)oniCoordinateConverterDepthToWorld(depthStream._getHandle(), depthX, depthY, depthZ, pWorldX, pWorldY, pWorldZ);
	}

	/**
	For a given depth point, provides the coordinates of the corresponding color value. Useful for superimposing the depth and color images.
	This operation is the same as turning on registration, but is performed on a single pixel rather than the whole image.
	@param [in] depthStream Reference to a openni::VideoStream that produced the depth value
	@param [in] colorStream Reference to a openni::VideoStream that we want to find the appropriate color pixel in
	@param [in] depthX X value of the depth point, given in Depth coordinates and measured in pixels
	@param [in] depthY Y value of the depth point, given in Depth coordinates and measured in pixels
	@param [in] depthZ Z(depth) value of the depth point, given in the @ref PixelFormat of depthStream
	@param [out] pColorX The X coordinate of the color pixel that overlaps the given depth pixel, measured in pixels
	@param [out] pColorY The Y coordinate of the color pixel that overlaps the given depth pixel, measured in pixels
	*/
	static Status convertDepthToColor(const VideoStream& depthStream, const VideoStream& colorStream, int depthX, int depthY, DepthPixel depthZ, int* pColorX, int* pColorY)
	{
		return (Status)oniCoordinateConverterDepthToColor(depthStream._getHandle(), colorStream._getHandle(), depthX, depthY, depthZ, pColorX, pColorY);
	}
};

/**
 * The Recorder class is used to record streams to an ONI file.
 *
 * After a recorder is instantiated, it must be initialized with a specific filename where
 * the recording will be stored. The recorder is then attached to one or more streams. Once
 * this is complete, the recorder can be told to start recording. The recorder will store
 * every frame from every stream to the specified file. Later, this file can be used to
 * initialize a file Device, and used to play back the same data that was recorded.
 *
 * Opening a file device is done by passing its path as the uri to the @ref Device::open() method.
 *
 * @see PlaybackControl for options available to play a recorded file.
 *
 */
class Recorder
{
public:
	/**
	 * Creates a recorder. The recorder is not valid, i.e. @ref isValid() returns
	 * false. You must initialize the recorder before use with @ref create().
	 */
	Recorder() : m_recorder(NULL)
	{
	}

	/**
	 * Destroys a recorder. This will also stop recording.
	 */
	~Recorder()
	{
		destroy();
	}

	/**
	 * Initializes a recorder. You can initialize the recorder only once. Attempts
	 * to initialize more than once will result in an error code being returned.
	 *
	 * Initialization assigns the recorder to an output file that will be used for
	 * recording. Before use, the @ref attach() function must also be used to assign input
	 * data to the Recorder.
	 *
	 * @param [in] fileName The name of a file which will contain the recording.
	 * @returns Status code which indicates success or failure of the operation.
	 */
	Status create(const char* fileName)
	{
		// A valid recorder was already created; creating twice is an error.
		if (!isValid())
		{
			return (Status)oniCreateRecorder(fileName, &m_recorder);
		}
		return STATUS_ERROR;
	}

	/**
	 * Verifies if the recorder is valid, i.e. if one can record with this recorder. A
	 * recorder object is not valid until the @ref create() method is called.
	 *
	 * @returns true if the recorder has been initialized, false otherwise.
	 */
	bool isValid() const
	{
		return NULL != getHandle();
	}

	/**
	 * Attaches a stream to the recorder. Note, this won't start recording, you
	 * should explicitly start it using @ref start() method. As soon as the recording
	 * process has been started, no more streams can be attached to the recorder.
	 *
	 * @param [in] stream The stream to be recorded.
	 * @param [in] allowLossyCompression [Optional] If this value is true, the recorder might use
	 * a lossy compression, which means that when the recording will be played-back, there might
	 * be small differences from the original frame. Default value is false.
	 */
	Status attach(VideoStream& stream, bool allowLossyCompression = false)
	{
		// Both the recorder and the stream must be live before attaching.
		if (!isValid() || !stream.isValid())
		{
			return STATUS_ERROR;
		}
		return (Status)oniRecorderAttachStream(
			m_recorder,
			stream._getHandle(),
			allowLossyCompression);
	}

	/**
	 * Starts recording.
	 * Once this method is called, the recorder will take all subsequent frames from the attached streams
	 * and store them in the file.
	 * You may not attach additional streams once recording was started.
	 */
	Status start()
	{
		if (!isValid())
		{
			return STATUS_ERROR;
		}
		return (Status)oniRecorderStart(m_recorder);
	}

	/**
	 * Stops recording. You may use @ref start() to resume the recording.
	 */
	void stop()
	{
		if (isValid())
		{
			oniRecorderStop(m_recorder);
		}
	}

	/**
	Destroys the recorder object.
	*/
	void destroy()
	{
		if (isValid())
		{
			oniRecorderDestroy(&m_recorder);
		}
	}

private:
	// Non-copyable: declared but not defined (pre-C++11 idiom).
	Recorder(const Recorder&);
	Recorder& operator=(const Recorder&);

	/**
	 * Returns a handle of this recorder.
	 */
	OniRecorderHandle getHandle() const
	{
		return m_recorder;
	}


	OniRecorderHandle m_recorder;
};

// Implementation
Status VideoStream::create(const Device& device, SensorType sensorType)
{
	OniStreamHandle streamHandle;
	Status rc = (Status)oniDeviceCreateStream(device._getHandle(), (OniSensorType)sensorType, &streamHandle);
	if (rc != STATUS_OK)
	{
		return rc;
	}

	m_isOwner = true;
	_setHandle(streamHandle);

	// Camera settings are only exposed when the stream supports both auto controls.
	if (isPropertySupported(STREAM_PROPERTY_AUTO_WHITE_BALANCE) && isPropertySupported(STREAM_PROPERTY_AUTO_EXPOSURE))
	{
		m_pCameraSettings = new CameraSettings(this);
	}

	return STATUS_OK;
}

void VideoStream::destroy()
{
	if (!isValid())
	{
		return;
	}

	if (m_pCameraSettings != NULL)
	{
		delete m_pCameraSettings;
		m_pCameraSettings = NULL;
	}

	if (m_stream != NULL)
	{
		// Only destroy the underlying stream if this wrapper owns it.
		if(m_isOwner)
			oniStreamDestroy(m_stream);
		m_stream = NULL;
	}
}

Status Device::open(const char* uri)
{
	//If we are not the owners, we stick with our own device
	if(!m_isOwner)
	{
		if(isValid()){
			return STATUS_OK;
		}else{
			return STATUS_OUT_OF_FLOW;
		}
	}

	OniDeviceHandle deviceHandle;
	Status rc = (Status)oniDeviceOpen(uri, &deviceHandle);
	if (rc != STATUS_OK)
	{
		return rc;
	}

	_setHandle(deviceHandle);

	return STATUS_OK;
}

Status Device::_openEx(const char* uri, const char* mode)
{
	//If we are not the owners, we stick with our own device
	if(!m_isOwner)
	{
		if(isValid()){
			return STATUS_OK;
		}else{
			return STATUS_OUT_OF_FLOW;
		}
	}

OniDeviceHandle deviceHandle;
	Status rc = (Status)oniDeviceOpenEx(uri, mode, &deviceHandle);
	if (rc != STATUS_OK)
	{
		return rc;
	}

	_setHandle(deviceHandle);

	return STATUS_OK;
}

Status Device::_setHandle(OniDeviceHandle deviceHandle)
{
	// Adopt the handle only if no device is currently held.
	if (m_device == NULL)
	{
		m_device = deviceHandle;

		clearSensors();

		oniDeviceGetInfo(m_device, &m_deviceInfo);

		// File-based (playback) devices additionally get a PlaybackControl facade.
		if (isFile())
		{
			m_pPlaybackControl = new PlaybackControl(this);
		}

		// Read deviceInfo
		return STATUS_OK;
	}

	return STATUS_OUT_OF_FLOW;
}

void Device::close()
{
	if (m_pPlaybackControl != NULL)
	{
		delete m_pPlaybackControl;
		m_pPlaybackControl = NULL;
	}

	if (m_device != NULL)
	{
		// Only close the underlying handle if this wrapper owns it.
		if(m_isOwner)
		{
			oniDeviceClose(m_device);
		}

		m_device = NULL;
	}
}

}

#endif // OPENNI_H
-------------------------------------------------------------------------------- /include/Openni2/PS1080.h: --------------------------------------------------------------------------------
/*****************************************************************************
*                                                                            *
*  OpenNI 2.x Alpha                                                          *
*  Copyright (C) 2012 PrimeSense Ltd.                                        *
*                                                                            *
*  This file is part of OpenNI.                                              *
*                                                                            *
*  Licensed under the Apache License, Version 2.0 (the "License");           *
*  you may not use this file except in compliance with the License.          *
*  You may obtain a copy of the License at                                   *
*                                                                            *
*      http://www.apache.org/licenses/LICENSE-2.0                            *
*                                                                            *
*  Unless required by applicable law or agreed to in writing, software       *
*  distributed under the License is distributed on an "AS IS" BASIS,         *
*  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/*                                                                           *
 *  See the License for the specific language governing permissions and      *
 *  limitations under the License.                                           *
 *                                                                           *
 *****************************************************************************/
#ifndef PS1080_H
#define PS1080_H

/* NOTE(review): the original #include target was lost during extraction of
 * this dump (the line reads a bare "#include").  Upstream PS1080.h includes
 * <OniCTypes.h>; <stdint.h> supplies the fixed-width integer types that this
 * header actually references — confirm against the shipped SDK header. */
#include <stdint.h>

/** The maximum permitted Xiron device name string length. */
#define XN_DEVICE_MAX_STRING_LENGTH 200

/*
 * Private properties of PS1080 devices.
 *
 * @remarks
 * Property IDs have the form 0x1080XXYY where XX is the range and YY the code.
 * Range values:
 *   F0 - device properties
 *   E0 - device commands
 *   00 - common stream properties
 *   10 - depth stream properties
 *   20 - color stream properties
 */
enum
{
    /*******************************************************************/
    /* Device properties                                               */
    /*******************************************************************/

    /** unsigned long long (XnSensorUsbInterface) */
    XN_MODULE_PROPERTY_USB_INTERFACE = 0x1080F001, // "UsbInterface"
    /** Boolean */
    XN_MODULE_PROPERTY_MIRROR = 0x1080F002, // "Mirror"
    /** unsigned long long, get only */
    XN_MODULE_PROPERTY_RESET_SENSOR_ON_STARTUP = 0x1080F004, // "ResetSensorOnStartup"
    /** unsigned long long, get only */
    XN_MODULE_PROPERTY_LEAN_INIT = 0x1080F005, // "LeanInit"
    /** char[XN_DEVICE_MAX_STRING_LENGTH], get only */
    XN_MODULE_PROPERTY_SERIAL_NUMBER = 0x1080F006, // "ID"
    /** XnVersions, get only */
    XN_MODULE_PROPERTY_VERSION = 0x1080F007, // "Version"
    /** Boolean */
    XN_MODULE_PROPERTY_FIRMWARE_FRAME_SYNC = 0x1080F008,
    /** Boolean */
    XN_MODULE_PROPERTY_HOST_TIMESTAMPS = 0x1080FF77, // "HostTimestamps"
    /** Boolean */
    XN_MODULE_PROPERTY_CLOSE_STREAMS_ON_SHUTDOWN = 0x1080FF78, // "CloseStreamsOnShutdown"
    /** Integer */
    XN_MODULE_PROPERTY_FIRMWARE_LOG_INTERVAL = 0x1080FF7F, // "FirmwareLogInterval"
    /** Boolean */
    XN_MODULE_PROPERTY_PRINT_FIRMWARE_LOG = 0x1080FF80, // "FirmwareLogPrint"
    /** Integer */
    XN_MODULE_PROPERTY_FIRMWARE_LOG_FILTER = 0x1080FF81, // "FirmwareLogFilter"
    /** String, get only */
    XN_MODULE_PROPERTY_FIRMWARE_LOG = 0x1080FF82, // "FirmwareLog"
    /** Integer */
    XN_MODULE_PROPERTY_FIRMWARE_CPU_INTERVAL = 0x1080FF83, // "FirmwareCPUInterval"
    /** String, get only */
    XN_MODULE_PROPERTY_PHYSICAL_DEVICE_NAME = 0x1080FF7A, // "PhysicalDeviceName"
    /** String, get only */
    XN_MODULE_PROPERTY_VENDOR_SPECIFIC_DATA = 0x1080FF7B, // "VendorSpecificData"
    /** String, get only */
    XN_MODULE_PROPERTY_SENSOR_PLATFORM_STRING = 0x1080FF7C, // "SensorPlatformString"

    /*******************************************************************/
    /* Device commands (activated via SetProperty/GetProperty)         */
    /*******************************************************************/

    /** XnInnerParam */
    XN_MODULE_PROPERTY_FIRMWARE_PARAM = 0x1080E001, // "FirmwareParam"
    /** unsigned long long, set only */
    XN_MODULE_PROPERTY_RESET = 0x1080E002, // "Reset"
    /** XnControlProcessingData */
    XN_MODULE_PROPERTY_IMAGE_CONTROL = 0x1080E003, // "ImageControl"
    /** XnControlProcessingData */
    XN_MODULE_PROPERTY_DEPTH_CONTROL = 0x1080E004, // "DepthControl"
    /** XnAHBData */
    XN_MODULE_PROPERTY_AHB = 0x1080E005, // "AHB"
    /** XnLedState */
    XN_MODULE_PROPERTY_LED_STATE = 0x1080E006, // "LedState"
    /** Boolean */
    XN_MODULE_PROPERTY_EMITTER_STATE = 0x1080E007, // "EmitterState"

    /** XnCmosBlankingUnits */
    XN_MODULE_PROPERTY_CMOS_BLANKING_UNITS = 0x1080FF74, // "CmosBlankingUnits"
    /** XnCmosBlankingTime */
    XN_MODULE_PROPERTY_CMOS_BLANKING_TIME = 0x1080FF75, // "CmosBlankingTime"
    /** XnFlashFileList, get only */
    XN_MODULE_PROPERTY_FILE_LIST = 0x1080FF84, // "FileList"
    /** XnParamFlashData, get only */
    XN_MODULE_PROPERTY_FLASH_CHUNK = 0x1080FF85, // "FlashChunk"
    XN_MODULE_PROPERTY_FILE = 0x1080FF86, // "FlashFile"
    /** Integer */
    XN_MODULE_PROPERTY_DELETE_FILE = 0x1080FF87, // "DeleteFile"
    XN_MODULE_PROPERTY_FILE_ATTRIBUTES = 0x1080FF88, // "FileAttributes"
    XN_MODULE_PROPERTY_TEC_SET_POINT = 0x1080FF89, // "TecSetPoint"
    /** get only */
    XN_MODULE_PROPERTY_TEC_STATUS = 0x1080FF8A, // "TecStatus"
    /** get only */
    XN_MODULE_PROPERTY_TEC_FAST_CONVERGENCE_STATUS = 0x1080FF8B, // "TecFastConvergenceStatus"
    XN_MODULE_PROPERTY_EMITTER_SET_POINT = 0x1080FF8C, // "EmitterSetPoint"
    /** get only */
    XN_MODULE_PROPERTY_EMITTER_STATUS = 0x1080FF8D, // "EmitterStatus"
    XN_MODULE_PROPERTY_I2C = 0x1080FF8E, // "I2C"
    /** Integer, set only */
    XN_MODULE_PROPERTY_BIST = 0x1080FF8F, // "BIST"
    /** XnProjectorFaultData, set only */
    XN_MODULE_PROPERTY_PROJECTOR_FAULT = 0x1080FF90, // "ProjectorFault"
    /** Boolean, set only */
    XN_MODULE_PROPERTY_APC_ENABLED = 0x1080FF91, // "APCEnabled"
    /** Boolean */
    XN_MODULE_PROPERTY_FIRMWARE_TEC_DEBUG_PRINT = 0x1080FF92, // "TecDebugPrint"
    /** Boolean, set only */
    XN_MODULE_PROPERTY_READ_ALL_ENDPOINTS = 0x1080FF93,

    /*******************************************************************/
    /* Common stream properties                                        */
    /*******************************************************************/

    /** unsigned long long */
    XN_STREAM_PROPERTY_INPUT_FORMAT = 0x10800001, // "InputFormat"
    /** unsigned long long (XnCroppingMode) */
    XN_STREAM_PROPERTY_CROPPING_MODE = 0x10800002, // "CroppingMode"

    /*******************************************************************/
    /* Depth stream properties                                         */
    /*******************************************************************/

    /** unsigned long long */
    XN_STREAM_PROPERTY_CLOSE_RANGE = 0x1080F003, // "CloseRange"
    /** unsigned long long */
    XN_STREAM_PROPERTY_FAST_ZOOM_CROP = 0x1080F009, // "FastZoomCrop"
    /** XnPixelRegistration - get only */
    XN_STREAM_PROPERTY_PIXEL_REGISTRATION = 0x10801001, // "PixelRegistration"
    /** unsigned long long */
    XN_STREAM_PROPERTY_WHITE_BALANCE_ENABLED = 0x10801002, // "WhiteBalancedEnabled"
    /** unsigned long long */
    XN_STREAM_PROPERTY_GAIN = 0x10801003, // "Gain"
    /** unsigned long long */
    XN_STREAM_PROPERTY_HOLE_FILTER = 0x10801004, // "HoleFilter"
    /** unsigned long long (XnProcessingType) */
    XN_STREAM_PROPERTY_REGISTRATION_TYPE = 0x10801005, // "RegistrationType"
    /** XnDepthAGCBin* */
    XN_STREAM_PROPERTY_AGC_BIN = 0x10801006, // "AGCBin"
    /** unsigned long long, get only */
    XN_STREAM_PROPERTY_CONST_SHIFT = 0x10801007, // "ConstShift"
    /** unsigned long long, get only */
    XN_STREAM_PROPERTY_PIXEL_SIZE_FACTOR = 0x10801008, // "PixelSizeFactor"
    /** unsigned long long, get only */
    XN_STREAM_PROPERTY_MAX_SHIFT = 0x10801009, // "MaxShift"
    /** unsigned long long, get only */
    XN_STREAM_PROPERTY_PARAM_COEFF = 0x1080100A, // "ParamCoeff"
    /** unsigned long long, get only */
    XN_STREAM_PROPERTY_SHIFT_SCALE = 0x1080100B, // "ShiftScale"
    /** unsigned long long, get only */
    XN_STREAM_PROPERTY_ZERO_PLANE_DISTANCE = 0x1080100C, // "ZPD"
    /** double, get only */
    XN_STREAM_PROPERTY_ZERO_PLANE_PIXEL_SIZE = 0x1080100D, // "ZPPS"
    /** double, get only */
    XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE = 0x1080100E, // "LDDIS"
    /** double, get only */
    XN_STREAM_PROPERTY_DCMOS_RCMOS_DISTANCE = 0x1080100F, // "DCRCDIS"
    /** OniDepthPixel[], get only */
    XN_STREAM_PROPERTY_S2D_TABLE = 0x10801010, // "S2D"
    /** unsigned short[], get only */
    XN_STREAM_PROPERTY_D2S_TABLE = 0x10801011, // "D2S"
    /** get only */
    XN_STREAM_PROPERTY_DEPTH_SENSOR_CALIBRATION_INFO = 0x10801012,
    /** Boolean */
    XN_STREAM_PROPERTY_GMC_MODE = 0x1080FF44, // "GmcMode"
    /** Boolean */
    XN_STREAM_PROPERTY_GMC_DEBUG = 0x1080FF45, // "GmcDebug"
    /** Boolean */
    XN_STREAM_PROPERTY_WAVELENGTH_CORRECTION = 0x1080FF46, // "WavelengthCorrection"
    /** Boolean */
    XN_STREAM_PROPERTY_WAVELENGTH_CORRECTION_DEBUG = 0x1080FF47, // "WavelengthCorrectionDebug"

    /*******************************************************************/
    /* Color stream properties                                         */
    /*******************************************************************/
    /** Integer */
    XN_STREAM_PROPERTY_FLICKER = 0x10802001, // "Flicker"
};

/** Sensor firmware version identifiers. */
typedef enum
{
    XN_SENSOR_FW_VER_UNKNOWN = 0,
    XN_SENSOR_FW_VER_0_17 = 1,
    XN_SENSOR_FW_VER_1_1 = 2,
    XN_SENSOR_FW_VER_1_2 = 3,
    XN_SENSOR_FW_VER_3_0 = 4,
    XN_SENSOR_FW_VER_4_0 = 5,
    XN_SENSOR_FW_VER_5_0 = 6,
    XN_SENSOR_FW_VER_5_1 = 7,
    XN_SENSOR_FW_VER_5_2 = 8,
    XN_SENSOR_FW_VER_5_3 = 9,
    XN_SENSOR_FW_VER_5_4 = 10,
    XN_SENSOR_FW_VER_5_5 = 11,
    XN_SENSOR_FW_VER_5_6 = 12,
    XN_SENSOR_FW_VER_5_7 = 13,
    XN_SENSOR_FW_VER_5_8 = 14,
    XN_SENSOR_FW_VER_5_9 = 15,
} XnFWVer;

/** Sensor generation identifiers. */
typedef enum {
    XN_SENSOR_VER_UNKNOWN = 0,
    XN_SENSOR_VER_2_0 = 1,
    XN_SENSOR_VER_3_0 = 2,
    XN_SENSOR_VER_4_0 = 3,
    XN_SENSOR_VER_5_0 = 4
} XnSensorVer;

/** Sensor board (hardware) revision identifiers. */
typedef enum {
    XN_SENSOR_HW_VER_UNKNOWN = 0,
    XN_SENSOR_HW_VER_FPDB_10 = 1,
    XN_SENSOR_HW_VER_CDB_10 = 2,
    XN_SENSOR_HW_VER_RD_3 = 3,
    XN_SENSOR_HW_VER_RD_5 = 4,
    XN_SENSOR_HW_VER_RD1081 = 5,
    XN_SENSOR_HW_VER_RD1082 = 6,
    XN_SENSOR_HW_VER_RD109 = 7
} XnHWVer;

/** Sensor chip revision identifiers. */
typedef enum {
    XN_SENSOR_CHIP_VER_UNKNOWN = 0,
    XN_SENSOR_CHIP_VER_PS1000 = 1,
    XN_SENSOR_CHIP_VER_PS1080 = 2,
    XN_SENSOR_CHIP_VER_PS1080A6 = 3
} XnChipVer;

/** CMOS sensor selector (image vs. depth). */
typedef enum
{
    XN_CMOS_TYPE_IMAGE = 0,
    XN_CMOS_TYPE_DEPTH = 1,

    XN_CMOS_COUNT
} XnCMOSType;

/** Wire formats for the image stream. */
typedef enum
{
    XN_IO_IMAGE_FORMAT_BAYER = 0,
    XN_IO_IMAGE_FORMAT_YUV422 = 1,
    XN_IO_IMAGE_FORMAT_JPEG = 2,
    XN_IO_IMAGE_FORMAT_JPEG_420 = 3,
    XN_IO_IMAGE_FORMAT_JPEG_MONO = 4,
    XN_IO_IMAGE_FORMAT_UNCOMPRESSED_YUV422 = 5,
    XN_IO_IMAGE_FORMAT_UNCOMPRESSED_BAYER = 6,
    XN_IO_IMAGE_FORMAT_UNCOMPRESSED_YUYV = 7,
} XnIOImageFormats;

/** Wire formats for the depth stream. */
typedef enum
{
    XN_IO_DEPTH_FORMAT_UNCOMPRESSED_16_BIT = 0,
    XN_IO_DEPTH_FORMAT_COMPRESSED_PS = 1,
    XN_IO_DEPTH_FORMAT_UNCOMPRESSED_10_BIT = 2,
    XN_IO_DEPTH_FORMAT_UNCOMPRESSED_11_BIT = 3,
    XN_IO_DEPTH_FORMAT_UNCOMPRESSED_12_BIT = 4,
} XnIODepthFormats;

/** Argument values for XN_MODULE_PROPERTY_RESET. */
typedef enum
{
    XN_RESET_TYPE_POWER = 0,
    XN_RESET_TYPE_SOFT = 1,
    XN_RESET_TYPE_SOFT_FIRST = 2,
} XnParamResetType;

/** Values for XN_MODULE_PROPERTY_USB_INTERFACE. */
typedef enum XnSensorUsbInterface
{
    XN_SENSOR_USB_INTERFACE_DEFAULT = 0,
    XN_SENSOR_USB_INTERFACE_ISO_ENDPOINTS = 1,
    XN_SENSOR_USB_INTERFACE_BULK_ENDPOINTS = 2,
    XN_SENSOR_USB_INTERFACE_ISO_ENDPOINTS_LOW_DEPTH = 3,
} XnSensorUsbInterface;

/** Values for XN_STREAM_PROPERTY_REGISTRATION_TYPE. */
typedef enum XnProcessingType
{
    XN_PROCESSING_DONT_CARE = 0,
    XN_PROCESSING_HARDWARE = 1,
    XN_PROCESSING_SOFTWARE = 2,
} XnProcessingType;

/** Values for XN_STREAM_PROPERTY_CROPPING_MODE. */
typedef enum XnCroppingMode
{
    XN_CROPPING_MODE_NORMAL = 1,
    XN_CROPPING_MODE_INCREASED_FPS = 2,
    XN_CROPPING_MODE_SOFTWARE_ONLY = 3,
} XnCroppingMode;

/** Device error states. */
enum
{
    XN_ERROR_STATE_OK = 0,
    XN_ERROR_STATE_DEVICE_PROJECTOR_FAULT = 1,
    XN_ERROR_STATE_DEVICE_OVERHEAT = 2,
};

/** Firmware-side cropping modes. */
typedef enum XnFirmwareCroppingMode
{
    XN_FIRMWARE_CROPPING_MODE_DISABLED = 0,
    XN_FIRMWARE_CROPPING_MODE_NORMAL = 1,
    XN_FIRMWARE_CROPPING_MODE_INCREASED_FPS = 2,
} XnFirmwareCroppingMode;

/** Bit flags for XN_MODULE_PROPERTY_FIRMWARE_LOG_FILTER. */
typedef enum
{
    XnLogFilterDebug = 0x0001,
    XnLogFilterInfo = 0x0002,
    XnLogFilterError = 0x0004,
    XnLogFilterProtocol = 0x0008,
    XnLogFilterAssert = 0x0010,
    XnLogFilterConfig = 0x0020,
    XnLogFilterFrameSync = 0x0040,
    XnLogFilterAGC = 0x0080,
    XnLogFilterTelems = 0x0100,

    XnLogFilterAll = 0xFFFF
} XnLogFilter;

/** Flash-file attribute bits. */
typedef enum
{
    XnFileAttributeReadOnly = 0x8000
} XnFilePossibleAttributes;

/** Types of files stored in device flash. */
typedef enum
{
    XnFlashFileTypeFileTable = 0x00,
    XnFlashFileTypeScratchFile = 0x01,
    XnFlashFileTypeBootSector = 0x02,
    XnFlashFileTypeBootManager = 0x03,
    XnFlashFileTypeCodeDownloader = 0x04,
    XnFlashFileTypeMonitor = 0x05,
    XnFlashFileTypeApplication = 0x06,
    XnFlashFileTypeFixedParams = 0x07,
    XnFlashFileTypeDescriptors = 0x08,
    XnFlashFileTypeDefaultParams = 0x09,
    XnFlashFileTypeImageCmos = 0x0A,
    XnFlashFileTypeDepthCmos = 0x0B,
    XnFlashFileTypeAlgorithmParams = 0x0C,
    XnFlashFileTypeReferenceQVGA = 0x0D,
    XnFlashFileTypeReferenceVGA = 0x0E,
    XnFlashFileTypeMaintenance = 0x0F,
    XnFlashFileTypeDebugParams = 0x10,
    XnFlashFileTypePrimeProcessor = 0x11,
    XnFlashFileTypeGainControl = 0x12,
    /* NOTE(review): "Registartion" typo is upstream; kept because the
     * identifier is public API. */
    XnFlashFileTypeRegistartionParams = 0x13,
    XnFlashFileTypeIDParams = 0x14,
    XnFlashFileTypeSensorTECParams = 0x15,
    XnFlashFileTypeSensorAPCParams = 0x16,
    XnFlashFileTypeSensorProjectorFaultParams = 0x17,
    XnFlashFileTypeProductionFile = 0x18,
    XnFlashFileTypeUpgradeInProgress = 0x19,
    XnFlashFileTypeWavelengthCorrection = 0x1A,
    XnFlashFileTypeGMCReferenceOffset = 0x1B,
    XnFlashFileTypeSensorNESAParams = 0x1C,
    XnFlashFileTypeSensorFault = 0x1D,
    XnFlashFileTypeVendorData = 0x1E,
} XnFlashFileType;

/** Built-in self-test selection mask (XN_MODULE_PROPERTY_BIST argument). */
typedef enum XnBistType
{
    // Auto tests
    XN_BIST_IMAGE_CMOS = 1 << 0,
    XN_BIST_IR_CMOS = 1 << 1,
    XN_BIST_POTENTIOMETER = 1 << 2,
    XN_BIST_FLASH = 1 << 3,
    XN_BIST_FULL_FLASH = 1 << 4,
    XN_BIST_PROJECTOR_TEST_MASK = 1 << 5,
    XN_BIST_TEC_TEST_MASK = 1 << 6,

    // Manual tests
    XN_BIST_NESA_TEST_MASK = 1 << 7,
    XN_BIST_NESA_UNLIMITED_TEST_MASK = 1 << 8,

    // Mask of all the auto tests
    XN_BIST_ALL = (0xFFFFFFFF & ~XN_BIST_NESA_TEST_MASK & ~XN_BIST_NESA_UNLIMITED_TEST_MASK),

} XnBistType;

/** Built-in self-test failure bits. */
typedef enum XnBistError
{
    XN_BIST_RAM_TEST_FAILURE = 1 << 0,
    XN_BIST_IR_CMOS_CONTROL_BUS_FAILURE = 1 << 1,
    XN_BIST_IR_CMOS_DATA_BUS_FAILURE = 1 << 2,
    XN_BIST_IR_CMOS_BAD_VERSION = 1 << 3,
    XN_BIST_IR_CMOS_RESET_FAILUE = 1 << 4,
    XN_BIST_IR_CMOS_TRIGGER_FAILURE = 1 << 5,
    XN_BIST_IR_CMOS_STROBE_FAILURE = 1 << 6,
    XN_BIST_COLOR_CMOS_CONTROL_BUS_FAILURE = 1 << 7,
    XN_BIST_COLOR_CMOS_DATA_BUS_FAILURE = 1 << 8,
    XN_BIST_COLOR_CMOS_BAD_VERSION = 1 << 9,
    XN_BIST_COLOR_CMOS_RESET_FAILUE = 1 << 10,
    XN_BIST_FLASH_WRITE_LINE_FAILURE = 1 << 11,
    XN_BIST_FLASH_TEST_FAILURE = 1 << 12,
    XN_BIST_POTENTIOMETER_CONTROL_BUS_FAILURE = 1 << 13,
    XN_BIST_POTENTIOMETER_FAILURE = 1 << 14,
    XN_BIST_AUDIO_TEST_FAILURE = 1 << 15,
    XN_BIST_PROJECTOR_TEST_LD_FAIL = 1 << 16,
    XN_BIST_PROJECTOR_TEST_LD_FAILSAFE_TRIG_FAIL = 1 << 17,
    XN_BIST_PROJECTOR_TEST_FAILSAFE_HIGH_FAIL = 1 << 18,
    XN_BIST_PROJECTOR_TEST_FAILSAFE_LOW_FAIL = 1 << 19,
    XN_TEC_TEST_HEATER_CROSSED = 1 << 20,
    XN_TEC_TEST_HEATER_DISCONNETED = 1 << 21,
    XN_TEC_TEST_TEC_CROSSED = 1 << 22,
    XN_TEC_TEST_TEC_FAULT = 1 << 23,
} XnBistError;

/** Depth CMOS part identifiers. */
typedef enum XnDepthCMOSType
{
    XN_DEPTH_CMOS_NONE = 0,
    XN_DEPTH_CMOS_MT9M001 = 1,
    XN_DEPTH_CMOS_AR130 = 2,
} XnDepthCMOSType;

/** Image CMOS part identifiers. */
typedef enum XnImageCMOSType
{
    XN_IMAGE_CMOS_NONE = 0,
    XN_IMAGE_CMOS_MT9M112 = 1,
    XN_IMAGE_CMOS_MT9D131 = 2,
    XN_IMAGE_CMOS_MT9M114 = 3,
} XnImageCMOSType;

#define XN_IO_MAX_I2C_BUFFER_SIZE 10
#define XN_MAX_LOG_SIZE (6*1024)

/* All wire-format structs below are byte-packed: they are exchanged with the
 * device verbatim, so no padding may be inserted. */
#pragma pack (push, 1)

/** SDK version quadruple (see XN_MODULE_PROPERTY_VERSION). */
typedef struct XnSDKVersion
{
    unsigned char nMajor;
    unsigned char nMinor;
    unsigned char nMaintenance;
    unsigned short nBuild;
} XnSDKVersion;

/** Full version report returned by XN_MODULE_PROPERTY_VERSION. */
typedef struct {
    unsigned char nMajor;
    unsigned char nMinor;
    unsigned short nBuild;
    unsigned int nChip;
    unsigned short nFPGA;
    unsigned short nSystemVersion;

    XnSDKVersion SDK;

    XnHWVer HWVer;
    XnFWVer FWVer;
    XnSensorVer SensorVer;
    XnChipVer ChipVer;
} XnVersions;

/** Argument for XN_MODULE_PROPERTY_FIRMWARE_PARAM. */
typedef struct
{
    unsigned short nParam;
    unsigned short nValue;
} XnInnerParamData;

/** Argument for XN_STREAM_PROPERTY_AGC_BIN. */
typedef struct XnDepthAGCBin
{
    unsigned short nBin;
    unsigned short nMin;
    unsigned short nMax;
} XnDepthAGCBin;

/** Register/value pair for IMAGE_CONTROL / DEPTH_CONTROL commands. */
typedef struct XnControlProcessingData
{
    unsigned short nRegister;
    unsigned short nValue;
} XnControlProcessingData;

/** Argument for XN_MODULE_PROPERTY_AHB (masked register write). */
typedef struct XnAHBData
{
    unsigned int nRegister;
    unsigned int nValue;
    unsigned int nMask;
} XnAHBData;

/** In/out argument for XN_STREAM_PROPERTY_PIXEL_REGISTRATION. */
typedef struct XnPixelRegistration
{
    unsigned int nDepthX;
    unsigned int nDepthY;
    uint16_t nDepthValue;
    unsigned int nImageXRes;
    unsigned int nImageYRes;
    unsigned int nImageX; // out
    unsigned int nImageY; // out
} XnPixelRegistration;

/** Argument for XN_MODULE_PROPERTY_LED_STATE. */
typedef struct XnLedState
{
    uint16_t nLedID;
    uint16_t nState;
} XnLedState;

/** Argument for XN_MODULE_PROPERTY_CMOS_BLANKING_TIME. */
typedef struct XnCmosBlankingTime
{
    XnCMOSType nCmosID;
    float nTimeInMilliseconds;
    uint16_t nNumberOfFrames;
} XnCmosBlankingTime;

/** Argument for XN_MODULE_PROPERTY_CMOS_BLANKING_UNITS. */
typedef struct XnCmosBlankingUnits
{
    XnCMOSType nCmosID;
    uint16_t nUnits;
    uint16_t nNumberOfFrames;
} XnCmosBlankingUnits;

/** Argument for XN_MODULE_PROPERTY_I2C (write). */
typedef struct XnI2CWriteData
{
    uint16_t nBus;
    uint16_t nSlaveAddress;
    uint16_t cpWriteBuffer[XN_IO_MAX_I2C_BUFFER_SIZE];
    uint16_t nWriteSize;
} XnI2CWriteData;

/** Argument for XN_MODULE_PROPERTY_I2C (read). */
typedef struct XnI2CReadData
{
    uint16_t nBus;
    uint16_t nSlaveAddress;
    uint16_t cpReadBuffer[XN_IO_MAX_I2C_BUFFER_SIZE];
    uint16_t cpWriteBuffer[XN_IO_MAX_I2C_BUFFER_SIZE];
    uint16_t nReadSize;
    uint16_t nWriteSize;
} XnI2CReadData;

/** Report returned by XN_MODULE_PROPERTY_TEC_STATUS. */
typedef struct XnTecData
{
    uint16_t m_SetPointVoltage;
    uint16_t m_CompensationVoltage;
    uint16_t m_TecDutyCycle; //duty cycle on heater/cooler
    uint16_t m_HeatMode; //TRUE - heat, FALSE - cool
    int32_t m_ProportionalError;
    int32_t m_IntegralError;
    int32_t m_DerivativeError;
    uint16_t m_ScanMode; //0 - crude, 1 - precise
} XnTecData;

/** Report returned by XN_MODULE_PROPERTY_TEC_FAST_CONVERGENCE_STATUS. */
typedef struct XnTecFastConvergenceData
{
    int16_t m_SetPointTemperature;  // set point temperature in celsius,
                                    // scaled by factor of 100 (extra precision)
    int16_t m_MeasuredTemperature;  // measured temperature in celsius,
                                    // scaled by factor of 100 (extra precision)
    int32_t m_ProportionalError;    // proportional error in system clocks
    int32_t m_IntegralError;        // integral error in system clocks
    int32_t m_DerivativeError;      // derivative error in system clocks
    uint16_t m_ScanMode;            // 0 - initial, 1 - crude, 2 - precise
    uint16_t m_HeatMode;            // 0 - idle, 1 - heat, 2 - cool
    uint16_t m_TecDutyCycle;        // duty cycle on heater/cooler in percents
    uint16_t m_TemperatureRange;    // 0 - cool, 1 - room, 2 - warm
} XnTecFastConvergenceData;

/** Report returned by XN_MODULE_PROPERTY_EMITTER_STATUS. */
typedef struct XnEmitterData
{
    uint16_t m_State; //idle, calibrating
    uint16_t m_SetPointVoltage; //this is what should be written to the XML
    uint16_t m_SetPointClocks; //target cross duty cycle
    uint16_t m_PD_Reading; //current cross duty cycle in system clocks(high time)
    uint16_t m_EmitterSet; //duty cycle on emitter set in system clocks (high time).
    uint16_t m_EmitterSettingLogic; //TRUE = positive logic, FALSE = negative logic
    uint16_t m_LightMeasureLogic; //TRUE - positive logic, FALSE - negative logic
    uint16_t m_IsAPCEnabled;
    uint16_t m_EmitterSetStepSize; // in MilliVolts
    uint16_t m_ApcTolerance; // in system clocks (only valid up till v5.2)
    uint16_t m_SubClocking; //in system clocks (only valid from v5.3)
    uint16_t m_Precision; // (only valid from v5.3)
} XnEmitterData;

/** Argument for XN_MODULE_PROPERTY_FILE_ATTRIBUTES. */
typedef struct
{
    uint16_t nId;
    uint16_t nAttribs;
} XnFileAttributes;

/** Argument for XN_MODULE_PROPERTY_FILE (upload/download). */
typedef struct
{
    uint32_t nOffset;
    const char* strFileName;
    uint16_t nAttributes;
} XnParamFileData;

/** Argument for XN_MODULE_PROPERTY_FLASH_CHUNK. */
typedef struct
{
    uint32_t nOffset;
    uint32_t nSize;
    unsigned char* pData;
} XnParamFlashData;

/** One entry in the device flash file table. */
typedef struct {
    uint16_t nId;
    uint16_t nType;
    uint32_t nVersion;
    uint32_t nOffset;
    uint32_t nSize;
    uint16_t nCrc;
    uint16_t nAttributes;
    uint16_t nReserve;
} XnFlashFile;

/** Result of XN_MODULE_PROPERTY_FILE_LIST. */
typedef struct
{
    XnFlashFile* pFiles;
    uint16_t nFiles;
} XnFlashFileList;

/** Argument for XN_MODULE_PROPERTY_PROJECTOR_FAULT. */
typedef struct XnProjectorFaultData
{
    uint16_t nMinThreshold;
    uint16_t nMaxThreshold;
    int32_t bProjectorFaultEvent;
} XnProjectorFaultData;

/** In/out argument for XN_MODULE_PROPERTY_BIST. */
typedef struct XnBist
{
    uint32_t nTestsMask;
    uint32_t nFailures;
} XnBist;

#pragma pack (pop)

#endif // PS1080_H
/* -------------------------------------------------------------------------- */
/* /include/Openni2/PSLink.h:                                                 */
/* -------------------------------------------------------------------------- */
/*****************************************************************************
*                                                                            *
*  OpenNI 2.x Alpha                                                          *
*  Copyright (C) 2012 PrimeSense Ltd.                                        *
*                                                                            *
*  This file is part of OpenNI.                                              *
*                                                                            *
*  Licensed under the Apache License, Version 2.0 (the "License");           *
*  you may not use this file except in compliance with the License.          *
*  You may obtain a copy of the License at                                   *
*                                                                            *
*      http://www.apache.org/licenses/LICENSE-2.0                            *
*                                                                            *
*  Unless required by applicable law or agreed to in writing, software       *
*  distributed under the License is distributed on an "AS IS" BASIS,         *
*  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  *
*  See the License for the specific language governing permissions and       *
*  limitations under the License.                                            *
*                                                                            *
*****************************************************************************/
#ifndef PSLINK_H
#define PSLINK_H

/* NOTE(review): the original #include target was lost during extraction of
 * this dump (the line reads a bare "#include").  <stdint.h> supplies the
 * fixed-width integer types used below — confirm against the shipped SDK
 * header, which includes the OpenNI type header instead. */
#include <stdint.h>

enum
{
    /**** Device properties ****/

    /* XnDetailedVersion, get only */
    LINK_PROP_FW_VERSION = 0x12000001, // "FWVersion"
    /* Int, get only */
    LINK_PROP_VERSIONS_INFO_COUNT = 0x12000002, // "VersionsInfoCount"
    /* General - array - XnComponentVersion * count elements, get only */
    LINK_PROP_VERSIONS_INFO = 0x12000003, // "VersionsInfo"
    /* Int - 0 means off, 1 means on. */
    LINK_PROP_PROJECTOR_ACTIVE = 0x12000008, // "ProjectorActive"
    /* String. Set only */
    LINK_PROP_PRESET_FILE = 0x1200000a, // "PresetFile"
    /* Get only */
    LINK_PROP_BOOT_STATUS = 0x1200000b,
    /* Int - system specific units */
    LINK_PROP_PROJECTOR_POWER = 0x1200000c,
    /* SetAccActive*/
    LINK_PROP_ACC_ENABLED = 0x1200000d,
    /* SetAccActive*/
    LINK_PROP_VDD_ENABLED = 0x1200000e,
    /* SetAccActive*/
    LINK_PROP_PERIODIC_BIST_ENABLED = 0x1200000f,


    /**** Device commands ****/
    /* XnCommandGetFwStreams */
    LINK_COMMAND_GET_FW_STREAM_LIST = 0x1200F001,
    /* XnCommandCreateStream */
    LINK_COMMAND_CREATE_FW_STREAM = 0x1200F002,
    /* XnCommandDestroyStream */
    LINK_COMMAND_DESTROY_FW_STREAM = 0x1200F003,
    /* XnCommandStartStream */
    LINK_COMMAND_START_FW_STREAM = 0x1200F004,
    /* XnCommandStopStream */
    LINK_COMMAND_STOP_FW_STREAM = 0x1200F005,
    /* XnCommandGetFwStreamVideoModeList */
    LINK_COMMAND_GET_FW_STREAM_VIDEO_MODE_LIST = 0x1200F006,
    /* XnCommandSetFwStreamVideoMode */
    LINK_COMMAND_SET_FW_STREAM_VIDEO_MODE = 0x1200F007,
    /* XnCommandGetFwStreamVideoMode */
    LINK_COMMAND_GET_FW_STREAM_VIDEO_MODE = 0x1200F008,
    /* XnCommandSetProjectorPulse */
    LINK_COMMAND_SET_PROJECTOR_PULSE = 0x1200F009,
    /* No args */
    LINK_COMMAND_DISABLE_PROJECTOR_PULSE = 0x1200F00a,

    /**** Stream properties ****/
    /* Int. 1 - Shifts 9.3, 2 - Grayscale16, 3 - YUV422, 4 - Bayer8 */
    LINK_PROP_PIXEL_FORMAT = 0x12001001, // "PixelFormat"
    /* Int. 0 - None, 1 - 8z, 2 - 16z, 3 - 24z, 4 - 6-bit, 5 - 10-bit, 6 - 11-bit, 7 - 12-bit */
    LINK_PROP_COMPRESSION = 0x12001002, // "Compression"

    /**** Depth Stream properties ****/
    /* Real, get only */
    /* NOTE(review): same numeric ID as LINK_PROP_BOOT_STATUS above (upstream
     * value; kept unchanged since both are public API). */
    LINK_PROP_DEPTH_SCALE = 0x1200000b, // "DepthScale"
    /* Int, get only */
    LINK_PROP_MAX_SHIFT = 0x12002001, // "MaxShift"
    /* Int, get only */
    LINK_PROP_ZERO_PLANE_DISTANCE = 0x12002002, // "ZPD"
    /* Int, get only */
    LINK_PROP_CONST_SHIFT = 0x12002003, // "ConstShift"
    /* Int, get only */
    LINK_PROP_PARAM_COEFF = 0x12002004, // "ParamCoeff"
    /* Int, get only */
    LINK_PROP_SHIFT_SCALE = 0x12002005, // "ShiftScale"
    /* Real, get only */
    LINK_PROP_ZERO_PLANE_PIXEL_SIZE = 0x12002006, // "ZPPS"
    /* Real, get only */
    LINK_PROP_ZERO_PLANE_OUTPUT_PIXEL_SIZE = 0x12002007, // "ZPOPS"
    /* Real, get only */
    LINK_PROP_EMITTER_DEPTH_CMOS_DISTANCE = 0x12002008, // "LDDIS"
    /* General - array - MaxShift * XnDepthPixel elements, get only */
    LINK_PROP_SHIFT_TO_DEPTH_TABLE = 0x12002009, // "S2D"
    /* General - array - MaxDepth * uint16_t elements, get only */
    LINK_PROP_DEPTH_TO_SHIFT_TABLE = 0x1200200a, // "D2S"
};

/** Flash zone a file lives in. */
typedef enum XnFileZone
{
    XN_ZONE_FACTORY = 0x0000,
    XN_ZONE_UPDATE = 0x0001,
} XnFileZone;

/** Boot result codes (see XnBootStatus). */
typedef enum XnBootErrorCode
{
    XN_BOOT_OK = 0x0000,
    XN_BOOT_BAD_CRC = 0x0001,
    XN_BOOT_UPLOAD_IN_PROGRESS = 0x0002,
    XN_BOOT_FW_LOAD_FAILED = 0x0003,
} XnBootErrorCode;

/** Firmware stream kinds. */
typedef enum XnFwStreamType
{
    XN_FW_STREAM_TYPE_COLOR = 0x0001,
    XN_FW_STREAM_TYPE_IR = 0x0002,
    XN_FW_STREAM_TYPE_SHIFTS = 0x0003,
    XN_FW_STREAM_TYPE_AUDIO = 0x0004,
    XN_FW_STREAM_TYPE_DY = 0x0005,
    XN_FW_STREAM_TYPE_LOG = 0x0008,
} XnFwStreamType;

/** Firmware pixel formats (see LINK_PROP_PIXEL_FORMAT). */
typedef enum XnFwPixelFormat
{
    XN_FW_PIXEL_FORMAT_NONE = 0x0000,
    XN_FW_PIXEL_FORMAT_SHIFTS_9_3 = 0x0001,
    XN_FW_PIXEL_FORMAT_GRAYSCALE16 = 0x0002,
    XN_FW_PIXEL_FORMAT_YUV422 = 0x0003,
    XN_FW_PIXEL_FORMAT_BAYER8 = 0x0004,
} XnFwPixelFormat;

/** Firmware compression modes (see LINK_PROP_COMPRESSION). */
typedef enum XnFwCompressionType
{
    XN_FW_COMPRESSION_NONE = 0x0000,
    XN_FW_COMPRESSION_8Z = 0x0001,
    XN_FW_COMPRESSION_16Z = 0x0002,
    XN_FW_COMPRESSION_24Z = 0x0003,
    XN_FW_COMPRESSION_6_BIT_PACKED = 0x0004,
    XN_FW_COMPRESSION_10_BIT_PACKED = 0x0005,
    XN_FW_COMPRESSION_11_BIT_PACKED = 0x0006,
    XN_FW_COMPRESSION_12_BIT_PACKED = 0x0007,
} XnFwCompressionType;

/* Wire-format structs: byte-packed so no padding is exchanged with the
 * device. */
#pragma pack (push, 1)

#define XN_MAX_VERSION_MODIFIER_LENGTH 16
/** Detailed firmware version (see LINK_PROP_FW_VERSION). */
typedef struct XnDetailedVersion
{
    uint8_t m_nMajor;
    uint8_t m_nMinor;
    uint16_t m_nMaintenance;
    uint32_t m_nBuild;
    char m_strModifier[XN_MAX_VERSION_MODIFIER_LENGTH];
} XnDetailedVersion;

/** Result of LINK_PROP_BOOT_STATUS. */
typedef struct XnBootStatus
{
    XnFileZone zone;
    XnBootErrorCode errorCode;
} XnBootStatus;

/** One firmware stream descriptor. */
typedef struct XnFwStreamInfo
{
    XnFwStreamType type;
    char creationInfo[80];
} XnFwStreamInfo;

/** Video mode of a firmware stream. */
typedef struct XnFwStreamVideoMode
{
    uint32_t m_nXRes;
    uint32_t m_nYRes;
    uint32_t m_nFPS;
    XnFwPixelFormat m_nPixelFormat;
    XnFwCompressionType m_nCompression;
} XnFwStreamVideoMode;

/** Argument for LINK_COMMAND_GET_FW_STREAM_LIST. */
typedef struct XnCommandGetFwStreamList
{
    uint32_t count; // in: number of allocated elements in streams array. out: number of written elements in the array
    XnFwStreamInfo* streams;
} XnCommandGetFwStreamList;

/** Argument for LINK_COMMAND_CREATE_FW_STREAM. */
typedef struct XnCommandCreateStream
{
    XnFwStreamType type;
    const char* creationInfo;
    uint32_t id; // out
} XnCommandCreateStream;

/** Argument for LINK_COMMAND_DESTROY_FW_STREAM. */
typedef struct XnCommandDestroyStream
{
    uint32_t id;
} XnCommandDestroyStream;

/** Argument for LINK_COMMAND_START_FW_STREAM. */
typedef struct XnCommandStartStream
{
    uint32_t id;
} XnCommandStartStream;

/** Argument for LINK_COMMAND_STOP_FW_STREAM. */
typedef struct XnCommandStopStream
{
    uint32_t id;
} XnCommandStopStream;

/** Argument for LINK_COMMAND_GET_FW_STREAM_VIDEO_MODE_LIST. */
typedef struct XnCommandGetFwStreamVideoModeList
{
    int streamId;
    uint32_t count; // in: number of allocated elements in videoModes array. out: number of written elements in the array
    XnFwStreamVideoMode* videoModes;
} XnCommandGetFwStreamVideoModeList;

/** Argument for LINK_COMMAND_SET_FW_STREAM_VIDEO_MODE. */
typedef struct XnCommandSetFwStreamVideoMode
{
    int streamId;
    XnFwStreamVideoMode videoMode;
} XnCommandSetFwStreamVideoMode;

/** Argument for LINK_COMMAND_GET_FW_STREAM_VIDEO_MODE. */
typedef struct XnCommandGetFwStreamVideoMode
{
    int streamId;
    XnFwStreamVideoMode videoMode; // out
} XnCommandGetFwStreamVideoMode;

/** Argument for LINK_COMMAND_SET_PROJECTOR_PULSE. */
typedef struct XnCommandSetProjectorPulse
{
    float delay; //start delay - delay time before start pulse
    float width; //DC - duty cycle - the percentage of the pulse out of total cycle
    float cycle;
} XnCommandSetProjectorPulse;

#pragma pack (pop)

#endif // PSLINK_H

/* -------------------------------------------------------------------------- */
/* /include/kinect2_tracker.hpp:                                              */
/**
 * \ref kinect2_tracker.hpp
 *
 * \date 20160322
 * \author Stephen Reddish
 * \version 1.0
 * \bug
 *  It will be quicker to work out the vec3s before passing them to the transform publisher
 *  Solve bodgey if torso
 */
elses 10 | * \copyright GNU Public License. 11 | */ 12 | 13 | #ifndef KINECT2_TRACKER_HPP_ 14 | #define KINECT2_TRACKER_HPP_ 15 | 16 | // ROS Dependencies 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | 24 | // Self-defined messages 25 | #include 26 | #include 27 | #include 28 | 29 | #include 30 | #include 31 | #include 32 | #include 33 | #include 34 | #include 35 | #include "NiTE.h" 36 | #include "visualization.hpp" 37 | #include 38 | #include 39 | #include 40 | #include 41 | #include 42 | 43 | #ifndef ALPHA 44 | #define ALPHA 1/256 45 | #endif 46 | 47 | #define MAX_USERS 10 48 | 49 | #define USER_MESSAGE(msg) \ 50 | {printf("[%08llu] User #%d:\t%s\n",ts, user.getId(),msg);} 51 | 52 | /// Joint map 53 | typedef std::map JointMap; 54 | 55 | /** 56 | * Class \ref kinect2_tracker. This class can track the skeleton of people and returns joints as a TF stream, 57 | */ 58 | class k2_tracker 59 | { 60 | public: 61 | /** 62 | * Constructor 63 | */ 64 | k2_tracker() : 65 | it_(nh_) 66 | { 67 | 68 | // Get some parameters from the server 69 | ros::NodeHandle pnh("~"); 70 | if (!pnh.getParam("tf_prefix", tf_prefix_)) 71 | { 72 | ROS_FATAL("tf_prefix not found on Param Server! Maybe you should add it to your launch file!"); 73 | ros::shutdown(); 74 | return; 75 | } 76 | if (!pnh.getParam("relative_frame", relative_frame_)) 77 | { 78 | ROS_FATAL("relative_frame not found on Param Server! 
Maybe you should add it to your launch file!"); 79 | ros::shutdown(); 80 | return; 81 | } 82 | 83 | // Initialize OpenNI 84 | if (openni::OpenNI::initialize() != openni::STATUS_OK) 85 | { 86 | ROS_FATAL("OpenNI initial error"); 87 | ros::shutdown(); 88 | return; 89 | } 90 | 91 | // Open the device 92 | if (devDevice_.open(openni::ANY_DEVICE) != openni::STATUS_OK) 93 | { 94 | ROS_FATAL("Can't Open Device"); 95 | ros::shutdown(); 96 | return; 97 | } 98 | ROS_INFO("Device opened"); 99 | 100 | // Initialize the tracker 101 | nite::NiTE::initialize(); 102 | 103 | // user tracker registration 104 | niteRc_ = userTracker_.create(); 105 | if (niteRc_ != nite::STATUS_OK) 106 | { 107 | ROS_FATAL("Couldn't create user tracker"); 108 | ros::shutdown(); 109 | return; 110 | } 111 | 112 | /////////////////////////////////////////////////// 113 | // Create color stream 114 | 115 | if( vsColorStream.create( devDevice_, openni::SENSOR_COLOR ) == openni::STATUS_OK ) 116 | { 117 | // set video mode 118 | openni::VideoMode mMode; 119 | //mMode.setResolution( 640, 480 ); 120 | mMode.setResolution( 640, 480 ); 121 | mMode.setFps( 30 ); 122 | mMode.setPixelFormat( openni::PIXEL_FORMAT_RGB888 ); 123 | 124 | if( vsColorStream.setVideoMode( mMode) != openni::STATUS_OK ) 125 | { 126 | ROS_INFO("Can't apply videomode\n"); 127 | //cout << "Can't apply VideoMode: " << OpenNI::getExtendedError() << endl; 128 | } 129 | 130 | // RGB image registration 131 | if( devDevice_.isImageRegistrationModeSupported( openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR ) ) 132 | { 133 | cout << "Depth Frame Working"; 134 | // devDevice_.setImageRegistrationMode( openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR ); 135 | } 136 | vsColorStream.setMirroringEnabled(false); 137 | } 138 | else 139 | { 140 | ROS_ERROR("Can't create color stream on device: ");// << OpenNI::getExtendedError() << endl; 141 | //cerr << "Can't create color stream on device: " << OpenNI::getExtendedError() << endl; 142 | return; 143 | } 144 | 
vsColorStream.start(); 145 | 146 | if( vsDepthStream.create( devDevice_, openni::SENSOR_DEPTH ) == openni::STATUS_OK ) 147 | { 148 | // set depth video mode 149 | openni::VideoMode mMode; 150 | mMode.setResolution( 512, 424 ); 151 | mMode.setFps( 30 ); 152 | mMode.setPixelFormat( openni::PIXEL_FORMAT_DEPTH_1_MM ); 153 | 154 | if( vsDepthStream.setVideoMode( mMode) != openni::STATUS_OK ) 155 | { 156 | ROS_INFO("Can't apply depth videomode\n"); 157 | } 158 | 159 | // depth image registration 160 | if( devDevice_.isImageRegistrationModeSupported( openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR ) ) 161 | { 162 | cout << "Depth Frame Working"; 163 | } 164 | vsDepthStream.setMirroringEnabled(false); 165 | 166 | } 167 | else 168 | { 169 | ROS_ERROR("Can't create depth stream on device: "); 170 | 171 | return; 172 | } 173 | vsDepthStream.start(); 174 | ///////////////////////////////////////////////////////// 175 | 176 | // Initialize the users IDs publisher 177 | userPub_ = nh_.advertise("/people_skeleton", 1); 178 | pointPub_ = nh_.advertise("/people_points", 1); 179 | pointVizPub_ = nh_.advertise("/people_points_viz", 1); 180 | imagePub_ = it_.advertise("/kinect_rgb", 1); 181 | DepthimagePub_ = it_.advertise("/kinect_depth", 3); 182 | // userPub_ = nh_.advertise("/people", 1); 183 | rate_ = new ros::Rate(100); 184 | 185 | } 186 | /** 187 | * Destructor 188 | */ 189 | ~k2_tracker() 190 | { 191 | nite::NiTE::shutdown(); 192 | } 193 | 194 | /** 195 | * Spinner!!! 
196 | */ 197 | void spinner() 198 | { 199 | // Broadcast the joint frames (if they exist) 200 | this->getSkeleton(); 201 | this->getRGB(); 202 | this->getDepth(); 203 | rate_->sleep(); 204 | } 205 | 206 | /** 207 | * Update the Users State 208 | * @param user: the user 209 | * @param ts: timestamp 210 | */ 211 | void updateUserState(const nite::UserData& user, unsigned long long ts) 212 | { 213 | if (user.isNew()) 214 | USER_MESSAGE("New") 215 | else if (user.isVisible() && !g_visibleUsers_[user.getId()]) 216 | USER_MESSAGE("Visible") 217 | else if (!user.isVisible() && g_visibleUsers_[user.getId()]) 218 | USER_MESSAGE("Out of Scene") 219 | else if (user.isLost()) 220 | USER_MESSAGE("Lost") 221 | 222 | g_visibleUsers_[user.getId()] = user.isVisible(); 223 | 224 | if (g_skeletonStates_[user.getId()] != user.getSkeleton().getState()) 225 | { 226 | switch (g_skeletonStates_[user.getId()] = user.getSkeleton().getState()) 227 | { 228 | case nite::SKELETON_NONE: 229 | USER_MESSAGE("Stopped tracking.") 230 | break; 231 | case nite::SKELETON_CALIBRATING: 232 | USER_MESSAGE("Calibrating...") 233 | break; 234 | case nite::SKELETON_TRACKED: 235 | USER_MESSAGE("Tracking!") 236 | break; 237 | case nite::SKELETON_CALIBRATION_ERROR_NOT_IN_POSE: 238 | case nite::SKELETON_CALIBRATION_ERROR_HANDS: 239 | case nite::SKELETON_CALIBRATION_ERROR_LEGS: 240 | case nite::SKELETON_CALIBRATION_ERROR_HEAD: 241 | case nite::SKELETON_CALIBRATION_ERROR_TORSO: 242 | USER_MESSAGE("Calibration Failed... 
:-|") 243 | break; 244 | } 245 | } 246 | } 247 | 248 | /** 249 | * Publish the joints over the TF stream 250 | * @param j_name: joint name 251 | * @param j: the joint 252 | * @param r: relative joint (joint j connects to) 253 | * @param uid: user's ID 254 | */ 255 | void publishJointTF(std::string j_name, nite::SkeletonJoint j, std::string r_name, nite::SkeletonJoint r, int uid) 256 | { 257 | if (j.getPositionConfidence() > 0.0) 258 | { 259 | tf::Vector3 currentVec3 = tf::Vector3(j.getPosition().x / 1000.0, j.getPosition().y / 1000.0, j.getPosition().z / 1000.0); 260 | tf::Transform transform; 261 | if (j_name != "torso") 262 | { 263 | tf::Vector3 rVec3 = tf::Vector3(r.getPosition().x / 1000.0, r.getPosition().y / 1000.0, r.getPosition().z / 1000.0); 264 | transform.setOrigin(currentVec3 - rVec3); 265 | transform.setRotation(tf::Quaternion(0,0,0,1)); 266 | } 267 | else 268 | { 269 | transform.setOrigin(currentVec3); 270 | transform.setRotation(tf::Quaternion(0,0,0,1)); 271 | } 272 | 273 | std::stringstream j_frame_id_stream; //stringstream of frame id values 274 | std::string j_frame_id; // string of the stringstream 275 | j_frame_id_stream << "/" << tf_prefix_ << "/user_" << uid << "/" << j_name; 276 | j_frame_id = j_frame_id_stream.str(); 277 | 278 | std::stringstream r_frame_id_stream; //stringstream of frame id values 279 | std::string r_frame_id; // string of the stringstream 280 | r_frame_id_stream << "/" << tf_prefix_ << "/user_" << uid << "/" << r_name; 281 | r_frame_id = r_frame_id_stream.str(); 282 | 283 | if (j_name == "torso") 284 | { 285 | 286 | tfBroadcast_.sendTransform(tf::StampedTransform(transform, ros::Time::now(), relative_frame_, j_frame_id)); 287 | } 288 | else 289 | { 290 | 291 | tfBroadcast_.sendTransform(tf::StampedTransform(transform, ros::Time::now(), r_frame_id, j_frame_id)); 292 | } 293 | } 294 | return; 295 | } 296 | 297 | //Publish the calibration tf_frame as the cross product of the shoulder vectors 298 | // This function publishes 
the calibration_space opposite the shoulders of the user 299 | void publishCalibrationOriginTF(nite::SkeletonJoint skelTorso, nite::SkeletonJoint skelRshoulder, nite::SkeletonJoint skelLshoulder, int uid) 300 | { 301 | if (skelTorso.getPositionConfidence() > 0.0) 302 | { 303 | tf::Transform calibrationOriginTransform; 304 | tf::Transform torsoTransform; 305 | 306 | tf::Vector3 torsoVec3 = tf::Vector3(skelTorso.getPosition().x / 1000.0, skelTorso.getPosition().y / 1000.0, skelTorso.getPosition().z / 1000.0); 307 | torsoTransform.setOrigin(torsoVec3); 308 | torsoTransform.setRotation(tf::Quaternion(0,0,0,1)); 309 | 310 | tf::Vector3 RshoulderVec3 = tf::Vector3(skelRshoulder.getPosition().x / 1000.0, skelRshoulder.getPosition().y / 1000.0, skelRshoulder.getPosition().z / 1000.0); //create a vector for the right shoulder 311 | RshoulderVec3 = (RshoulderVec3 - torsoVec3); //vector is the difference of the two 312 | 313 | tf::Vector3 LshoulderVec3 = tf::Vector3(skelLshoulder.getPosition().x / 1000.0, skelLshoulder.getPosition().y / 1000.0, skelLshoulder.getPosition().z / 1000.0); //create a vector for the left shoulder 314 | LshoulderVec3 = (LshoulderVec3 - torsoVec3); 315 | tf::Vector3 calibrationOriginVec3 = RshoulderVec3.cross(LshoulderVec3); 316 | 317 | //give the calibration origin some length 318 | // calibrationOriginVec3 = calibrationOriginVec3 * 20; 319 | calibrationOriginTransform.setOrigin(calibrationOriginVec3);// set the x,y,z coordinates of the calibration transform frame 320 | 321 | Eigen::Vector3d eigencalibrationOriginVec3; 322 | tf::vectorTFToEigen(calibrationOriginVec3, eigencalibrationOriginVec3); //conversion of tf:Vec3 to eigen 323 | Eigen::Vector3d eigenTorsoVec3; 324 | tf::vectorTFToEigen(torsoVec3, eigenTorsoVec3); //conversion of torse tf:vec3 to eigen 325 | Eigen::Quaterniond eigen_calibrationOrigenQuaternion; 326 | eigen_calibrationOrigenQuaternion.setFromTwoVectors(eigencalibrationOriginVec3, eigenTorsoVec3); 327 | tf::Quaternion 
tf_calibrationOriginQuaternion; 328 | tf::quaternionEigenToTF(eigen_calibrationOrigenQuaternion, tf_calibrationOriginQuaternion); 329 | 330 | calibrationOriginTransform.setRotation(tf_calibrationOriginQuaternion); 331 | 332 | std::stringstream calibration_frame_id_stream; //stringstream of frame id values 333 | std::string calibration_frame_id; // string of the stringstream 334 | calibration_frame_id_stream << "/" << tf_prefix_ << "/user_" << uid << "/calibrationOrigin"; 335 | calibration_frame_id = calibration_frame_id_stream.str(); 336 | 337 | std::stringstream r_frame_id_stream; //stringstream of frame id values 338 | std::string r_frame_id; // string of the stringstream 339 | r_frame_id_stream << "/" << tf_prefix_ << "/user_" << uid << "/torso"; 340 | r_frame_id = r_frame_id_stream.str(); 341 | 342 | tfBroadcast_.sendTransform(tf::StampedTransform(calibrationOriginTransform, ros::Time::now(), r_frame_id, calibration_frame_id)); 343 | } 344 | return; 345 | } 346 | 347 | /** 348 | * Get the RGB feed and publish it to ROS 349 | */ 350 | void getRGB() 351 | { 352 | openni::VideoFrameRef vfColorFrame; 353 | cv::Mat mImageBGR; 354 | if( vsColorStream.readFrame( &vfColorFrame ) == openni::STATUS_OK ) 355 | { 356 | // convert data to OpenCV format 357 | const cv::Mat mImageRGB( vfColorFrame.getHeight(), vfColorFrame.getWidth(), CV_8UC3, const_cast( vfColorFrame.getData() ) ); 358 | // convert form RGB to BGR 359 | cv::cvtColor( mImageRGB, mImageBGR, CV_RGB2BGR ); 360 | vfColorFrame.release(); 361 | 362 | sensor_msgs::ImagePtr msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", mImageBGR).toImageMsg(); 363 | imagePub_.publish(msg); 364 | } 365 | 366 | } 367 | 368 | /** 369 | * Get the Depth feed and publish it to ROS 370 | */ 371 | void getDepth() 372 | { 373 | openni::VideoFrameRef vfDepthFrame; 374 | cv::Mat mImageBGR; 375 | if( vsDepthStream.readFrame( &vfDepthFrame ) == openni::STATUS_OK ) 376 | { 377 | // convert depth data to OpenCV format 378 | const cv::Mat 
mImageDepth( vfDepthFrame.getHeight(), vfDepthFrame.getWidth(), CV_16UC1, const_cast( vfDepthFrame.getData() ) ); 379 | vfDepthFrame.release(); 380 | 381 | sensor_msgs::ImagePtr depth_msg = cv_bridge::CvImage(std_msgs::Header(), "mono16", mImageDepth).toImageMsg(); 382 | DepthimagePub_.publish(depth_msg); 383 | } 384 | } 385 | /** 386 | * Get the skeleton's joints and the users IDs and make them all relative to the Torso joint 387 | */ 388 | void getSkeleton() 389 | { 390 | // skeleton_tracker::user_IDs ids; 391 | kinect2_tracker::user_IDs ids; 392 | kinect2_tracker::user_points points; 393 | 394 | niteRc_ = userTracker_.readFrame(&userTrackerFrame_); 395 | if (niteRc_ != nite::STATUS_OK) 396 | { 397 | printf("Get next frame failed\n"); 398 | return; 399 | } 400 | 401 | // Get all the users 402 | const nite::Array& users = userTrackerFrame_.getUsers(); 403 | 404 | // Get the skeleton for every user 405 | for (int i = 0; i < users.getSize(); ++i) 406 | { 407 | const nite::UserData& user = users[i]; 408 | updateUserState(user, userTrackerFrame_.getTimestamp()); 409 | if (user.isNew()) 410 | { 411 | userTracker_.startSkeletonTracking(user.getId()); 412 | } 413 | else if (user.getSkeleton().getState() == nite::SKELETON_TRACKED) 414 | { 415 | JointMap named_joints; 416 | 417 | named_joints["torso"] = (user.getSkeleton().getJoint(nite::JOINT_TORSO));// this value is Joint_name, position & orientation & confidence, userid 418 | named_joints["left_hip"] = (user.getSkeleton().getJoint(nite::JOINT_LEFT_HIP)); 419 | named_joints["right_hip"] = (user.getSkeleton().getJoint(nite::JOINT_RIGHT_HIP)); 420 | named_joints["left_knee"] = (user.getSkeleton().getJoint(nite::JOINT_LEFT_KNEE)); 421 | named_joints["right_knee"] = (user.getSkeleton().getJoint(nite::JOINT_RIGHT_KNEE)); 422 | named_joints["left_foot"] = (user.getSkeleton().getJoint(nite::JOINT_LEFT_FOOT)); 423 | named_joints["right_foot"] = (user.getSkeleton().getJoint(nite::JOINT_RIGHT_FOOT)); 424 | named_joints["neck"] = 
(user.getSkeleton().getJoint(nite::JOINT_NECK)); 425 | named_joints["head"] = (user.getSkeleton().getJoint(nite::JOINT_HEAD)); 426 | named_joints["left_shoulder"] = (user.getSkeleton().getJoint(nite::JOINT_LEFT_SHOULDER)); 427 | named_joints["right_shoulder"] = (user.getSkeleton().getJoint(nite::JOINT_RIGHT_SHOULDER)); 428 | named_joints["left_elbow"] = (user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW)); 429 | named_joints["right_elbow"] = (user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW)); 430 | named_joints["left_hand"] = (user.getSkeleton().getJoint(nite::JOINT_LEFT_HAND)); 431 | named_joints["right_hand"] = (user.getSkeleton().getJoint(nite::JOINT_RIGHT_HAND)); 432 | //Publish the joint (name, niteConstruct, ConnectedJoint name, niteConstruct, User 433 | publishJointTF("torso", named_joints["torso"], "torso", named_joints["torso"], user.getId()); 434 | publishJointTF("left_hip", named_joints["left_hip"], "torso", named_joints["torso"], user.getId()); 435 | publishJointTF("right_hip", named_joints["right_hip"], "torso", named_joints["torso"], user.getId()); 436 | publishJointTF("left_knee", named_joints["left_knee"], "left_hip", named_joints["left_hip"], user.getId()); 437 | publishJointTF("right_knee", named_joints["right_knee"], "right_hip", named_joints["right_hip"], user.getId()); 438 | publishJointTF("left_foot", named_joints["left_foot"], "left_knee", named_joints["left_knee"], user.getId()); 439 | publishJointTF("right_foot", named_joints["right_foot"], "right_knee", named_joints["right_knee"], user.getId()); 440 | publishJointTF("neck", named_joints["neck"], "torso", named_joints["torso"], user.getId()); 441 | publishJointTF("head", named_joints["head"], "neck", named_joints["neck"], user.getId()); 442 | publishJointTF("left_shoulder", named_joints["left_shoulder"], "torso", named_joints["torso"], user.getId()); 443 | publishJointTF("right_shoulder", named_joints["right_shoulder"], "torso", named_joints["torso"], user.getId()); 444 | 
publishJointTF("left_elbow", named_joints["left_elbow"], "left_shoulder", named_joints["left_shoulder"], user.getId()); 445 | publishJointTF("right_elbow", named_joints["right_elbow"], "right_shoulder", named_joints["right_shoulder"], user.getId()); 446 | publishJointTF("left_hand", named_joints["left_hand"], "left_elbow", named_joints["left_elbow"], user.getId()); 447 | publishJointTF("right_hand", named_joints["right_hand"], "right_elbow", named_joints["right_elbow"], user.getId()); 448 | 449 | //publishes the funny normal vector from the users chest 450 | publishCalibrationOriginTF(named_joints["torso"], named_joints["left_shoulder"], named_joints["right_shoulder"], user.getId()); 451 | 452 | // Add the user's ID 453 | ids.users.push_back(int(user.getId())); 454 | } 455 | if(user.isVisible()){ 456 | // Adding center of mass of users 457 | points.users.push_back(int(user.getId())); 458 | nite::Point3f user_point = user.getCenterOfMass(); 459 | nite::BoundingBox boundingBox = user.getBoundingBox(); 460 | geometry_msgs::PointStamped p; 461 | kinect2_tracker::bounding_box bbox; 462 | bbox.min.point.x = boundingBox.min.x / 1000; 463 | bbox.min.point.y = boundingBox.min.y / 1000; 464 | bbox.min.point.z = boundingBox.min.z / 1000; 465 | bbox.max.point.x = boundingBox.max.x / 1000; 466 | bbox.max.point.y = boundingBox.max.y / 1000; 467 | bbox.max.point.z = boundingBox.max.z / 1000; 468 | p.header.stamp = ros::Time::now(); 469 | p.header.frame_id = relative_frame_; 470 | bbox.min.header = p.header; 471 | bbox.max.header = p.header; 472 | p.point.x = user_point.x / 1000; 473 | p.point.y = user_point.y / 1000; 474 | p.point.z = user_point.z / 1000; 475 | points.people_points.push_back(p); 476 | points.boxes.push_back(bbox); 477 | } 478 | } 479 | // Publish the users' IDs 480 | userPub_.publish(ids); 481 | pointPub_.publish(points); 482 | pointVizPub_.publish(getMarkers(points.people_points, relative_frame_)); 483 | } 484 | 485 | /// ROS NodeHandle 486 | ros::NodeHandle 
nh_; 487 | 488 | bool g_visibleUsers_[MAX_USERS] = {false}; 489 | nite::SkeletonState g_skeletonStates_[MAX_USERS] = {nite::SKELETON_NONE}; 490 | 491 | /// Image transport 492 | image_transport::ImageTransport it_; 493 | std::string tf_prefix_, relative_frame_; 494 | 495 | /// Frame broadcaster 496 | tf::TransformBroadcaster tfBroadcast_; 497 | 498 | /// The openni device 499 | openni::Device devDevice_; 500 | openni::VideoStream vsColorStream; 501 | openni::VideoStream vsDepthStream; 502 | 503 | /// Some NITE stuff 504 | nite::UserTracker userTracker_; 505 | nite::Status niteRc_; 506 | nite::UserTrackerFrameRef userTrackerFrame_; 507 | 508 | /// Users IDs publisher 509 | ros::Publisher userPub_; 510 | ros::Publisher pointPub_; 511 | ros::Publisher pointVizPub_; 512 | ros::Publisher boxPub_; 513 | 514 | //Image publisher 515 | // image_transport::ImageTransport it_; 516 | image_transport::Publisher imagePub_; 517 | image_transport::Publisher DepthimagePub_; 518 | /// Image message 519 | sensor_msgs::ImagePtr msg_; 520 | sensor_msgs::ImagePtr depth_msg_; 521 | /// Node rate 522 | ros::Rate* rate_; 523 | 524 | } 525 | ; 526 | 527 | #endif /* KINECT2_TRACKER_HPP_ */ 528 | -------------------------------------------------------------------------------- /include/visualization.hpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | 5 | using namespace std; 6 | 7 | visualization_msgs::Marker getMarkers(vector &pointArr, string &frame){ 8 | 9 | visualization_msgs::Marker sphere_list; 10 | sphere_list.header.frame_id= frame; 11 | sphere_list.header.stamp= ros::Time::now(); 12 | sphere_list.ns= "spheres"; 13 | sphere_list.action= visualization_msgs::Marker::ADD; 14 | sphere_list.pose.orientation.w= 1.0; 15 | 16 | sphere_list.id = 0; 17 | 18 | sphere_list.type = visualization_msgs::Marker::SPHERE_LIST; 19 | 20 | 21 | // POINTS markers use x and y scale for width/height respectively 22 | 
sphere_list.scale.x = 0.1; 23 | sphere_list.scale.y = 0.1; 24 | sphere_list.scale.z = 0.1; 25 | 26 | // Points are red 27 | sphere_list.color.r = 1.0f; 28 | sphere_list.color.a = 1.0; 29 | 30 | for (auto&& p : pointArr){ 31 | sphere_list.points.push_back(p.point); 32 | } 33 | 34 | return sphere_list; 35 | 36 | } -------------------------------------------------------------------------------- /launch/tracker.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /msg/bounding_box.msg: -------------------------------------------------------------------------------- 1 | geometry_msgs/PointStamped min 2 | geometry_msgs/PointStamped max -------------------------------------------------------------------------------- /msg/user_IDs.msg: -------------------------------------------------------------------------------- 1 | uint8[] users -------------------------------------------------------------------------------- /msg/user_points.msg: -------------------------------------------------------------------------------- 1 | # Message storing the points of users. 
2 | # Header header 3 | uint8[] users 4 | geometry_msgs/PointStamped[] people_points 5 | bounding_box[] boxes -------------------------------------------------------------------------------- /package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | kinect2_tracker 4 | 0.0.1 5 | The kinect2_tracker package 6 | 7 | Stephen 8 | BSD 9 | 10 | Stephen Reddish 11 | 12 | catkin 13 | cmake_modules 14 | cv_bridge 15 | geometry_msgs 16 | visualization_msgs 17 | image_transport 18 | kdl_conversions 19 | pcl_conversions 20 | pcl_ros 21 | roscpp 22 | roslib 23 | tf 24 | tf_conversions 25 | glut 26 | libxmu-dev 27 | libxi-dev 28 | message_generation 29 | 30 | cmake_modules 31 | cv_bridge 32 | geometry_msgs 33 | visualization_msgs 34 | image_transport 35 | kdl_conversions 36 | pcl_conversions 37 | pcl_ros 38 | roscpp 39 | roslib 40 | tf 41 | tf_conversions 42 | message_runtime 43 | 44 | 45 | -------------------------------------------------------------------------------- /setup_nite.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | ln -s ~/package_ws/NiTE-Linux-x64-2.2/Samples/Bin/NiTE2/ ~/.ros/NiTE2 3 | -------------------------------------------------------------------------------- /src/kinect2_tracker_node.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * * 3 | * PrimeSense NiTE 2.0 - User Viewer Sample * 4 | * Copyright (C) 2012 PrimeSense Ltd. 
* 5 | * * 6 | *******************************************************************************/ 7 | 8 | #include "kinect2_tracker.hpp" 9 | 10 | int main(int argc, char** argv) 11 | { 12 | //initiate ros skeleton_tracker 13 | ros::init(argc, argv, "skeleton_tracker"); 14 | 15 | k2_tracker* skeleton_tracker = new k2_tracker(); 16 | 17 | while (ros::ok()) 18 | { 19 | skeleton_tracker->spinner(); 20 | } 21 | 22 | delete skeleton_tracker; 23 | 24 | } 25 | --------------------------------------------------------------------------------