├── .gitignore
├── OpenNI.ini
├── OpenNI2
│   └── Drivers
│       ├── PS1080.ini
│       ├── PSLink.ini
│       ├── libOniFile.so
│       ├── libPS1080.so
│       └── libPSLink.so
├── README.md
├── libOpenNI2.jni.so
├── libOpenNI2.so
├── org.openni.jar
├── primesense
│   ├── __init__.py
│   ├── __init__.pyc
│   ├── _nite2.py
│   ├── _openni2.py
│   ├── _openni2.pyc
│   ├── nite2.py
│   ├── openni2.py
│   ├── openni2.pyc
│   ├── utils.py
│   └── utils.pyc
├── testPythonOpenni.py
├── testPythonOpenniFull.py
└── testPythonOpenniQuarter.py
/.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | 3 | -------------------------------------------------------------------------------- /OpenNI.ini: -------------------------------------------------------------------------------- 1 | [Log] 2 | ; 0 - Verbose; 1 - Info; 2 - Warning; 3 - Error. Default - None 3 | Verbosity=3 4 | LogToConsole=0 5 | LogToFile=0 6 | 7 | [Device] 8 | ;Override="" 9 | 10 | [Drivers] 11 | ; Location of the drivers specified by a relative path based on OpenNI's shared library or an absolute path. 12 | ; Path separator "/" can be used to be portable for any platforms. 13 | ; Default - OpenNI2/Drivers 14 | ;Repository=OpenNI2/Drivers 15 | -------------------------------------------------------------------------------- /OpenNI2/Drivers/PS1080.ini: -------------------------------------------------------------------------------- 1 | ;---------------- Sensor Default Configuration ------------------- 2 | [Device] 3 | ; Mirroring. 0 - Off (default), 1 - On 4 | ;Mirror=1 5 | 6 | ; FrameSync. 0 - Off (default), 1 - On 7 | ;FrameSync=1 8 | 9 | ; Stream Data Timestamps. 0 - milliseconds, 1 - microseconds (default) 10 | ;HighResTimestamps=1 11 | 12 | ; Stream Data Timestamps Source. 0 - Firmware (default), 1 - Host 13 | ;HostTimestamps=0 14 | 15 | ; A filter for the firmware log. Default is determined by firmware. 16 | ;FirmwareLogFilter=0 17 | 18 | ; Automatic firmware log retrieval. 0 - Off (default), or the number of milliseconds between log retrieval operations. 19 | ;FirmwareLogInterval=1000 20 | 21 | ; Print firmware log to console when automatic firmware log retrieval is on. 0 - Off (default), 1 - On 22 | ;FirmwareLogPrint=1 23 | 24 | ; Is APC enabled. 0 - Off, 1 - On (default) 25 | ;APCEnabled=1 26 | 27 | ; USB interface to be used. 0 - FW Default, 1 - ISO endpoints (default on Windows), 2 - BULK endpoints (default on Linux/Mac/Android machines), 3 - ISO endpoints for low-bandwidth depth 28 | ;UsbInterface=2 29 | 30 | [Depth] 31 | ; Output format. 100 - 1mm depth values (default), 102 - u9.2 Shift values. 32 | ;OutputFormat=102 33 | 34 | ; Is stream mirrored. 0 - Off, 1 - On 35 | ;Mirror=1 36 | 37 | ; 0 - QVGA, 1 - VGA, 4 - QQVGA. Default: Arm - 4, other platforms - 0 38 | ;Resolution=1 39 | 40 | ; Frames per second (default is 30) 41 | ;FPS=30 42 | 43 | ; Min depth cutoff. 0-10000 mm (default is 0) 44 | ;MinDepthValue=0 45 | 46 | ; Max depth cutoff. 0-10000 mm (default is 10000) 47 | ;MaxDepthValue=10000 48 | 49 | ; Input format. 0 - Uncompressed 16-bit, 1 - PS Compression, 3 - Packed 11-bit, 4 - Packed 12-bit. Default: Arm - 4, other platforms - 3 50 | ;InputFormat=1 51 | 52 | ; Registration. 0 - Off (default), 1 - On 53 | ;Registration=1 54 | 55 | ; Registration Type. 0 - Don't care (default), 1 - use hardware acceleration, 2 - perform in software 56 | ;RegistrationType=0 57 | 58 | ; Hole Filler. 0 - Off, 1 - On (default) 59 | ;HoleFilter=1 60 | 61 | ; White Balance. 0 - Off, 1 - On (default) 62 | ;WhiteBalancedEnabled=1 63 | 64 | ; Gain.
0-50 (0 - Auto, 1 - Min., 50 - Max.). Default value is set by firmware. 65 | ;Gain=0 66 | 67 | ; Close Range Mode. 0 - Off (default), 1 - On 68 | ;CloseRange=0 69 | 70 | ; GMC Mode. 0 - Off, 1 - On (default) 71 | ;GMCMode=0 72 | 73 | ; GMC Debug. 0 - Off (default), 1 - On 74 | ;GMCDebug=1 75 | 76 | ; Depth Auto Gain Region-of-Interest. Default values are set by firmware. 77 | ;DepthAGCBin0MinDepth=500 78 | ;DepthAGCBin0MaxDepth=800 79 | ;DepthAGCBin1MinDepth=1500 80 | ;DepthAGCBin1MaxDepth=1800 81 | ;DepthAGCBin2MinDepth=2500 82 | ;DepthAGCBin2MaxDepth=2800 83 | ;DepthAGCBin3MinDepth=3500 84 | ;DepthAGCBin3MaxDepth=3800 85 | 86 | ; Wavelength Correction Mechanism. 0 - Off (default), 1 - On 87 | ;WavelengthCorrection=1 88 | 89 | ; Wavelength Correction debug info. 0 - Off (default), 1 - On 90 | ;WavelengthCorrectionDebug=1 91 | 92 | ; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only 93 | ;CroppingMode=1 94 | 95 | ; Cropping area 96 | [Depth.Cropping] 97 | ;OffsetX=0 98 | ;OffsetY=0 99 | ;SizeX=320 100 | ;SizeY=240 101 | ;Enabled=1 102 | 103 | [Image] 104 | ; Output format. 200 - RGB888 (default), 201 - YUV422, 202 - Gray8 (2.0 MP only), 205 - YUYV 105 | ;OutputFormat=200 106 | 107 | ; Is stream mirrored. 0 - Off, 1 - On 108 | ;Mirror=1 109 | 110 | ; 0 - QVGA (default), 1 - VGA, 2 - SXGA (1.3MP), 3 - UXGA (2.0MP), 14 - 720p, 15 - 1280x960 111 | ;Resolution=1 112 | 113 | ; Frames per second (default is 30) 114 | ;FPS=30 115 | 116 | ; Input format. 0 - Compressed 8-bit BAYER (1.3MP or 2.0MP only), 1 - Compressed YUV422 (default in BULK), 2 - Jpeg, 5 - Uncompressed YUV422 (default in ISO), 6 - Uncompressed 8-bit BAYER (1.3MP or 2.0MP only), 7 - Uncompressed YUYV 117 | ;InputFormat=5 118 | 119 | ; Anti Flicker. 0 - Off (default), 50 - 50Hz, 60 - 60 Hz. 120 | ;Flicker=50 121 | 122 | ; Image quality when using Jpeg. 1-10 (1 - Lowest, 10 - Highest (default)) 123 | ;Quality=10 124 | 125 | ; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only 126 | ;CroppingMode=1 127 | 128 | ; Cropping area 129 | [Image.Cropping] 130 | ;OffsetX=0 131 | ;OffsetY=0 132 | ;SizeX=320 133 | ;SizeY=240 134 | ;Enabled=1 135 | 136 | [IR] 137 | ; Output format. 200 - RGB888, 203 - Grayscale 16-bit (default) 138 | ;OutputFormat=203 139 | 140 | ; Is stream mirrored. 0 - Off, 1 - On 141 | ;Mirror=1 142 | 143 | ; 0 - QVGA (default), 1 - VGA, 2 - SXGA(1.3MP) 144 | ;Resolution=1 145 | 146 | ; Frames per second (default is 30) 147 | ;FPS=30 148 | 149 | ; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only 150 | ;CroppingMode=1 151 | 152 | ; Cropping area 153 | [IR.Cropping] 154 | ;OffsetX=0 155 | ;OffsetY=0 156 | ;SizeX=320 157 | ;SizeY=240 158 | ;Enabled=1 159 | -------------------------------------------------------------------------------- /OpenNI2/Drivers/PSLink.ini: -------------------------------------------------------------------------------- 1 | ;---------------- PSLink Driver Default Configuration ------------------- 2 | 3 | [Device] 4 | ; USB interface to be used. 0 - FW Default, 1 - ISO endpoints (default on Windows), 2 - BULK endpoints (default on Linux/Mac/Android machines) 5 | ;UsbInterface=2 6 | 7 | [Depth] 8 | ; Allows dumping all frames to files. 0 - Off (default), 1 - On 9 | ;DumpData=1 10 | 11 | ; Allow flipping the frame horizontally. 0 - Off, 1 - On (default) 12 | ;Mirror=0 13 | 14 | ; Compression of the data passed from device to host. 0 - None, 2 - 16z, 6 - 11-bit packed, 7 - 12-bit packed. 
Default is set by the firmware 15 | ;Compression=2 16 | 17 | [Depth.VideoMode] 18 | ; Pixel Format. 100 - Depth 1 mm, 101 - Depth 100 um, 102 - Shifts 9.2, 103 - Shifts 9.3 19 | ;PixelFormat=100 20 | ; Requested X resolution 21 | ;XResolution=320 22 | ; Requested Y resolution 23 | ;YResolution=240 24 | ; Requested FPS 25 | ;FPS=30 26 | 27 | [IR] 28 | ; Allows dumping all frames to files. 0 - Off (default), 1 - On 29 | ;DumpData=1 30 | 31 | ; Allow flipping the frame horizontally. 0 - Off, 1 - On (default) 32 | ;Mirror=0 33 | 34 | ; Compression of the data passed from device to host. 0 - None, 5 - 10-bit packed. Default is set by the firmware 35 | ;Compression=5 36 | 37 | [IR.VideoMode] 38 | ; Pixel Format. 200 - RGB888, 202 - Grayscale 8-bit, 203 - Grayscale 16-bit 39 | ;PixelFormat=200 40 | ; Requested X resolution 41 | ;XResolution=320 42 | ; Requested Y resolution 43 | ;YResolution=240 44 | ; Requested FPS 45 | ;FPS=30 46 | -------------------------------------------------------------------------------- /OpenNI2/Drivers/libOniFile.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/leezl/OpenNi-Python/4a0731a1f76011260b0c044922e8d20ccb61adb6/OpenNI2/Drivers/libOniFile.so -------------------------------------------------------------------------------- /OpenNI2/Drivers/libPS1080.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/leezl/OpenNi-Python/4a0731a1f76011260b0c044922e8d20ccb61adb6/OpenNI2/Drivers/libPS1080.so -------------------------------------------------------------------------------- /OpenNI2/Drivers/libPSLink.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/leezl/OpenNi-Python/4a0731a1f76011260b0c044922e8d20ccb61adb6/OpenNI2/Drivers/libPSLink.so -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | OpenNI Python tests 2 | ==================== 3 | This is a small test repository for playing with the Python bindings for OpenNI. 4 | 5 | Currently on my system: 6 | Ubuntu 12.04 7 | OpenNI 2.2 8 | NiTE 2.2 9 | Python bindings (primesense) 2.2 10 | Carmine 1.08 camera 11 | 12 | My computer is an old HP Pavilion dv6. I could not get the camera working on a Lenovo Y500: after fixing the permission errors (had to be root), I had timeout issues, which may still be USB setting problems. 13 | 14 | Oddly, this error went away after several restarts... 15 | 16 | New problem: Arch Linux install 17 | 18 | Strange Issues: 19 | ===================== 20 | OpenNI and Python: 21 | * Color streams appear strange if they are stopped and then restarted. 22 | * Color and depth syncing cannot be activated? 23 | -> the function takes no parameters, but complains in the OpenNI code that a parameter is wrong 24 | * Color and depth can be read at the same time at quarter size (320, 240, :), or if depth is (640, 480, 1), but not if color is (640, 480, 3), regardless of what depth is. Note: color can be read at this size by itself (see the sketch below).
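For reference, here is a minimal sketch of the quarter-size workaround described above. It is not part of the original tests: it assumes the bundled `primesense` bindings are importable, the shared libraries in this repository are on the loader path, and an OpenNI2-compatible camera (e.g. the Carmine 1.08) is attached.

```python
# Sketch only: reads depth and color together at QVGA (320x240), the
# combination noted above as working; adjust formats/paths for your setup.
from primesense import openni2
from primesense import _openni2 as c_api

openni2.initialize()                 # loads libOpenNI2.so from OPENNI2_REDIST or the cwd
dev = openni2.Device.open_any()

depth_stream = dev.create_depth_stream()
color_stream = dev.create_color_stream()

# Request quarter size for both streams before starting them.
depth_stream.set_video_mode(c_api.OniVideoMode(
    pixelFormat=c_api.OniPixelFormat.ONI_PIXEL_FORMAT_DEPTH_1_MM,
    resolutionX=320, resolutionY=240, fps=30))
color_stream.set_video_mode(c_api.OniVideoMode(
    pixelFormat=c_api.OniPixelFormat.ONI_PIXEL_FORMAT_RGB888,
    resolutionX=320, resolutionY=240, fps=30))

depth_stream.start()
color_stream.start()
try:
    depth_frame = depth_stream.read_frame()   # 16-bit depth values in mm
    color_frame = color_stream.read_frame()   # packed RGB888
    depth_buf = depth_frame.get_buffer_as_uint16()
    color_buf = color_frame.get_buffer_as_uint8()
finally:
    depth_stream.stop()
    color_stream.stop()
    openni2.unload()
```

Judging by its name, testPythonOpenniQuarter.py presumably exercises this same quarter-size configuration.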
25 | -------------------------------------------------------------------------------- /libOpenNI2.jni.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/leezl/OpenNi-Python/4a0731a1f76011260b0c044922e8d20ccb61adb6/libOpenNI2.jni.so -------------------------------------------------------------------------------- /libOpenNI2.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/leezl/OpenNi-Python/4a0731a1f76011260b0c044922e8d20ccb61adb6/libOpenNI2.so -------------------------------------------------------------------------------- /org.openni.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/leezl/OpenNi-Python/4a0731a1f76011260b0c044922e8d20ccb61adb6/org.openni.jar -------------------------------------------------------------------------------- /primesense/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/leezl/OpenNi-Python/4a0731a1f76011260b0c044922e8d20ccb61adb6/primesense/__init__.py -------------------------------------------------------------------------------- /primesense/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/leezl/OpenNi-Python/4a0731a1f76011260b0c044922e8d20ccb61adb6/primesense/__init__.pyc -------------------------------------------------------------------------------- /primesense/_nite2.py: -------------------------------------------------------------------------------- 1 | # Auto-generated file; do not edit directly 2 | # Tue Jul 16 14:20:09 2013 3 | import sys 4 | 5 | import ctypes 6 | from primesense.utils import CEnum, UnloadedDLL 7 | 8 | TRUE = 1 9 | FALSE = 0 10 | ONI_MAX_STR = 256 11 | ONI_MAX_SENSORS = 10 12 | NITE_JOINT_COUNT = 15 13 | NITE_POSE_COUNT = 2 14 | NITE_VERSION_MAJOR = 2 15 | NITE_VERSION_MINOR = 2 16 | NITE_VERSION_MAINTENANCE = 0 17 | NITE_VERSION_BUILD = 10 18 | NITE_VERSION = ( NITE_VERSION_MAJOR * 100000000 + NITE_VERSION_MINOR * 1000000 + NITE_VERSION_MAINTENANCE * 10000 + NITE_VERSION_BUILD ) 19 | 20 | def _get_calling_conv(*args): 21 | if sys.platform == 'win32': 22 | return ctypes.WINFUNCTYPE(*args) 23 | else: 24 | return ctypes.CFUNCTYPE(*args) 25 | 26 | from _openni2 import OniStatus 27 | from _openni2 import OniSensorType 28 | from _openni2 import OniPixelFormat 29 | from _openni2 import OniDeviceState 30 | from _openni2 import OniImageRegistrationMode 31 | class _anon_enum_5(CEnum): 32 | _names_ = {'ONI_TIMEOUT_NONE': 0, 'ONI_TIMEOUT_FOREVER': -1} 33 | _values_ = {0: 'ONI_TIMEOUT_NONE', -1: 'ONI_TIMEOUT_FOREVER'} 34 | ONI_TIMEOUT_NONE = 0 35 | ONI_TIMEOUT_FOREVER = -1 36 | 37 | ONI_TIMEOUT_NONE = _anon_enum_5.ONI_TIMEOUT_NONE 38 | ONI_TIMEOUT_FOREVER = _anon_enum_5.ONI_TIMEOUT_FOREVER 39 | 40 | from _openni2 import OniCallbackHandleImpl 41 | from _openni2 import OniVersion 42 | from _openni2 import OniVideoMode 43 | from _openni2 import OniSensorInfo 44 | from _openni2 import OniDeviceInfo 45 | class _OniDevice(ctypes.Structure): 46 | 47 | def __repr__(self): 48 | return '_OniDevice()' % () 49 | 50 | class _OniStream(ctypes.Structure): 51 | 52 | def __repr__(self): 53 | return '_OniStream()' % () 54 | 55 | class _OniRecorder(ctypes.Structure): 56 | 57 | def __repr__(self): 58 | return '_OniRecorder()' % () 59 | 60 | from _openni2 import OniFrame 61 | from _openni2 import 
OniDeviceCallbacks 62 | from _openni2 import OniCropping 63 | from _openni2 import OniRGB888Pixel 64 | from _openni2 import OniYUV422DoublePixel 65 | from _openni2 import OniSeek 66 | class NiteJointType(CEnum): 67 | _names_ = {'NITE_JOINT_RIGHT_ELBOW': 5, 'NITE_JOINT_LEFT_ELBOW': 4, 'NITE_JOINT_RIGHT_KNEE': 12, 'NITE_JOINT_RIGHT_HAND': 7, 'NITE_JOINT_RIGHT_SHOULDER': 3, 'NITE_JOINT_HEAD': 0, 'NITE_JOINT_LEFT_HAND': 6, 'NITE_JOINT_LEFT_SHOULDER': 2, 'NITE_JOINT_LEFT_KNEE': 11, 'NITE_JOINT_TORSO': 8, 'NITE_JOINT_NECK': 1, 'NITE_JOINT_LEFT_HIP': 9, 'NITE_JOINT_RIGHT_HIP': 10, 'NITE_JOINT_LEFT_FOOT': 13, 'NITE_JOINT_RIGHT_FOOT': 14} 68 | _values_ = {0: 'NITE_JOINT_HEAD', 1: 'NITE_JOINT_NECK', 2: 'NITE_JOINT_LEFT_SHOULDER', 3: 'NITE_JOINT_RIGHT_SHOULDER', 4: 'NITE_JOINT_LEFT_ELBOW', 5: 'NITE_JOINT_RIGHT_ELBOW', 6: 'NITE_JOINT_LEFT_HAND', 7: 'NITE_JOINT_RIGHT_HAND', 8: 'NITE_JOINT_TORSO', 9: 'NITE_JOINT_LEFT_HIP', 10: 'NITE_JOINT_RIGHT_HIP', 11: 'NITE_JOINT_LEFT_KNEE', 12: 'NITE_JOINT_RIGHT_KNEE', 13: 'NITE_JOINT_LEFT_FOOT', 14: 'NITE_JOINT_RIGHT_FOOT'} 69 | NITE_JOINT_HEAD = 0 70 | NITE_JOINT_NECK = 1 71 | NITE_JOINT_LEFT_SHOULDER = 2 72 | NITE_JOINT_RIGHT_SHOULDER = 3 73 | NITE_JOINT_LEFT_ELBOW = 4 74 | NITE_JOINT_RIGHT_ELBOW = 5 75 | NITE_JOINT_LEFT_HAND = 6 76 | NITE_JOINT_RIGHT_HAND = 7 77 | NITE_JOINT_TORSO = 8 78 | NITE_JOINT_LEFT_HIP = 9 79 | NITE_JOINT_RIGHT_HIP = 10 80 | NITE_JOINT_LEFT_KNEE = 11 81 | NITE_JOINT_RIGHT_KNEE = 12 82 | NITE_JOINT_LEFT_FOOT = 13 83 | NITE_JOINT_RIGHT_FOOT = 14 84 | 85 | class NiteSkeletonState(CEnum): 86 | _names_ = {'NITE_SKELETON_CALIBRATION_ERROR_HEAD': 5, 'NITE_SKELETON_CALIBRATION_ERROR_NOT_IN_POSE': 3, 'NITE_SKELETON_TRACKED': 2, 'NITE_SKELETON_CALIBRATION_ERROR_LEGS': 6, 'NITE_SKELETON_CALIBRATION_ERROR_HANDS': 4, 'NITE_SKELETON_CALIBRATING': 1, 'NITE_SKELETON_CALIBRATION_ERROR_TORSO': 7, 'NITE_SKELETON_NONE': 0} 87 | _values_ = {0: 'NITE_SKELETON_NONE', 1: 'NITE_SKELETON_CALIBRATING', 2: 'NITE_SKELETON_TRACKED', 3: 'NITE_SKELETON_CALIBRATION_ERROR_NOT_IN_POSE', 4: 'NITE_SKELETON_CALIBRATION_ERROR_HANDS', 5: 'NITE_SKELETON_CALIBRATION_ERROR_HEAD', 6: 'NITE_SKELETON_CALIBRATION_ERROR_LEGS', 7: 'NITE_SKELETON_CALIBRATION_ERROR_TORSO'} 88 | NITE_SKELETON_NONE = 0 89 | NITE_SKELETON_CALIBRATING = 1 90 | NITE_SKELETON_TRACKED = 2 91 | NITE_SKELETON_CALIBRATION_ERROR_NOT_IN_POSE = 3 92 | NITE_SKELETON_CALIBRATION_ERROR_HANDS = 4 93 | NITE_SKELETON_CALIBRATION_ERROR_HEAD = 5 94 | NITE_SKELETON_CALIBRATION_ERROR_LEGS = 6 95 | NITE_SKELETON_CALIBRATION_ERROR_TORSO = 7 96 | 97 | class NiteUserState(CEnum): 98 | _names_ = {'NITE_USER_STATE_VISIBLE': 1, 'NITE_USER_STATE_NEW': 2, 'NITE_USER_STATE_LOST': 4} 99 | _values_ = {1: 'NITE_USER_STATE_VISIBLE', 2: 'NITE_USER_STATE_NEW', 4: 'NITE_USER_STATE_LOST'} 100 | NITE_USER_STATE_VISIBLE = 1 101 | NITE_USER_STATE_NEW = 2 102 | NITE_USER_STATE_LOST = 4 103 | 104 | class NiteStatus(CEnum): 105 | _names_ = {'NITE_STATUS_OUT_OF_FLOW': 3, 'NITE_STATUS_ERROR': 1, 'NITE_STATUS_BAD_USER_ID': 2, 'NITE_STATUS_OK': 0} 106 | _values_ = {0: 'NITE_STATUS_OK', 1: 'NITE_STATUS_ERROR', 2: 'NITE_STATUS_BAD_USER_ID', 3: 'NITE_STATUS_OUT_OF_FLOW'} 107 | NITE_STATUS_OK = 0 108 | NITE_STATUS_ERROR = 1 109 | NITE_STATUS_BAD_USER_ID = 2 110 | NITE_STATUS_OUT_OF_FLOW = 3 111 | 112 | class NitePoseType(CEnum): 113 | _names_ = {'NITE_POSE_CROSSED_HANDS': 1, 'NITE_POSE_PSI': 0} 114 | _values_ = {0: 'NITE_POSE_PSI', 1: 'NITE_POSE_CROSSED_HANDS'} 115 | NITE_POSE_PSI = 0 116 | NITE_POSE_CROSSED_HANDS = 1 117 | 118 | class NitePoseState(CEnum): 
119 | _names_ = {'NITE_POSE_STATE_IN_POSE': 2, 'NITE_POSE_STATE_ENTER': 4, 'NITE_POSE_STATE_DETECTING': 1, 'NITE_POSE_STATE_EXIT': 8} 120 | _values_ = {8: 'NITE_POSE_STATE_EXIT', 1: 'NITE_POSE_STATE_DETECTING', 2: 'NITE_POSE_STATE_IN_POSE', 4: 'NITE_POSE_STATE_ENTER'} 121 | NITE_POSE_STATE_DETECTING = 1 122 | NITE_POSE_STATE_IN_POSE = 2 123 | NITE_POSE_STATE_ENTER = 4 124 | NITE_POSE_STATE_EXIT = 8 125 | 126 | class NiteGestureType(CEnum): 127 | _names_ = {'NITE_GESTURE_HAND_RAISE': 2, 'NITE_GESTURE_CLICK': 1, 'NITE_GESTURE_WAVE': 0} 128 | _values_ = {0: 'NITE_GESTURE_WAVE', 1: 'NITE_GESTURE_CLICK', 2: 'NITE_GESTURE_HAND_RAISE'} 129 | NITE_GESTURE_WAVE = 0 130 | NITE_GESTURE_CLICK = 1 131 | NITE_GESTURE_HAND_RAISE = 2 132 | 133 | class NiteGestureState(CEnum): 134 | _names_ = {'NITE_GESTURE_STATE_IN_PROGRESS': 2, 'NITE_GESTURE_STATE_COMPLETED': 4, 'NITE_GESTURE_STATE_NEW': 1} 135 | _values_ = {1: 'NITE_GESTURE_STATE_NEW', 2: 'NITE_GESTURE_STATE_IN_PROGRESS', 4: 'NITE_GESTURE_STATE_COMPLETED'} 136 | NITE_GESTURE_STATE_NEW = 1 137 | NITE_GESTURE_STATE_IN_PROGRESS = 2 138 | NITE_GESTURE_STATE_COMPLETED = 4 139 | 140 | class NiteHandState(CEnum): 141 | _names_ = {'NITE_HAND_STATE_NEW': 1, 'NITE_HAND_STATE_TRACKED': 2, 'NITE_HAND_STATE_LOST': 0, 'NITE_HAND_STATE_TOUCHING_FOV': 4} 142 | _values_ = {0: 'NITE_HAND_STATE_LOST', 1: 'NITE_HAND_STATE_NEW', 2: 'NITE_HAND_STATE_TRACKED', 4: 'NITE_HAND_STATE_TOUCHING_FOV'} 143 | NITE_HAND_STATE_LOST = 0 144 | NITE_HAND_STATE_NEW = 1 145 | NITE_HAND_STATE_TRACKED = 2 146 | NITE_HAND_STATE_TOUCHING_FOV = 4 147 | 148 | class NiteUserTracker(ctypes.Structure): 149 | 150 | def __repr__(self): 151 | return 'NiteUserTracker()' % () 152 | 153 | class NitePoint3f(ctypes.Structure): 154 | x = 'ctypes.c_float' 155 | y = 'ctypes.c_float' 156 | z = 'ctypes.c_float' 157 | 158 | def __repr__(self): 159 | return 'NitePoint3f(x = %r, y = %r, z = %r)' % (self.x, self.y, self.z) 160 | 161 | class NiteQuaternion(ctypes.Structure): 162 | x = 'ctypes.c_float' 163 | y = 'ctypes.c_float' 164 | z = 'ctypes.c_float' 165 | w = 'ctypes.c_float' 166 | 167 | def __repr__(self): 168 | return 'NiteQuaternion(x = %r, y = %r, z = %r, w = %r)' % (self.x, self.y, self.z, self.w) 169 | 170 | class NiteSkeletonJoint(ctypes.Structure): 171 | jointType = 'NiteJointType' 172 | position = 'NitePoint3f' 173 | positionConfidence = 'ctypes.c_float' 174 | orientation = 'NiteQuaternion' 175 | orientationConfidence = 'ctypes.c_float' 176 | 177 | def __repr__(self): 178 | return 'NiteSkeletonJoint(jointType = %r, position = %r, positionConfidence = %r, orientation = %r, orientationConfidence = %r)' % (self.jointType, self.position, self.positionConfidence, self.orientation, self.orientationConfidence) 179 | 180 | class NiteBoundingBox(ctypes.Structure): 181 | min = 'NitePoint3f' 182 | max = 'NitePoint3f' 183 | 184 | def __repr__(self): 185 | return 'NiteBoundingBox(min = %r, max = %r)' % (self.min, self.max) 186 | 187 | class NitePoseData(ctypes.Structure): 188 | type = 'NitePoseType' 189 | state = 'ctypes.c_int' 190 | 191 | def __repr__(self): 192 | return 'NitePoseData(type = %r, state = %r)' % (self.type, self.state) 193 | 194 | class NiteSkeleton(ctypes.Structure): 195 | joints = '(NiteSkeletonJoint * 15)' 196 | state = 'NiteSkeletonState' 197 | 198 | def __repr__(self): 199 | return 'NiteSkeleton(joints = %r, state = %r)' % (self.joints, self.state) 200 | 201 | class NiteUserData(ctypes.Structure): 202 | id = 'NiteUserId' 203 | boundingBox = 'NiteBoundingBox' 204 | centerOfMass = 'NitePoint3f' 205 
| state = 'ctypes.c_int' 206 | skeleton = 'NiteSkeleton' 207 | poses = '(NitePoseData * 2)' 208 | 209 | def __repr__(self): 210 | return 'NiteUserData(id = %r, boundingBox = %r, centerOfMass = %r, state = %r, skeleton = %r, poses = %r)' % (self.id, self.boundingBox, self.centerOfMass, self.state, self.skeleton, self.poses) 211 | 212 | class NiteUserMap(ctypes.Structure): 213 | pixels = 'ctypes.POINTER(NiteUserId)' 214 | width = 'ctypes.c_int' 215 | height = 'ctypes.c_int' 216 | stride = 'ctypes.c_int' 217 | 218 | def __repr__(self): 219 | return 'NiteUserMap(pixels = %r, width = %r, height = %r, stride = %r)' % (self.pixels, self.width, self.height, self.stride) 220 | 221 | class NitePlane(ctypes.Structure): 222 | point = 'NitePoint3f' 223 | normal = 'NitePoint3f' 224 | 225 | def __repr__(self): 226 | return 'NitePlane(point = %r, normal = %r)' % (self.point, self.normal) 227 | 228 | class NiteUserTrackerFrame(ctypes.Structure): 229 | userCount = 'ctypes.c_int' 230 | pUser = 'ctypes.POINTER(NiteUserData)' 231 | userMap = 'NiteUserMap' 232 | pDepthFrame = 'ctypes.POINTER(OniFrame)' 233 | timestamp = 'ctypes.c_ulonglong' 234 | frameIndex = 'ctypes.c_int' 235 | floorConfidence = 'ctypes.c_float' 236 | floor = 'NitePlane' 237 | 238 | def __repr__(self): 239 | return 'NiteUserTrackerFrame(userCount = %r, pUser = %r, userMap = %r, pDepthFrame = %r, timestamp = %r, frameIndex = %r, floorConfidence = %r, floor = %r)' % (self.userCount, self.pUser, self.userMap, self.pDepthFrame, self.timestamp, self.frameIndex, self.floorConfidence, self.floor) 240 | 241 | class NiteUserTrackerCallbacks(ctypes.Structure): 242 | readyForNextFrame = 'OniGeneralCallback' 243 | 244 | def __repr__(self): 245 | return 'NiteUserTrackerCallbacks(readyForNextFrame = %r)' % (self.readyForNextFrame) 246 | 247 | class NiteHandData(ctypes.Structure): 248 | id = 'NiteHandId' 249 | position = 'NitePoint3f' 250 | state = 'ctypes.c_int' 251 | 252 | def __repr__(self): 253 | return 'NiteHandData(id = %r, position = %r, state = %r)' % (self.id, self.position, self.state) 254 | 255 | class NiteGestureData(ctypes.Structure): 256 | type = 'NiteGestureType' 257 | currentPosition = 'NitePoint3f' 258 | state = 'ctypes.c_int' 259 | 260 | def __repr__(self): 261 | return 'NiteGestureData(type = %r, currentPosition = %r, state = %r)' % (self.type, self.currentPosition, self.state) 262 | 263 | class NiteHandTrackerFrame(ctypes.Structure): 264 | handCount = 'ctypes.c_int' 265 | pHands = 'ctypes.POINTER(NiteHandData)' 266 | gestureCount = 'ctypes.c_int' 267 | pGestures = 'ctypes.POINTER(NiteGestureData)' 268 | pDepthFrame = 'ctypes.POINTER(OniFrame)' 269 | timestamp = 'ctypes.c_ulonglong' 270 | frameIndex = 'ctypes.c_int' 271 | 272 | def __repr__(self): 273 | return 'NiteHandTrackerFrame(handCount = %r, pHands = %r, gestureCount = %r, pGestures = %r, pDepthFrame = %r, timestamp = %r, frameIndex = %r)' % (self.handCount, self.pHands, self.gestureCount, self.pGestures, self.pDepthFrame, self.timestamp, self.frameIndex) 274 | 275 | class NiteHandTrackerCallbacks(ctypes.Structure): 276 | readyForNextFrame = 'OniGeneralCallback' 277 | 278 | def __repr__(self): 279 | return 'NiteHandTrackerCallbacks(readyForNextFrame = %r)' % (self.readyForNextFrame) 280 | 281 | class NiteVersion(ctypes.Structure): 282 | major = 'ctypes.c_int' 283 | minor = 'ctypes.c_int' 284 | maintenance = 'ctypes.c_int' 285 | build = 'ctypes.c_int' 286 | 287 | def __repr__(self): 288 | return 'NiteVersion(major = %r, minor = %r, maintenance = %r, build = %r)' % (self.major, 
self.minor, self.maintenance, self.build) 289 | 290 | class NiteHandTracker(ctypes.Structure): 291 | 292 | def __repr__(self): 293 | return 'NiteHandTracker()' % () 294 | 295 | OniBool = ctypes.c_int 296 | OniCallbackHandle = ctypes.POINTER(OniCallbackHandleImpl) 297 | OniHardwareVersion = ctypes.c_int 298 | OniDeviceHandle = ctypes.POINTER(_OniDevice) 299 | OniStreamHandle = ctypes.POINTER(_OniStream) 300 | OniRecorderHandle = ctypes.POINTER(_OniRecorder) 301 | OniNewFrameCallback = _get_calling_conv(None, OniStreamHandle, ctypes.c_void_p) 302 | OniGeneralCallback = _get_calling_conv(None, ctypes.c_void_p) 303 | OniDeviceInfoCallback = _get_calling_conv(None, ctypes.POINTER(OniDeviceInfo), ctypes.c_void_p) 304 | OniDeviceStateCallback = _get_calling_conv(None, ctypes.POINTER(OniDeviceInfo), OniDeviceState, ctypes.c_void_p) 305 | OniFrameAllocBufferCallback = _get_calling_conv(ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p) 306 | OniFrameFreeBufferCallback = _get_calling_conv(None, ctypes.c_void_p, ctypes.c_void_p) 307 | OniDepthPixel = ctypes.c_ushort 308 | OniGrayscale16Pixel = ctypes.c_ushort 309 | OniGrayscale8Pixel = ctypes.c_ubyte 310 | NiteUserId = ctypes.c_short 311 | NiteUserTrackerHandle = ctypes.POINTER(NiteUserTracker) 312 | NiteHandId = ctypes.c_short 313 | NiteHandTrackerHandle = ctypes.POINTER(NiteHandTracker) 314 | 315 | _OniDevice._fields_ = [ 316 | ] 317 | 318 | _OniStream._fields_ = [ 319 | ] 320 | 321 | _OniRecorder._fields_ = [ 322 | ] 323 | 324 | NiteUserTracker._fields_ = [ 325 | ] 326 | 327 | NitePoint3f._fields_ = [ 328 | ('x', ctypes.c_float), 329 | ('y', ctypes.c_float), 330 | ('z', ctypes.c_float), 331 | ] 332 | 333 | NiteQuaternion._fields_ = [ 334 | ('x', ctypes.c_float), 335 | ('y', ctypes.c_float), 336 | ('z', ctypes.c_float), 337 | ('w', ctypes.c_float), 338 | ] 339 | 340 | NiteSkeletonJoint._fields_ = [ 341 | ('jointType', NiteJointType), 342 | ('position', NitePoint3f), 343 | ('positionConfidence', ctypes.c_float), 344 | ('orientation', NiteQuaternion), 345 | ('orientationConfidence', ctypes.c_float), 346 | ] 347 | 348 | NiteBoundingBox._fields_ = [ 349 | ('min', NitePoint3f), 350 | ('max', NitePoint3f), 351 | ] 352 | 353 | NitePoseData._fields_ = [ 354 | ('type', NitePoseType), 355 | ('state', ctypes.c_int), 356 | ] 357 | 358 | NiteSkeleton._fields_ = [ 359 | ('joints', (NiteSkeletonJoint * 15)), 360 | ('state', NiteSkeletonState), 361 | ] 362 | 363 | NiteUserData._fields_ = [ 364 | ('id', NiteUserId), 365 | ('boundingBox', NiteBoundingBox), 366 | ('centerOfMass', NitePoint3f), 367 | ('state', ctypes.c_int), 368 | ('skeleton', NiteSkeleton), 369 | ('poses', (NitePoseData * 2)), 370 | ] 371 | 372 | NiteUserMap._fields_ = [ 373 | ('pixels', ctypes.POINTER(NiteUserId)), 374 | ('width', ctypes.c_int), 375 | ('height', ctypes.c_int), 376 | ('stride', ctypes.c_int), 377 | ] 378 | 379 | NitePlane._fields_ = [ 380 | ('point', NitePoint3f), 381 | ('normal', NitePoint3f), 382 | ] 383 | 384 | NiteUserTrackerFrame._fields_ = [ 385 | ('userCount', ctypes.c_int), 386 | ('pUser', ctypes.POINTER(NiteUserData)), 387 | ('userMap', NiteUserMap), 388 | ('pDepthFrame', ctypes.POINTER(OniFrame)), 389 | ('timestamp', ctypes.c_ulonglong), 390 | ('frameIndex', ctypes.c_int), 391 | ('floorConfidence', ctypes.c_float), 392 | ('floor', NitePlane), 393 | ] 394 | 395 | NiteUserTrackerCallbacks._fields_ = [ 396 | ('readyForNextFrame', OniGeneralCallback), 397 | ] 398 | 399 | NiteHandData._fields_ = [ 400 | ('id', NiteHandId), 401 | ('position', NitePoint3f), 402 | ('state', 
ctypes.c_int), 403 | ] 404 | 405 | NiteGestureData._fields_ = [ 406 | ('type', NiteGestureType), 407 | ('currentPosition', NitePoint3f), 408 | ('state', ctypes.c_int), 409 | ] 410 | 411 | NiteHandTrackerFrame._fields_ = [ 412 | ('handCount', ctypes.c_int), 413 | ('pHands', ctypes.POINTER(NiteHandData)), 414 | ('gestureCount', ctypes.c_int), 415 | ('pGestures', ctypes.POINTER(NiteGestureData)), 416 | ('pDepthFrame', ctypes.POINTER(OniFrame)), 417 | ('timestamp', ctypes.c_ulonglong), 418 | ('frameIndex', ctypes.c_int), 419 | ] 420 | 421 | NiteHandTrackerCallbacks._fields_ = [ 422 | ('readyForNextFrame', OniGeneralCallback), 423 | ] 424 | 425 | NiteVersion._fields_ = [ 426 | ('major', ctypes.c_int), 427 | ('minor', ctypes.c_int), 428 | ('maintenance', ctypes.c_int), 429 | ('build', ctypes.c_int), 430 | ] 431 | 432 | NiteHandTracker._fields_ = [ 433 | ] 434 | 435 | _dll = UnloadedDLL 436 | _niteInitialize = UnloadedDLL 437 | _niteShutdown = UnloadedDLL 438 | _niteGetVersion = UnloadedDLL 439 | _niteInitializeUserTracker = UnloadedDLL 440 | _niteInitializeUserTrackerByDevice = UnloadedDLL 441 | _niteShutdownUserTracker = UnloadedDLL 442 | _niteStartSkeletonTracking = UnloadedDLL 443 | _niteStopSkeletonTracking = UnloadedDLL 444 | _niteIsSkeletonTracking = UnloadedDLL 445 | _niteSetSkeletonSmoothing = UnloadedDLL 446 | _niteGetSkeletonSmoothing = UnloadedDLL 447 | _niteStartPoseDetection = UnloadedDLL 448 | _niteStopPoseDetection = UnloadedDLL 449 | _niteStopAllPoseDetection = UnloadedDLL 450 | _niteRegisterUserTrackerCallbacks = UnloadedDLL 451 | _niteUnregisterUserTrackerCallbacks = UnloadedDLL 452 | _niteReadUserTrackerFrame = UnloadedDLL 453 | _niteUserTrackerFrameAddRef = UnloadedDLL 454 | _niteUserTrackerFrameRelease = UnloadedDLL 455 | _niteInitializeHandTracker = UnloadedDLL 456 | _niteInitializeHandTrackerByDevice = UnloadedDLL 457 | _niteShutdownHandTracker = UnloadedDLL 458 | _niteStartHandTracking = UnloadedDLL 459 | _niteStopHandTracking = UnloadedDLL 460 | _niteStopAllHandTracking = UnloadedDLL 461 | _niteSetHandSmoothingFactor = UnloadedDLL 462 | _niteGetHandSmoothingFactor = UnloadedDLL 463 | _niteRegisterHandTrackerCallbacks = UnloadedDLL 464 | _niteUnregisterHandTrackerCallbacks = UnloadedDLL 465 | _niteReadHandTrackerFrame = UnloadedDLL 466 | _niteHandTrackerFrameAddRef = UnloadedDLL 467 | _niteHandTrackerFrameRelease = UnloadedDLL 468 | _niteStartGestureDetection = UnloadedDLL 469 | _niteStopGestureDetection = UnloadedDLL 470 | _niteStopAllGestureDetection = UnloadedDLL 471 | _niteConvertJointCoordinatesToDepth = UnloadedDLL 472 | _niteConvertDepthCoordinatesToJoint = UnloadedDLL 473 | _niteConvertHandCoordinatesToDepth = UnloadedDLL 474 | _niteConvertDepthCoordinatesToHand = UnloadedDLL 475 | 476 | def load_dll(dllname): 477 | global _dll 478 | if _dll: 479 | raise ValueError('DLL already loaded') 480 | _dll = ctypes.CDLL(dllname) 481 | 482 | global _niteInitialize 483 | _niteInitialize = _dll.niteInitialize 484 | _niteInitialize.restype = NiteStatus 485 | _niteInitialize.argtypes = [] 486 | 487 | global _niteShutdown 488 | _niteShutdown = _dll.niteShutdown 489 | _niteShutdown.restype = None 490 | _niteShutdown.argtypes = [] 491 | 492 | global _niteGetVersion 493 | _niteGetVersion = _dll.niteGetVersion 494 | _niteGetVersion.restype = NiteVersion 495 | _niteGetVersion.argtypes = [] 496 | 497 | global _niteInitializeUserTracker 498 | _niteInitializeUserTracker = _dll.niteInitializeUserTracker 499 | _niteInitializeUserTracker.restype = NiteStatus 500 | 
_niteInitializeUserTracker.argtypes = [ctypes.POINTER(NiteUserTrackerHandle)] 501 | 502 | global _niteInitializeUserTrackerByDevice 503 | _niteInitializeUserTrackerByDevice = _dll.niteInitializeUserTrackerByDevice 504 | _niteInitializeUserTrackerByDevice.restype = NiteStatus 505 | _niteInitializeUserTrackerByDevice.argtypes = [ctypes.c_void_p, ctypes.POINTER(NiteUserTrackerHandle)] 506 | 507 | global _niteShutdownUserTracker 508 | _niteShutdownUserTracker = _dll.niteShutdownUserTracker 509 | _niteShutdownUserTracker.restype = NiteStatus 510 | _niteShutdownUserTracker.argtypes = [NiteUserTrackerHandle] 511 | 512 | global _niteStartSkeletonTracking 513 | _niteStartSkeletonTracking = _dll.niteStartSkeletonTracking 514 | _niteStartSkeletonTracking.restype = NiteStatus 515 | _niteStartSkeletonTracking.argtypes = [NiteUserTrackerHandle, NiteUserId] 516 | 517 | global _niteStopSkeletonTracking 518 | _niteStopSkeletonTracking = _dll.niteStopSkeletonTracking 519 | _niteStopSkeletonTracking.restype = None 520 | _niteStopSkeletonTracking.argtypes = [NiteUserTrackerHandle, NiteUserId] 521 | 522 | global _niteIsSkeletonTracking 523 | _niteIsSkeletonTracking = _dll.niteIsSkeletonTracking 524 | _niteIsSkeletonTracking.restype = ctypes.c_bool 525 | _niteIsSkeletonTracking.argtypes = [NiteUserTrackerHandle, NiteUserId] 526 | 527 | global _niteSetSkeletonSmoothing 528 | _niteSetSkeletonSmoothing = _dll.niteSetSkeletonSmoothing 529 | _niteSetSkeletonSmoothing.restype = NiteStatus 530 | _niteSetSkeletonSmoothing.argtypes = [NiteUserTrackerHandle, ctypes.c_float] 531 | 532 | global _niteGetSkeletonSmoothing 533 | _niteGetSkeletonSmoothing = _dll.niteGetSkeletonSmoothing 534 | _niteGetSkeletonSmoothing.restype = NiteStatus 535 | _niteGetSkeletonSmoothing.argtypes = [NiteUserTrackerHandle, ctypes.POINTER(ctypes.c_float)] 536 | 537 | global _niteStartPoseDetection 538 | _niteStartPoseDetection = _dll.niteStartPoseDetection 539 | _niteStartPoseDetection.restype = NiteStatus 540 | _niteStartPoseDetection.argtypes = [NiteUserTrackerHandle, NiteUserId, NitePoseType] 541 | 542 | global _niteStopPoseDetection 543 | _niteStopPoseDetection = _dll.niteStopPoseDetection 544 | _niteStopPoseDetection.restype = None 545 | _niteStopPoseDetection.argtypes = [NiteUserTrackerHandle, NiteUserId, NitePoseType] 546 | 547 | global _niteStopAllPoseDetection 548 | _niteStopAllPoseDetection = _dll.niteStopAllPoseDetection 549 | _niteStopAllPoseDetection.restype = None 550 | _niteStopAllPoseDetection.argtypes = [NiteUserTrackerHandle, NiteUserId] 551 | 552 | global _niteRegisterUserTrackerCallbacks 553 | _niteRegisterUserTrackerCallbacks = _dll.niteRegisterUserTrackerCallbacks 554 | _niteRegisterUserTrackerCallbacks.restype = NiteStatus 555 | _niteRegisterUserTrackerCallbacks.argtypes = [NiteUserTrackerHandle, ctypes.POINTER(NiteUserTrackerCallbacks), ctypes.c_void_p] 556 | 557 | global _niteUnregisterUserTrackerCallbacks 558 | _niteUnregisterUserTrackerCallbacks = _dll.niteUnregisterUserTrackerCallbacks 559 | _niteUnregisterUserTrackerCallbacks.restype = None 560 | _niteUnregisterUserTrackerCallbacks.argtypes = [NiteUserTrackerHandle, ctypes.POINTER(NiteUserTrackerCallbacks)] 561 | 562 | global _niteReadUserTrackerFrame 563 | _niteReadUserTrackerFrame = _dll.niteReadUserTrackerFrame 564 | _niteReadUserTrackerFrame.restype = NiteStatus 565 | _niteReadUserTrackerFrame.argtypes = [NiteUserTrackerHandle, ctypes.POINTER(ctypes.POINTER(NiteUserTrackerFrame))] 566 | 567 | global _niteUserTrackerFrameAddRef 568 | _niteUserTrackerFrameAddRef = 
_dll.niteUserTrackerFrameAddRef 569 | _niteUserTrackerFrameAddRef.restype = NiteStatus 570 | _niteUserTrackerFrameAddRef.argtypes = [NiteUserTrackerHandle, ctypes.POINTER(NiteUserTrackerFrame)] 571 | 572 | global _niteUserTrackerFrameRelease 573 | _niteUserTrackerFrameRelease = _dll.niteUserTrackerFrameRelease 574 | _niteUserTrackerFrameRelease.restype = NiteStatus 575 | _niteUserTrackerFrameRelease.argtypes = [NiteUserTrackerHandle, ctypes.POINTER(NiteUserTrackerFrame)] 576 | 577 | global _niteInitializeHandTracker 578 | _niteInitializeHandTracker = _dll.niteInitializeHandTracker 579 | _niteInitializeHandTracker.restype = NiteStatus 580 | _niteInitializeHandTracker.argtypes = [ctypes.POINTER(NiteHandTrackerHandle)] 581 | 582 | global _niteInitializeHandTrackerByDevice 583 | _niteInitializeHandTrackerByDevice = _dll.niteInitializeHandTrackerByDevice 584 | _niteInitializeHandTrackerByDevice.restype = NiteStatus 585 | _niteInitializeHandTrackerByDevice.argtypes = [ctypes.c_void_p, ctypes.POINTER(NiteHandTrackerHandle)] 586 | 587 | global _niteShutdownHandTracker 588 | _niteShutdownHandTracker = _dll.niteShutdownHandTracker 589 | _niteShutdownHandTracker.restype = NiteStatus 590 | _niteShutdownHandTracker.argtypes = [NiteHandTrackerHandle] 591 | 592 | global _niteStartHandTracking 593 | _niteStartHandTracking = _dll.niteStartHandTracking 594 | _niteStartHandTracking.restype = NiteStatus 595 | _niteStartHandTracking.argtypes = [NiteHandTrackerHandle, ctypes.POINTER(NitePoint3f), ctypes.POINTER(NiteHandId)] 596 | 597 | global _niteStopHandTracking 598 | _niteStopHandTracking = _dll.niteStopHandTracking 599 | _niteStopHandTracking.restype = None 600 | _niteStopHandTracking.argtypes = [NiteHandTrackerHandle, NiteHandId] 601 | 602 | global _niteStopAllHandTracking 603 | _niteStopAllHandTracking = _dll.niteStopAllHandTracking 604 | _niteStopAllHandTracking.restype = None 605 | _niteStopAllHandTracking.argtypes = [NiteHandTrackerHandle] 606 | 607 | global _niteSetHandSmoothingFactor 608 | _niteSetHandSmoothingFactor = _dll.niteSetHandSmoothingFactor 609 | _niteSetHandSmoothingFactor.restype = NiteStatus 610 | _niteSetHandSmoothingFactor.argtypes = [NiteHandTrackerHandle, ctypes.c_float] 611 | 612 | global _niteGetHandSmoothingFactor 613 | _niteGetHandSmoothingFactor = _dll.niteGetHandSmoothingFactor 614 | _niteGetHandSmoothingFactor.restype = NiteStatus 615 | _niteGetHandSmoothingFactor.argtypes = [NiteHandTrackerHandle, ctypes.POINTER(ctypes.c_float)] 616 | 617 | global _niteRegisterHandTrackerCallbacks 618 | _niteRegisterHandTrackerCallbacks = _dll.niteRegisterHandTrackerCallbacks 619 | _niteRegisterHandTrackerCallbacks.restype = NiteStatus 620 | _niteRegisterHandTrackerCallbacks.argtypes = [NiteHandTrackerHandle, ctypes.POINTER(NiteHandTrackerCallbacks), ctypes.c_void_p] 621 | 622 | global _niteUnregisterHandTrackerCallbacks 623 | _niteUnregisterHandTrackerCallbacks = _dll.niteUnregisterHandTrackerCallbacks 624 | _niteUnregisterHandTrackerCallbacks.restype = None 625 | _niteUnregisterHandTrackerCallbacks.argtypes = [NiteHandTrackerHandle, ctypes.POINTER(NiteHandTrackerCallbacks)] 626 | 627 | global _niteReadHandTrackerFrame 628 | _niteReadHandTrackerFrame = _dll.niteReadHandTrackerFrame 629 | _niteReadHandTrackerFrame.restype = NiteStatus 630 | _niteReadHandTrackerFrame.argtypes = [NiteHandTrackerHandle, ctypes.POINTER(ctypes.POINTER(NiteHandTrackerFrame))] 631 | 632 | global _niteHandTrackerFrameAddRef 633 | _niteHandTrackerFrameAddRef = _dll.niteHandTrackerFrameAddRef 634 | 
_niteHandTrackerFrameAddRef.restype = NiteStatus 635 | _niteHandTrackerFrameAddRef.argtypes = [NiteHandTrackerHandle, ctypes.POINTER(NiteHandTrackerFrame)] 636 | 637 | global _niteHandTrackerFrameRelease 638 | _niteHandTrackerFrameRelease = _dll.niteHandTrackerFrameRelease 639 | _niteHandTrackerFrameRelease.restype = NiteStatus 640 | _niteHandTrackerFrameRelease.argtypes = [NiteHandTrackerHandle, ctypes.POINTER(NiteHandTrackerFrame)] 641 | 642 | global _niteStartGestureDetection 643 | _niteStartGestureDetection = _dll.niteStartGestureDetection 644 | _niteStartGestureDetection.restype = NiteStatus 645 | _niteStartGestureDetection.argtypes = [NiteHandTrackerHandle, NiteGestureType] 646 | 647 | global _niteStopGestureDetection 648 | _niteStopGestureDetection = _dll.niteStopGestureDetection 649 | _niteStopGestureDetection.restype = None 650 | _niteStopGestureDetection.argtypes = [NiteHandTrackerHandle, NiteGestureType] 651 | 652 | global _niteStopAllGestureDetection 653 | _niteStopAllGestureDetection = _dll.niteStopAllGestureDetection 654 | _niteStopAllGestureDetection.restype = None 655 | _niteStopAllGestureDetection.argtypes = [NiteHandTrackerHandle] 656 | 657 | global _niteConvertJointCoordinatesToDepth 658 | _niteConvertJointCoordinatesToDepth = _dll.niteConvertJointCoordinatesToDepth 659 | _niteConvertJointCoordinatesToDepth.restype = NiteStatus 660 | _niteConvertJointCoordinatesToDepth.argtypes = [NiteUserTrackerHandle, ctypes.c_float, ctypes.c_float, ctypes.c_float, ctypes.POINTER(ctypes.c_float), ctypes.POINTER(ctypes.c_float)] 661 | 662 | global _niteConvertDepthCoordinatesToJoint 663 | _niteConvertDepthCoordinatesToJoint = _dll.niteConvertDepthCoordinatesToJoint 664 | _niteConvertDepthCoordinatesToJoint.restype = NiteStatus 665 | _niteConvertDepthCoordinatesToJoint.argtypes = [NiteUserTrackerHandle, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.POINTER(ctypes.c_float), ctypes.POINTER(ctypes.c_float)] 666 | 667 | global _niteConvertHandCoordinatesToDepth 668 | _niteConvertHandCoordinatesToDepth = _dll.niteConvertHandCoordinatesToDepth 669 | _niteConvertHandCoordinatesToDepth.restype = NiteStatus 670 | _niteConvertHandCoordinatesToDepth.argtypes = [NiteHandTrackerHandle, ctypes.c_float, ctypes.c_float, ctypes.c_float, ctypes.POINTER(ctypes.c_float), ctypes.POINTER(ctypes.c_float)] 671 | 672 | global _niteConvertDepthCoordinatesToHand 673 | _niteConvertDepthCoordinatesToHand = _dll.niteConvertDepthCoordinatesToHand 674 | _niteConvertDepthCoordinatesToHand.restype = NiteStatus 675 | _niteConvertDepthCoordinatesToHand.argtypes = [NiteHandTrackerHandle, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.POINTER(ctypes.c_float), ctypes.POINTER(ctypes.c_float)] 676 | 677 | import functools 678 | from primesense.utils import NiteError 679 | 680 | def nite_call(func): 681 | @functools.wraps(func) 682 | def wrapper(*args): 683 | res = func(*args) 684 | if res != NiteStatus.NITE_STATUS_OK: 685 | raise NiteError(res) 686 | return res 687 | 688 | return wrapper 689 | 690 | @nite_call 691 | def niteInitialize(): 692 | '''NiteStatus niteInitialize()''' 693 | return _niteInitialize() 694 | 695 | def niteShutdown(): 696 | '''void niteShutdown()''' 697 | return _niteShutdown() 698 | 699 | def niteGetVersion(): 700 | '''NiteVersion niteGetVersion()''' 701 | return _niteGetVersion() 702 | 703 | @nite_call 704 | def niteInitializeUserTracker(pUserTracker): 705 | '''NiteStatus niteInitializeUserTracker(NiteUserTrackerHandle* pUserTracker)''' 706 | return _niteInitializeUserTracker(pUserTracker) 707 | 708 
| @nite_call 709 | def niteInitializeUserTrackerByDevice(pDevice, pUserTracker): 710 | '''NiteStatus niteInitializeUserTrackerByDevice(void* pDevice, NiteUserTrackerHandle* pUserTracker)''' 711 | return _niteInitializeUserTrackerByDevice(pDevice, pUserTracker) 712 | 713 | @nite_call 714 | def niteShutdownUserTracker(userTracker): 715 | '''NiteStatus niteShutdownUserTracker(NiteUserTrackerHandle userTracker)''' 716 | return _niteShutdownUserTracker(userTracker) 717 | 718 | @nite_call 719 | def niteStartSkeletonTracking(userTracker, id): 720 | '''NiteStatus niteStartSkeletonTracking(NiteUserTrackerHandle userTracker, NiteUserId id)''' 721 | return _niteStartSkeletonTracking(userTracker, id) 722 | 723 | def niteStopSkeletonTracking(userTracker, id): 724 | '''void niteStopSkeletonTracking(NiteUserTrackerHandle userTracker, NiteUserId id)''' 725 | return _niteStopSkeletonTracking(userTracker, id) 726 | 727 | def niteIsSkeletonTracking(userTracker, id): 728 | '''bool niteIsSkeletonTracking(NiteUserTrackerHandle userTracker, NiteUserId id)''' 729 | return _niteIsSkeletonTracking(userTracker, id) 730 | 731 | @nite_call 732 | def niteSetSkeletonSmoothing(userTracker, factor): 733 | '''NiteStatus niteSetSkeletonSmoothing(NiteUserTrackerHandle userTracker, float factor)''' 734 | return _niteSetSkeletonSmoothing(userTracker, factor) 735 | 736 | @nite_call 737 | def niteGetSkeletonSmoothing(userTracker, pFactor): 738 | '''NiteStatus niteGetSkeletonSmoothing(NiteUserTrackerHandle userTracker, float* pFactor)''' 739 | return _niteGetSkeletonSmoothing(userTracker, pFactor) 740 | 741 | @nite_call 742 | def niteStartPoseDetection(userTracker, id, poseType): 743 | '''NiteStatus niteStartPoseDetection(NiteUserTrackerHandle userTracker, NiteUserId id, NitePoseType poseType)''' 744 | return _niteStartPoseDetection(userTracker, id, poseType) 745 | 746 | def niteStopPoseDetection(userTracker, id, poseType): 747 | '''void niteStopPoseDetection(NiteUserTrackerHandle userTracker, NiteUserId id, NitePoseType poseType)''' 748 | return _niteStopPoseDetection(userTracker, id, poseType) 749 | 750 | def niteStopAllPoseDetection(userTracker, id): 751 | '''void niteStopAllPoseDetection(NiteUserTrackerHandle userTracker, NiteUserId id)''' 752 | return _niteStopAllPoseDetection(userTracker, id) 753 | 754 | @nite_call 755 | def niteRegisterUserTrackerCallbacks(userTracker, pCallbacks, pCookie): 756 | '''NiteStatus niteRegisterUserTrackerCallbacks(NiteUserTrackerHandle userTracker, NiteUserTrackerCallbacks* pCallbacks, void* pCookie)''' 757 | return _niteRegisterUserTrackerCallbacks(userTracker, pCallbacks, pCookie) 758 | 759 | def niteUnregisterUserTrackerCallbacks(userTracker, pCallbacks): 760 | '''void niteUnregisterUserTrackerCallbacks(NiteUserTrackerHandle userTracker, NiteUserTrackerCallbacks* pCallbacks)''' 761 | return _niteUnregisterUserTrackerCallbacks(userTracker, pCallbacks) 762 | 763 | @nite_call 764 | def niteReadUserTrackerFrame(userTracker, pUserTrackerFrame): 765 | '''NiteStatus niteReadUserTrackerFrame(NiteUserTrackerHandle userTracker, NiteUserTrackerFrame** pUserTrackerFrame)''' 766 | return _niteReadUserTrackerFrame(userTracker, pUserTrackerFrame) 767 | 768 | @nite_call 769 | def niteUserTrackerFrameAddRef(userTracker, pUserTrackerFrame): 770 | '''NiteStatus niteUserTrackerFrameAddRef(NiteUserTrackerHandle userTracker, NiteUserTrackerFrame* pUserTrackerFrame)''' 771 | return _niteUserTrackerFrameAddRef(userTracker, pUserTrackerFrame) 772 | 773 | @nite_call 774 | def niteUserTrackerFrameRelease(userTracker, 
pUserTrackerFrame): 775 | '''NiteStatus niteUserTrackerFrameRelease(NiteUserTrackerHandle userTracker, NiteUserTrackerFrame* pUserTrackerFrame)''' 776 | return _niteUserTrackerFrameRelease(userTracker, pUserTrackerFrame) 777 | 778 | @nite_call 779 | def niteInitializeHandTracker(pHandTracker): 780 | '''NiteStatus niteInitializeHandTracker(NiteHandTrackerHandle* pHandTracker)''' 781 | return _niteInitializeHandTracker(pHandTracker) 782 | 783 | @nite_call 784 | def niteInitializeHandTrackerByDevice(pDevice, pHandTracker): 785 | '''NiteStatus niteInitializeHandTrackerByDevice(void* pDevice, NiteHandTrackerHandle* pHandTracker)''' 786 | return _niteInitializeHandTrackerByDevice(pDevice, pHandTracker) 787 | 788 | @nite_call 789 | def niteShutdownHandTracker(handTracker): 790 | '''NiteStatus niteShutdownHandTracker(NiteHandTrackerHandle handTracker)''' 791 | return _niteShutdownHandTracker(handTracker) 792 | 793 | @nite_call 794 | def niteStartHandTracking(handTracker, pPosition, pNewHandId): 795 | '''NiteStatus niteStartHandTracking(NiteHandTrackerHandle handTracker, NitePoint3f* pPosition, NiteHandId* pNewHandId)''' 796 | return _niteStartHandTracking(handTracker, pPosition, pNewHandId) 797 | 798 | def niteStopHandTracking(handTracker, id): 799 | '''void niteStopHandTracking(NiteHandTrackerHandle handTracker, NiteHandId id)''' 800 | return _niteStopHandTracking(handTracker, id) 801 | 802 | def niteStopAllHandTracking(handTracker): 803 | '''void niteStopAllHandTracking(NiteHandTrackerHandle handTracker)''' 804 | return _niteStopAllHandTracking(handTracker) 805 | 806 | @nite_call 807 | def niteSetHandSmoothingFactor(handTracker, factor): 808 | '''NiteStatus niteSetHandSmoothingFactor(NiteHandTrackerHandle handTracker, float factor)''' 809 | return _niteSetHandSmoothingFactor(handTracker, factor) 810 | 811 | @nite_call 812 | def niteGetHandSmoothingFactor(handTracker, pFactor): 813 | '''NiteStatus niteGetHandSmoothingFactor(NiteHandTrackerHandle handTracker, float* pFactor)''' 814 | return _niteGetHandSmoothingFactor(handTracker, pFactor) 815 | 816 | @nite_call 817 | def niteRegisterHandTrackerCallbacks(handTracker, pCallbacks, pCookie): 818 | '''NiteStatus niteRegisterHandTrackerCallbacks(NiteHandTrackerHandle handTracker, NiteHandTrackerCallbacks* pCallbacks, void* pCookie)''' 819 | return _niteRegisterHandTrackerCallbacks(handTracker, pCallbacks, pCookie) 820 | 821 | def niteUnregisterHandTrackerCallbacks(handTracker, pCallbacks): 822 | '''void niteUnregisterHandTrackerCallbacks(NiteHandTrackerHandle handTracker, NiteHandTrackerCallbacks* pCallbacks)''' 823 | return _niteUnregisterHandTrackerCallbacks(handTracker, pCallbacks) 824 | 825 | @nite_call 826 | def niteReadHandTrackerFrame(handTracker, pHandTrackerFrame): 827 | '''NiteStatus niteReadHandTrackerFrame(NiteHandTrackerHandle handTracker, NiteHandTrackerFrame** pHandTrackerFrame)''' 828 | return _niteReadHandTrackerFrame(handTracker, pHandTrackerFrame) 829 | 830 | @nite_call 831 | def niteHandTrackerFrameAddRef(handTracker, pHandTrackerFrame): 832 | '''NiteStatus niteHandTrackerFrameAddRef(NiteHandTrackerHandle handTracker, NiteHandTrackerFrame* pHandTrackerFrame)''' 833 | return _niteHandTrackerFrameAddRef(handTracker, pHandTrackerFrame) 834 | 835 | @nite_call 836 | def niteHandTrackerFrameRelease(handTracker, pHandTrackerFrame): 837 | '''NiteStatus niteHandTrackerFrameRelease(NiteHandTrackerHandle handTracker, NiteHandTrackerFrame* pHandTrackerFrame)''' 838 | return _niteHandTrackerFrameRelease(handTracker, pHandTrackerFrame) 839 | 840 
| @nite_call 841 | def niteStartGestureDetection(handTracker, type): 842 | '''NiteStatus niteStartGestureDetection(NiteHandTrackerHandle handTracker, NiteGestureType type)''' 843 | return _niteStartGestureDetection(handTracker, type) 844 | 845 | def niteStopGestureDetection(handTracker, type): 846 | '''void niteStopGestureDetection(NiteHandTrackerHandle handTracker, NiteGestureType type)''' 847 | return _niteStopGestureDetection(handTracker, type) 848 | 849 | def niteStopAllGestureDetection(handTracker): 850 | '''void niteStopAllGestureDetection(NiteHandTrackerHandle handTracker)''' 851 | return _niteStopAllGestureDetection(handTracker) 852 | 853 | @nite_call 854 | def niteConvertJointCoordinatesToDepth(userTracker, x, y, z, pX, pY): 855 | '''NiteStatus niteConvertJointCoordinatesToDepth(NiteUserTrackerHandle userTracker, float x, float y, float z, float* pX, float* pY)''' 856 | return _niteConvertJointCoordinatesToDepth(userTracker, x, y, z, pX, pY) 857 | 858 | @nite_call 859 | def niteConvertDepthCoordinatesToJoint(userTracker, x, y, z, pX, pY): 860 | '''NiteStatus niteConvertDepthCoordinatesToJoint(NiteUserTrackerHandle userTracker, int x, int y, int z, float* pX, float* pY)''' 861 | return _niteConvertDepthCoordinatesToJoint(userTracker, x, y, z, pX, pY) 862 | 863 | @nite_call 864 | def niteConvertHandCoordinatesToDepth(handTracker, x, y, z, pX, pY): 865 | '''NiteStatus niteConvertHandCoordinatesToDepth(NiteHandTrackerHandle handTracker, float x, float y, float z, float* pX, float* pY)''' 866 | return _niteConvertHandCoordinatesToDepth(handTracker, x, y, z, pX, pY) 867 | 868 | @nite_call 869 | def niteConvertDepthCoordinatesToHand(handTracker, x, y, z, pX, pY): 870 | '''NiteStatus niteConvertDepthCoordinatesToHand(NiteHandTrackerHandle handTracker, int x, int y, int z, float* pX, float* pY)''' 871 | return _niteConvertDepthCoordinatesToHand(handTracker, x, y, z, pX, pY) 872 | 873 | all_types = [ 874 | OniStatus, 875 | OniSensorType, 876 | OniPixelFormat, 877 | OniDeviceState, 878 | OniImageRegistrationMode, 879 | _anon_enum_5, 880 | OniBool, 881 | OniCallbackHandleImpl, 882 | OniCallbackHandle, 883 | OniVersion, 884 | OniHardwareVersion, 885 | OniVideoMode, 886 | OniSensorInfo, 887 | OniDeviceInfo, 888 | _OniDevice, 889 | OniDeviceHandle, 890 | _OniStream, 891 | OniStreamHandle, 892 | _OniRecorder, 893 | OniRecorderHandle, 894 | OniFrame, 895 | OniNewFrameCallback, 896 | OniGeneralCallback, 897 | OniDeviceInfoCallback, 898 | OniDeviceStateCallback, 899 | OniFrameAllocBufferCallback, 900 | OniFrameFreeBufferCallback, 901 | OniDeviceCallbacks, 902 | OniCropping, 903 | OniDepthPixel, 904 | OniGrayscale16Pixel, 905 | OniGrayscale8Pixel, 906 | OniRGB888Pixel, 907 | OniYUV422DoublePixel, 908 | OniSeek, 909 | NiteJointType, 910 | NiteSkeletonState, 911 | NiteUserState, 912 | NiteStatus, 913 | NitePoseType, 914 | NitePoseState, 915 | NiteGestureType, 916 | NiteGestureState, 917 | NiteHandState, 918 | NiteUserId, 919 | NiteUserTracker, 920 | NiteUserTrackerHandle, 921 | NitePoint3f, 922 | NiteQuaternion, 923 | NiteSkeletonJoint, 924 | NiteBoundingBox, 925 | NitePoseData, 926 | NiteSkeleton, 927 | NiteUserData, 928 | NiteUserMap, 929 | NitePlane, 930 | NiteUserTrackerFrame, 931 | NiteUserTrackerCallbacks, 932 | NiteHandId, 933 | NiteHandData, 934 | NiteGestureData, 935 | NiteHandTrackerFrame, 936 | NiteHandTrackerCallbacks, 937 | NiteVersion, 938 | NiteHandTracker, 939 | NiteHandTrackerHandle, 940 | ] 941 | 942 | all_funcs = [ 943 | niteInitialize, 944 | niteShutdown, 945 | niteGetVersion, 946 | 
niteInitializeUserTracker, 947 | niteInitializeUserTrackerByDevice, 948 | niteShutdownUserTracker, 949 | niteStartSkeletonTracking, 950 | niteStopSkeletonTracking, 951 | niteIsSkeletonTracking, 952 | niteSetSkeletonSmoothing, 953 | niteGetSkeletonSmoothing, 954 | niteStartPoseDetection, 955 | niteStopPoseDetection, 956 | niteStopAllPoseDetection, 957 | niteRegisterUserTrackerCallbacks, 958 | niteUnregisterUserTrackerCallbacks, 959 | niteReadUserTrackerFrame, 960 | niteUserTrackerFrameAddRef, 961 | niteUserTrackerFrameRelease, 962 | niteInitializeHandTracker, 963 | niteInitializeHandTrackerByDevice, 964 | niteShutdownHandTracker, 965 | niteStartHandTracking, 966 | niteStopHandTracking, 967 | niteStopAllHandTracking, 968 | niteSetHandSmoothingFactor, 969 | niteGetHandSmoothingFactor, 970 | niteRegisterHandTrackerCallbacks, 971 | niteUnregisterHandTrackerCallbacks, 972 | niteReadHandTrackerFrame, 973 | niteHandTrackerFrameAddRef, 974 | niteHandTrackerFrameRelease, 975 | niteStartGestureDetection, 976 | niteStopGestureDetection, 977 | niteStopAllGestureDetection, 978 | niteConvertJointCoordinatesToDepth, 979 | niteConvertDepthCoordinatesToJoint, 980 | niteConvertHandCoordinatesToDepth, 981 | niteConvertDepthCoordinatesToHand, 982 | ] 983 | -------------------------------------------------------------------------------- /primesense/_openni2.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/leezl/OpenNi-Python/4a0731a1f76011260b0c044922e8d20ccb61adb6/primesense/_openni2.pyc -------------------------------------------------------------------------------- /primesense/nite2.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=W0212,W0603 2 | 3 | import sys 4 | import os 5 | import weakref 6 | import atexit 7 | import ctypes 8 | import platform 9 | from primesense import _nite2 as c_api 10 | from primesense import openni2 11 | from primesense.utils import inherit_properties, ClosedHandle, HandleObject, InitializationError 12 | 13 | 14 | arch = int(platform.architecture()[0].lower().replace("bit", "")) 15 | 16 | _default_dll_directories = [] 17 | if arch == 32: 18 | if "NITE2_REDIST" in os.environ: 19 | _default_dll_directories.append(os.environ["NITE2_REDIST"]) 20 | elif arch == 64: 21 | if "NITE2_REDIST64" in os.environ: 22 | _default_dll_directories.append(os.environ["NITE2_REDIST64"]) 23 | elif "NITE2_REDIST" in os.environ: 24 | _default_dll_directories.append(os.environ["NITE2_REDIST"]) 25 | 26 | _default_dll_directories.append(".") 27 | 28 | if sys.platform == "win32": 29 | _dll_name = "NiTE2.dll" 30 | elif sys.platform == "darwin": 31 | _dll_name = "libNiTE2.dylib" 32 | else: 33 | _dll_name = "libNiTE2.so" 34 | 35 | 36 | _nite2_initialized = False 37 | loaded_dll_directory = None 38 | def initialize(dll_directories = _default_dll_directories): 39 | global _nite2_initialized 40 | global loaded_dll_directory 41 | if _nite2_initialized: 42 | return 43 | if isinstance(dll_directories, str): 44 | dll_directories = [dll_directories] 45 | 46 | if not openni2.is_initialized(): 47 | openni2.initialize() 48 | 49 | if loaded_dll_directory: 50 | c_api.niteInitialize() 51 | _nite2_initialized = True 52 | return 53 | 54 | found = False 55 | prev = os.getcwd() 56 | exceptions = [] 57 | dll_directories = [os.path.normpath(os.path.abspath(d)) for d in dll_directories] 58 | 59 | for dlldir in dll_directories: 60 | if not os.path.isdir(dlldir): 61 | exceptions.append((dlldir, "Directory 
does not exist")) 62 | continue 63 | fullpath = os.path.join(dlldir, _dll_name) 64 | if not os.path.isfile(fullpath): 65 | exceptions.append((fullpath, "file does not exist")) 66 | continue 67 | try: 68 | os.chdir(dlldir) 69 | c_api.load_dll(fullpath) 70 | c_api.niteInitialize() 71 | except Exception as ex: 72 | exceptions.append((fullpath, ex)) 73 | else: 74 | found = True 75 | loaded_dll_directory = dlldir 76 | break 77 | 78 | os.chdir(prev) 79 | if not found: 80 | raise InitializationError("NiTE2 could not be loaded:\n %s" % 81 | ("\n ".join("%s: %s" % (dir, ex) for dir, ex in exceptions)),) 82 | 83 | _nite2_initialized = True 84 | 85 | def is_initialized(): 86 | return _nite2_initialized 87 | 88 | _registered_user_trackers = weakref.WeakSet() 89 | _registered_user_tracker_frames = weakref.WeakSet() 90 | _registered_hand_trackers = weakref.WeakSet() 91 | _registered_hand_tracker_frames = weakref.WeakSet() 92 | _registered_user_tracker_listeners = weakref.WeakSet() 93 | _registered_hand_tracker_listeners = weakref.WeakSet() 94 | 95 | def unload(): 96 | global _nite2_initialized 97 | if not _nite2_initialized: 98 | return 99 | for coll in [_registered_user_tracker_frames, _registered_hand_tracker_frames, _registered_hand_trackers, 100 | _registered_user_trackers, _registered_user_tracker_listeners, _registered_hand_tracker_listeners]: 101 | for hndl in coll: 102 | hndl.close() 103 | coll.clear() 104 | 105 | _nite2_initialized = False 106 | c_api.niteShutdown() 107 | 108 | atexit.register(unload) 109 | 110 | def get_version(): 111 | return c_api.niteGetVersion() 112 | 113 | Point3f = c_api.NitePoint3f 114 | Plane = c_api.NitePlane 115 | Quaternion = c_api.NiteQuaternion 116 | BoundingBox = c_api.NiteBoundingBox 117 | UserId = c_api.NiteUserId 118 | HandId = c_api.NiteHandId 119 | UserMap = c_api.NiteUserMap 120 | SkeletonJoint = c_api.NiteSkeletonJoint 121 | 122 | @inherit_properties(c_api.NitePoseData, "_posedata") 123 | class PoseData(object): 124 | __slots__ = ["_posedata"] 125 | def __init__(self, posedata): 126 | self._posedata = posedata 127 | def is_held(self): 128 | return (self.state & c_api.NitePoseState.NITE_POSE_STATE_IN_POSE) != 0 129 | def is_entered(self): 130 | return (self.state & c_api.NitePoseState.NITE_POSE_STATE_ENTER) != 0 131 | def is_exited(self): 132 | return (self.state & c_api.NitePoseState.NITE_POSE_STATE_EXIT) != 0 133 | 134 | @inherit_properties(c_api.NiteSkeleton, "_skeleton") 135 | class Skeleton(object): 136 | __slots__ = ["_skeleton"] 137 | def __init__(self, skeleton): 138 | self._skeleton = skeleton 139 | def get_joint(self, jointtype): 140 | return self.joints[jointtype] 141 | 142 | @inherit_properties(c_api.NiteUserData, "_userdata") 143 | class UserData(object): 144 | __slots__ = ["_userdata"] 145 | def __init__(self, userdata): 146 | self._userdata = userdata 147 | def is_new(self): 148 | return (self.state & c_api.NiteUserState.NITE_USER_STATE_NEW) != 0 149 | def is_visible(self): 150 | return (self.state & c_api.NiteUserState.NITE_USER_STATE_VISIBLE) != 0; 151 | def is_lost(self): 152 | return (self.state & c_api.NiteUserState.NITE_USER_STATE_LOST) != 0; 153 | def get_pose(self, posetype): 154 | return PoseData(self.poses[posetype]) 155 | 156 | @inherit_properties(c_api.NiteUserTrackerFrame, "_frame") 157 | class UserTrackerFrame(HandleObject): 158 | __slots__ = ["_frame", "_user_tracker_handle", "_depth_frame", "users", "users_by_id", "__weakref__"] 159 | def __init__(self, pframe, user_tracker_handle): 160 | self._frame = pframe[0] 161 | 
self._user_tracker_handle = user_tracker_handle 162 | self._depth_frame = None 163 | c_api.niteUserTrackerFrameAddRef(user_tracker_handle, pframe) 164 | HandleObject.__init__(self, pframe) 165 | self.users = [] 166 | self.users_by_id = {} 167 | for i in range(self.userCount): 168 | u = UserData(self.pUser[i]) 169 | self.users.append(u) 170 | self.users_by_id[u.id] = u 171 | _registered_user_tracker_frames.add(self) 172 | 173 | def _close(self): 174 | if is_initialized(): 175 | c_api.niteUserTrackerFrameRelease(self._user_tracker_handle, self._handle) 176 | self._frame = ClosedHandle 177 | self._user_tracker_handle = ClosedHandle 178 | del self.users[:] 179 | 180 | def get_depth_frame(self): 181 | if self._depth_frame is None: 182 | self._depth_frame = openni2.VideoFrame(self.pDepthFrame) 183 | return self._depth_frame 184 | 185 | 186 | class _NiteDevStruct(ctypes.Structure): 187 | _fields_ = [ 188 | ("pPlaybackControl", ctypes.c_void_p), 189 | ("device", openni2.c_api.OniDeviceHandle), 190 | ] 191 | 192 | class UserTracker(HandleObject): 193 | def __init__(self, device): 194 | handle = c_api.NiteUserTrackerHandle() 195 | if not device: 196 | c_api.niteInitializeUserTracker(ctypes.byref(handle)) 197 | else: 198 | self._devstruct = _NiteDevStruct() 199 | self._devstruct.device = device._handle 200 | c_api.niteInitializeUserTrackerByDevice(ctypes.byref(self._devstruct), ctypes.byref(handle)) 201 | HandleObject.__init__(self, handle) 202 | _registered_user_trackers.add(self) 203 | 204 | @classmethod 205 | def open_any(cls): 206 | return UserTracker(None) 207 | 208 | def _close(self): 209 | if is_initialized(): 210 | c_api.niteShutdownUserTracker(self._handle) 211 | 212 | def read_frame(self): 213 | pnf = ctypes.POINTER(c_api.NiteUserTrackerFrame)() 214 | c_api.niteReadUserTrackerFrame(self._handle, ctypes.byref(pnf)) 215 | return UserTrackerFrame(pnf, self._handle) 216 | 217 | def set_skeleton_smoothing_factor(self, factor): 218 | return c_api.niteSetSkeletonSmoothing(self._handle, factor) 219 | def get_skeleton_smoothing_factor(self): 220 | factor = ctypes.c_float() 221 | c_api.niteGetSkeletonSmoothing(self._handle, ctypes.byref(factor)) 222 | return factor.value 223 | skeleton_smoothing_factor = property(get_skeleton_smoothing_factor, set_skeleton_smoothing_factor) 224 | 225 | def start_skeleton_tracking(self, userid): 226 | c_api.niteStartSkeletonTracking(self._handle, userid) 227 | def stop_skeleton_tracking(self, userid): 228 | c_api.niteStopSkeletonTracking(self._handle, userid) 229 | 230 | def is_tracking(self, userid): 231 | c_api.niteIsSkeletonTracking(self._handle, userid) 232 | 233 | def start_pose_detection(self, userid, posetype): 234 | c_api.niteStartPoseDetection(self._handle, userid, posetype) 235 | def stop_pose_detection(self, userid, posetype): 236 | c_api.niteStopPoseDetection(self._handle, userid, posetype) 237 | def stop_all_pose_detection(self, userid): 238 | c_api.niteStopAllPoseDetection(self._handle, userid) 239 | 240 | def convert_joint_coordinates_to_depth(self, x, y, z): 241 | outX = ctypes.c_float() 242 | outY = ctypes.c_float() 243 | c_api.niteConvertJointCoordinatesToDepth(self._handle, x, y, z, ctypes.byref(outX), ctypes.byref(outY)) 244 | return (outX.value, outY.value) 245 | 246 | def convert_depth_coordinates_to_joint(self, x, y, z): 247 | outX = ctypes.c_float() 248 | outY = ctypes.c_float() 249 | c_api.niteConvertDepthCoordinatesToJoint(self._handle, x, y, z, ctypes.byref(outX), ctypes.byref(outY)) 250 | return (outX.value, outY.value) 251 | 252 | 253 | 
@inherit_properties(c_api.NiteGestureData, "_gesture") 254 | class GestureData(object): 255 | def __init__(self, gesture): 256 | self._gesture = gesture 257 | 258 | def is_complete(self): 259 | return (self.state & c_api.NiteGestureState.NITE_GESTURE_STATE_COMPLETED) != 0 260 | def is_in_progress(self): 261 | return (self.state & c_api.NiteGestureState.NITE_GESTURE_STATE_IN_PROGRESS) != 0 262 | 263 | 264 | @inherit_properties(c_api.NiteHandData, "_handdata") 265 | class HandData(object): 266 | def __init__(self, handdata): 267 | self._handdata = handdata 268 | 269 | def is_new(self): 270 | return (self.state & c_api.NiteHandState.NITE_HAND_STATE_NEW) != 0 271 | def is_lost(self): 272 | return self.state == c_api.NiteHandState.NITE_HAND_STATE_LOST 273 | def is_tracking(self): 274 | return (self.state & c_api.NiteHandState.NITE_HAND_STATE_TRACKED) != 0 275 | def is_touching_fov(self): 276 | return (self.state & c_api.NiteHandState.NITE_HAND_STATE_TOUCHING_FOV) != 0 277 | 278 | @inherit_properties(c_api.NiteHandTrackerFrame, "_frame") 279 | class HandTrackerFrame(HandleObject): 280 | def __init__(self, hand_tracker_handle, pframe): 281 | self._hand_tracker_handle = hand_tracker_handle 282 | self._frame = pframe[0] 283 | c_api.niteHandTrackerFrameAddRef(hand_tracker_handle, pframe) 284 | HandleObject.__init__(self, pframe) 285 | self._depth_frame = None 286 | self._hands = None 287 | self._gestures = None 288 | _registered_hand_tracker_frames.add(self) 289 | 290 | def _close(self): 291 | if is_initialized(): 292 | c_api.niteHandTrackerFrameRelease(self._hand_tracker_handle, self._handle) 293 | 294 | @property 295 | def depth_frame(self): 296 | if self._depth_frame is None: 297 | self._depth_frame = openni2.VideoFrame(self._frame.pDepthFrame) 298 | return self._depth_frame 299 | 300 | @property 301 | def hands(self): 302 | if self._hands is None: 303 | self._hands = [self._frame.pHands[i] for i in range(self._frame.handCount)] 304 | return self._hands 305 | 306 | @property 307 | def gestures(self): 308 | if self._gestures is None: 309 | self._gestures = [self._frame.pGestures[i] for i in range(self._frame.gestureCount)] 310 | return self._gestures 311 | 312 | class HandTracker(HandleObject): 313 | def __init__(self, device): 314 | self.device = device 315 | handle = c_api.NiteHandTrackerHandle() 316 | if not device: 317 | c_api.niteInitializeHandTracker(ctypes.byref(handle)) 318 | else: 319 | self._devstruct = _NiteDevStruct() 320 | self._devstruct.device = device._handle 321 | c_api.niteInitializeHandTrackerByDevice(ctypes.byref(self._devstruct), ctypes.byref(handle)) 322 | HandleObject.__init__(self, handle) 323 | _registered_hand_trackers.add(self) 324 | 325 | @classmethod 326 | def open_any(cls): 327 | return cls(None) 328 | 329 | def _close(self): 330 | if is_initialized(): 331 | c_api.niteShutdownHandTracker(self._handle) 332 | 333 | def read_frame(self): 334 | pfrm = ctypes.POINTER(c_api.NiteHandTrackerFrame)() 335 | c_api.niteReadHandTrackerFrame(self._handle, ctypes.byref(pfrm)) 336 | return HandTrackerFrame(self._handle, pfrm) 337 | 338 | def set_smoothing_factor(self, factor): 339 | c_api.niteSetHandSmoothingFactor(self._handle, factor) 340 | def get_smoothing_factor(self): 341 | factor = ctypes.c_float() 342 | c_api.niteGetHandSmoothingFactor(self._handle, ctypes.byref(factor)) 343 | return factor.value 344 | smoothing_factor = property(get_smoothing_factor, set_smoothing_factor) 345 | 346 | def start_hand_tracking(self, *position): 347 | new_hand_id = HandId() 348 | if 
len(position) == 3: 349 | position = Point3f(*position) 350 | elif len(position) == 1: 351 | position = position[0] 352 | else: 353 | raise TypeError("Either Point3f or three values required") 354 | c_api.niteStartHandTracking(self._handle, ctypes.byref(position), ctypes.byref(new_hand_id)) 355 | return new_hand_id 356 | def stop_hand_tracking(self, handid): 357 | c_api.niteStopHandTracking(self._handle, handid) 358 | 359 | def start_gesture_detection(self, gesture_type): 360 | c_api.niteStartGestureDetection(self._handle, gesture_type) 361 | def stop_gesture_detection(self, gesture_type): 362 | c_api.niteStopGestureDetection(self._handle, gesture_type) 363 | 364 | def convert_hand_coordinates_to_depth(self, x, y, z): 365 | outX = ctypes.c_float() 366 | outY = ctypes.c_float() 367 | c_api.niteConvertHandCoordinatesToDepth(self._handle, x, y, z, ctypes.byref(outX), ctypes.byref(outY)) 368 | return outX.value, outY.value 369 | 370 | def convert_depth_coordinates_to_hand(self, x, y, z): 371 | outX = ctypes.c_float() 372 | outY = ctypes.c_float() 373 | c_api.niteConvertDepthCoordinatesToHand(self._handle, x, y, z, ctypes.byref(outX), ctypes.byref(outY)) 374 | return outX.value, outY.value 375 | 376 | def stop_all_hand_tracking(self): 377 | c_api.niteStopAllHandTracking(self._handle) 378 | 379 | def stop_all_gesture_detection(self): 380 | c_api.niteStopAllGestureDetection(self._handle) 381 | 382 | 383 | class UserTrackerListener(HandleObject): 384 | def __init__(self, user_tracker): 385 | self.user_tracker = user_tracker 386 | self._callbacks = c_api.NiteUserTrackerCallbacks( 387 | readyForNextFrame = c_api.OniGeneralCallback(self._on_ready_for_next_frame)) 388 | handle = ctypes.pointer(self._callbacks) 389 | c_api.niteRegisterUserTrackerCallbacks(self.user_tracker._handle, handle, None) 390 | HandleObject.__init__(self, handle) 391 | _registered_user_tracker_listeners.add(self) 392 | 393 | def unregister(self): 394 | self.close() 395 | 396 | def _close(self): 397 | if is_initialized(): 398 | c_api.niteUnregisterUserTrackerCallbacks(self.user_tracker._handle, self._handle) 399 | self.user_tracker = None 400 | 401 | def _on_ready_for_next_frame(self, _): 402 | self.on_ready_for_next_frame() 403 | def on_ready_for_next_frame(self): 404 | """Implement me""" 405 | pass 406 | 407 | 408 | class HandTrackerListener(HandleObject): 409 | def __init__(self, hand_tracker): 410 | self.hand_tracker = hand_tracker 411 | self._callbacks = c_api.NiteHandTrackerCallbacks( 412 | readyForNextFrame = c_api.OniGeneralCallback(self._on_ready_for_next_frame)) 413 | handle = ctypes.pointer(self._callbacks) 414 | c_api.niteRegisterHandTrackerCallbacks(self.hand_tracker._handle, handle, None) 415 | HandleObject.__init__(self, handle) 416 | _registered_hand_tracker_listeners.add(self) 417 | 418 | def _close(self): 419 | if is_initialized(): 420 | c_api.niteUnregisterHandTrackerCallbacks(self.hand_tracker._handle, self._handle) 421 | self.hand_tracker = None 422 | 423 | def _on_ready_for_next_frame(self, _): 424 | self.on_ready_for_next_frame() 425 | def on_ready_for_next_frame(self): 426 | """Implement me""" 427 | pass 428 | 429 | 430 | -------------------------------------------------------------------------------- /primesense/openni2.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=W0212,W0603 2 | 3 | import sys 4 | import os 5 | import ctypes 6 | import weakref 7 | import atexit 8 | import platform 9 | from primesense import _openni2 as c_api 10 | from 
primesense.utils import (inherit_properties, HandleObject, _py_to_ctype_obj, ClosedHandle, InitializationError, 11 | OpenNIError) 12 | 13 | 14 | arch = int(platform.architecture()[0].lower().replace("bit", "")) 15 | 16 | _default_dll_directories = [] 17 | if arch == 32: 18 | if "OPENNI2_REDIST" in os.environ: 19 | _default_dll_directories.append(os.environ["OPENNI2_REDIST"]) 20 | elif arch == 64: 21 | if "OPENNI2_REDIST64" in os.environ: 22 | _default_dll_directories.append(os.environ["OPENNI2_REDIST64"]) 23 | elif "OPENNI2_REDIST" in os.environ: 24 | _default_dll_directories.append(os.environ["OPENNI2_REDIST"]) 25 | 26 | _default_dll_directories.append(".") 27 | 28 | if sys.platform == "win32": 29 | _dll_name = "OpenNI2.dll" 30 | elif sys.platform == "darwin": 31 | _dll_name = "libOpenNI2.dylib" 32 | else: 33 | _dll_name = "libOpenNI2.so" 34 | 35 | #The types of streams: use in (device).get_sensor_info() to get supported video modes 36 | SENSOR_IR = c_api.OniSensorType.ONI_SENSOR_IR 37 | SENSOR_COLOR = c_api.OniSensorType.ONI_SENSOR_COLOR 38 | SENSOR_DEPTH = c_api.OniSensorType.ONI_SENSOR_DEPTH 39 | 40 | #use following in (stream).set_video_mode() 41 | PIXEL_FORMAT_DEPTH_1_MM = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_DEPTH_1_MM 42 | PIXEL_FORMAT_DEPTH_100_UM = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_DEPTH_100_UM 43 | PIXEL_FORMAT_SHIFT_9_2 = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_SHIFT_9_2 44 | PIXEL_FORMAT_SHIFT_9_3 = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_SHIFT_9_3 45 | PIXEL_FORMAT_RGB888 = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_RGB888 46 | PIXEL_FORMAT_YUV422 = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_YUV422 47 | PIXEL_FORMAT_GRAY8 = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_GRAY8 48 | PIXEL_FORMAT_GRAY16 = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_GRAY16 49 | PIXEL_FORMAT_JPEG = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_JPEG 50 | PIXEL_FORMAT_YUYV = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_YUYV 51 | 52 | IMAGE_REGISTRATION_DEPTH_TO_COLOR = c_api.OniImageRegistrationMode.ONI_IMAGE_REGISTRATION_DEPTH_TO_COLOR 53 | IMAGE_REGISTRATION_OFF = c_api.OniImageRegistrationMode.ONI_IMAGE_REGISTRATION_OFF 54 | 55 | 56 | _openni2_initialized = False 57 | loaded_dll_directory = None 58 | 59 | def initialize(dll_directories = _default_dll_directories): 60 | global _openni2_initialized 61 | global loaded_dll_directory 62 | if _openni2_initialized: 63 | return 64 | if isinstance(dll_directories, str): 65 | dll_directories = [dll_directories] 66 | if loaded_dll_directory: 67 | c_api.oniInitialize(c_api.ONI_API_VERSION) 68 | _openni2_initialized = True 69 | return 70 | 71 | found = False 72 | prev = os.getcwd() 73 | exceptions = [] 74 | dll_directories = [os.path.normpath(os.path.abspath(d)) for d in dll_directories] 75 | 76 | for dlldir in dll_directories: 77 | if not os.path.isdir(dlldir): 78 | exceptions.append((dlldir, "Directory does not exist")) 79 | continue 80 | fullpath = os.path.join(dlldir, _dll_name) 81 | if not os.path.isfile(fullpath): 82 | exceptions.append((fullpath, "file does not exist")) 83 | continue 84 | try: 85 | os.chdir(dlldir) 86 | c_api.load_dll(fullpath) 87 | c_api.oniInitialize(c_api.ONI_API_VERSION) 88 | except Exception as ex: 89 | exceptions.append((fullpath, ex)) 90 | else: 91 | found = True 92 | loaded_dll_directory = dlldir 93 | break 94 | 95 | os.chdir(prev) 96 | if not found: 97 | raise InitializationError("OpenNI2 could not be loaded:\n %s" % 98 | ("\n ".join("%s: %s" % (dir, ex) for dir, ex in exceptions)),) 99 | 100 | _openni2_initialized = True 101 | 102 | 103 | def is_initialized(): 104 
| return _openni2_initialized 105 | 106 | _registered_devices = weakref.WeakSet() 107 | _registered_video_frames = weakref.WeakSet() 108 | _registered_video_streams = weakref.WeakSet() 109 | _registered_recorders = weakref.WeakSet() 110 | _registered_device_listeners = weakref.WeakSet() 111 | 112 | def unload(): 113 | global _openni2_initialized 114 | if not _openni2_initialized: 115 | return 116 | for coll in [_registered_video_frames, _registered_recorders, _registered_video_streams, _registered_device_listeners, 117 | _registered_devices]: 118 | for hndl in coll: 119 | hndl.close() 120 | coll.clear() 121 | 122 | _openni2_initialized = False 123 | c_api.oniShutdown() 124 | 125 | atexit.register(unload) 126 | 127 | def get_version(): 128 | return c_api.oniGetVersion() 129 | 130 | def wait_for_any_stream(streams, timeout = None): 131 | if timeout is None: 132 | timeout = c_api.ONI_TIMEOUT_FOREVER 133 | else: 134 | timeout = int(timeout * 1000) # in msec 135 | ready_stream_index = ctypes.c_int(-1) 136 | arr = (c_api.OniStreamHandle * len(streams))() 137 | for i, s in enumerate(streams): 138 | arr[i] = s._handle 139 | try: 140 | c_api.oniWaitForAnyStream(arr, len(streams), ctypes.byref(ready_stream_index), timeout) 141 | except OpenNIError as ex: 142 | if ex.code == c_api.OniStatus.ONI_STATUS_TIME_OUT: 143 | # timed out 144 | return None 145 | else: 146 | raise 147 | if ready_stream_index.value >= 0: 148 | return streams[ready_stream_index.value] 149 | else: 150 | return None 151 | 152 | VideoMode = c_api.OniVideoMode 153 | DeviceInfo = c_api.OniDeviceInfo 154 | 155 | ''' 156 | Use this class to: 157 | *find out what video modes are available: 158 | *resolution, frame rate, pixel type 159 | ''' 160 | class SensorInfo(object): 161 | def __init__(self, info): 162 | self.sensorType = info.sensorType 163 | self.videoModes = [info.pSupportedVideoModes[i] for i in range(info.numSupportedVideoModes)] 164 | @classmethod 165 | def from_stream_handle(cls, handle): 166 | pinfo = c_api.oniStreamGetSensorInfo(handle) 167 | if pinfo == 0: 168 | return None 169 | return cls(pinfo[0]) 170 | @classmethod 171 | def from_device_handle(cls, handle, sensor_type): 172 | pinfo = c_api.oniDeviceGetSensorInfo(handle, sensor_type) 173 | if not pinfo: 174 | return None 175 | return cls(pinfo[0]) 176 | 177 | class PlaybackSupport(object): 178 | __slots__ = ["device"] 179 | def __init__(self, device): 180 | self.device = weakref.proxy(device) 181 | def get_speed(self): 182 | return self.device.get_property(c_api.ONI_DEVICE_PROPERTY_PLAYBACK_SPEED, ctypes.c_float) 183 | def set_speed(self, speed): 184 | return self.device.set_property(c_api.ONI_DEVICE_PROPERTY_PLAYBACK_SPEED, speed) 185 | speed = property(get_speed, set_speed) 186 | 187 | def get_repeat_enabled(self): 188 | return bool(self.device.get_property(c_api.ONI_DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED, c_api.OniBool)) 189 | def set_repeat_enabled(self, enable): 190 | self.device.set_property(c_api.ONI_DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED, enable) 191 | repeat = property(get_repeat_enabled, set_repeat_enabled) 192 | 193 | def seek(self, stream, frame_index): 194 | seek = c_api.OniSeek(frameIndex = frame_index, stream = stream._handle) 195 | self.device.invoke(c_api.ONI_DEVICE_COMMAND_SEEK, ctypes.byref(seek)) 196 | def get_number_of_frames(self, stream): 197 | return stream.get_number_of_frames() 198 | 199 | ''' 200 | Use this class to: 201 | *Keep track of your cameras 202 | *Get streams (rgb, depth, ir) 203 | *get the above sensor info class 204 | *sync the 
streams 205 | ''' 206 | class Device(HandleObject): 207 | def __init__(self, uri, mode = None): 208 | self._orig_uri = uri 209 | self.uri = uri 210 | self._mode = mode 211 | HandleObject.__init__(self, None) 212 | self._reopen() 213 | _registered_devices.add(self) 214 | 215 | def _reopen(self): 216 | self.close() 217 | self._handle = c_api.OniDeviceHandle() 218 | assert not bool(self._handle) 219 | if self._mode: 220 | c_api.oniDeviceOpenEx(self._orig_uri, self._mode, ctypes.byref(self._handle)) 221 | else: 222 | c_api.oniDeviceOpen(self._orig_uri, ctypes.byref(self._handle)) 223 | assert bool(self._handle), "Handle is NULL after open" 224 | if self.is_file(): 225 | self.playback = PlaybackSupport(self) 226 | else: 227 | self.playback = None 228 | self._sensor_infos = {} 229 | self._devinfo = None 230 | self.get_device_info() 231 | 232 | @classmethod 233 | def enumerate_uris(cls): 234 | pdevs = ctypes.POINTER(c_api.OniDeviceInfo)() 235 | count = ctypes.c_int() 236 | c_api.oniGetDeviceList(ctypes.byref(pdevs), ctypes.byref(count)) 237 | uris = [pdevs[i].uri for i in range(count.value)] 238 | c_api.oniReleaseDeviceList(pdevs) 239 | return uris 240 | 241 | @classmethod 242 | def open_all(cls): 243 | return [cls(uri) for uri in cls.enumerate_uris()] 244 | 245 | #this will get you a camera if there is one that has openni drivers 246 | @classmethod 247 | def open_any(cls): 248 | return cls(None) 249 | 250 | @classmethod 251 | def open_file(cls, filename): 252 | return cls(filename) 253 | 254 | def _close(self): 255 | if is_initialized(): 256 | c_api.oniDeviceClose(self._handle) 257 | self.playback = None 258 | 259 | def get_device_info(self): 260 | if self._devinfo is None: 261 | self._devinfo = c_api.OniDeviceInfo() 262 | c_api.oniDeviceGetInfo(self._handle, ctypes.byref(self._devinfo)) 263 | self.uri = self._devinfo.uri 264 | return self._devinfo 265 | device_info = property(get_device_info) 266 | 267 | def get_sensor_info(self, sensor_type): 268 | if sensor_type in self._sensor_infos: 269 | return self._sensor_infos[sensor_type] 270 | 271 | info = SensorInfo.from_device_handle(self._handle, sensor_type) 272 | self._sensor_infos[sensor_type] = info 273 | return info 274 | 275 | def has_sensor(self, sensor_type): 276 | return self.get_sensor_info(sensor_type) is not None 277 | 278 | def create_stream(self, sensor_type): 279 | return VideoMode(self, sensor_type) 280 | 281 | def create_depth_stream(self): 282 | if not self.has_sensor(SENSOR_DEPTH): 283 | return None 284 | return VideoStream(self, SENSOR_DEPTH) 285 | 286 | def create_color_stream(self): 287 | if not self.has_sensor(SENSOR_COLOR): 288 | return None 289 | return VideoStream(self, SENSOR_COLOR) 290 | 291 | def create_ir_stream(self): 292 | if not self.has_sensor(SENSOR_IR): 293 | return None 294 | return VideoStream(self, SENSOR_IR) 295 | 296 | def get_property(self, property_id, rettype): 297 | ret = rettype() 298 | size = ctypes.c_int(ctypes.sizeof(ret)) 299 | #oniStreamGetProperty(self._handle, property_id, ctypes.byref(ret), ctypes.byref(size)) 300 | c_api.oniDeviceGetProperty(self._handle, property_id, ctypes.byref(ret), ctypes.byref(size)) 301 | return ret 302 | def get_int_property(self, property_id): 303 | return self.get_property(property_id, ctypes.c_int).value 304 | def set_property(self, property_id, obj, size = None): 305 | obj, size = _py_to_ctype_obj(obj) 306 | if size is None: 307 | size = ctypes.sizeof(obj) 308 | c_api.oniDeviceSetProperty(self._handle, property_id, ctypes.byref(obj), size) 309 | def 
is_property_supported(self, property_id): 310 | return bool(c_api.oniDeviceIsPropertySupported(self._handle, property_id)) 311 | 312 | def invoke(self, command_id, data, size = None): 313 | c_api.oniDeviceInvoke(self._handle, command_id, data, size) 314 | def is_command_supported(self, command_id): 315 | return bool(c_api.oniDeviceIsCommandSupported(self._handle, command_id)) 316 | 317 | ''' 318 | The following do not appear to be working: 319 | *get throws error, takes no input, but still crashes... 320 | *set throws same error, regardless of mode passed 321 | ''' 322 | def is_image_registration_mode_supported(self, mode): 323 | #mode = IMAGE_REGISTRATION_DEPTH_TO_COLOR for example 324 | return bool(c_api.oniDeviceIsImageRegistrationModeSupported(self._handle, mode)) 325 | def get_image_registration_mode(self): 326 | return self.get_property(c_api.ONI_DEVICE_PROPERTY_IMAGE_REGISTRATION, c_api.OniImageRegistrationMode) 327 | def set_image_registration_mode(self, mode): 328 | self.set_property(c_api.ONI_DEVICE_PROPERTY_IMAGE_REGISTRATION, mode) 329 | 330 | def is_file(self): 331 | return (self.is_property_supported(c_api.ONI_DEVICE_PROPERTY_PLAYBACK_SPEED) and 332 | self.is_property_supported(c_api.ONI_DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED) and 333 | self.is_property_supported(c_api.ONI_DEVICE_COMMAND_SEEK)) 334 | 335 | def get_depth_color_sync_enabled(self): 336 | return bool(c_api.oniDeviceGetDepthColorSyncEnabled(self._handle)) 337 | def set_depth_color_sync_enabled(self, enable): 338 | if enable: 339 | c_api.oniDeviceEnableDepthColorSync(self._handle) 340 | else: 341 | c_api.oniDeviceDisableDepthColorSync(self._handle) 342 | depth_color_sync = property(get_depth_color_sync_enabled, set_depth_color_sync_enabled) 343 | 344 | @inherit_properties(c_api.OniFrame, "_frame") 345 | class VideoFrame(HandleObject): 346 | def __init__(self, pframe): 347 | self._frame = pframe[0] 348 | c_api.oniFrameAddRef(pframe) 349 | HandleObject.__init__(self, pframe) 350 | _registered_video_frames.add(self) 351 | def _close(self): 352 | if is_initialized(): 353 | c_api.oniFrameRelease(self._handle) 354 | self._frame = ClosedHandle 355 | 356 | def get_buffer_as(self, ctype): 357 | return (ctype * int(self.dataSize / ctypes.sizeof(ctype))).from_address(self.data) 358 | 359 | def get_buffer_as_uint8(self): 360 | return self.get_buffer_as(ctypes.c_uint8) 361 | def get_buffer_as_uint16(self): 362 | return self.get_buffer_as(ctypes.c_uint16) 363 | def get_buffer_as_triplet(self): 364 | return self.get_buffer_as(ctypes.c_uint8 * 3) 365 | 366 | 367 | class CameraSettings(object): 368 | __slots__ = ["stream"] 369 | def __init__(self, stream): 370 | self.stream = weakref.proxy(stream) 371 | 372 | def get_auto_exposure(self): 373 | return bool(self.stream.get_property(c_api.ONI_STREAM_PROPERTY_AUTO_EXPOSURE, c_api.OniBool)) 374 | def set_auto_exposure(self, enabled): 375 | self.stream.set_property(c_api.ONI_STREAM_PROPERTY_AUTO_EXPOSURE, enabled) 376 | auto_exposure = property(get_auto_exposure, set_auto_exposure) 377 | 378 | def get_auto_white_balance(self): 379 | return bool(self.stream.get_property(c_api.ONI_STREAM_PROPERTY_AUTO_WHITE_BALANCE, c_api.OniBool)) 380 | def set_auto_white_balance(self, enabled): 381 | return self.stream.set_property(c_api.ONI_STREAM_PROPERTY_AUTO_WHITE_BALANCE, enabled) 382 | auto_white_balance = property(get_auto_white_balance, set_auto_white_balance) 383 | 384 | def get_gain(self): 385 | try: 386 | return self.stream.get_property(c_api.ONI_STREAM_PROPERTY_GAIN, ctypes.c_int).value 387 
| except OpenNIError: 388 | return 100 389 | def set_gain(self, gain): 390 | return self.stream.set_property(c_api.ONI_STREAM_PROPERTY_GAIN, gain) 391 | gain = property(get_gain, set_gain) 392 | 393 | def get_exposure(self): 394 | try: 395 | return self.stream.get_property(c_api.ONI_STREAM_PROPERTY_EXPOSURE, ctypes.c_int).value 396 | except OpenNIError: 397 | return 0 398 | def set_exposure(self, exposure): 399 | return self.stream.set_property(c_api.ONI_STREAM_PROPERTY_EXPOSURE, exposure) 400 | exposure = property(get_exposure, set_exposure) 401 | 402 | 403 | class VideoStream(HandleObject): 404 | def __init__(self, device, sensor_type): 405 | self.device = device 406 | self.sensor_type = sensor_type 407 | self._callbacks = {} 408 | handle = c_api.OniStreamHandle() 409 | c_api.oniDeviceCreateStream(self.device._handle, sensor_type, ctypes.byref(handle)) 410 | HandleObject.__init__(self, handle) 411 | _registered_video_streams.add(self) 412 | if (self.is_property_supported(c_api.ONI_STREAM_PROPERTY_AUTO_WHITE_BALANCE) and 413 | self.is_property_supported(c_api.ONI_STREAM_PROPERTY_AUTO_EXPOSURE)): 414 | self.camera = CameraSettings(self) 415 | else: 416 | self.camera = None 417 | 418 | def _close(self): 419 | if is_initialized(): 420 | self.unregister_all_new_frame_listeners() 421 | self.stop() 422 | c_api.oniStreamDestroy(self._handle) 423 | self.camera = None 424 | 425 | def get_sensor_info(self): 426 | return SensorInfo.from_stream_handle(self._handle) 427 | 428 | def get_recoder(self, filename, allow_lossy_compression = False): 429 | rec = Recorder(filename) 430 | rec.attach(self, allow_lossy_compression) 431 | return rec 432 | 433 | def start(self): 434 | c_api.oniStreamStart(self._handle) 435 | def stop(self): 436 | c_api.oniStreamStop(self._handle) 437 | 438 | def read_frame(self): 439 | pframe = ctypes.POINTER(c_api.OniFrame)() 440 | c_api.oniStreamReadFrame(self._handle, ctypes.byref(pframe)) 441 | return VideoFrame(pframe) 442 | 443 | def register_new_frame_listener(self, callback): 444 | """callback(stream : VideoStream) -> None""" 445 | if callback in self._callbacks: 446 | raise ValueError("Callback %r already registered" % (callback,)) 447 | def adapter(handle, cookie): 448 | callback(self) 449 | 450 | cb_handle = c_api.OniCallbackHandle() 451 | cbobj = c_api.OniNewFrameCallback(adapter) 452 | self._callbacks[callback] = (cb_handle, adapter, cbobj) 453 | c_api.oniStreamRegisterNewFrameCallback(self._handle, cbobj, None, ctypes.byref(cb_handle)) 454 | 455 | def unregister_new_frame_listener(self, callback): 456 | if callback not in self._callbacks: 457 | return 458 | cb_handle, _, _ = self._callbacks.pop(callback) 459 | c_api.oniStreamUnregisterNewFrameCallback(self._handle, cb_handle) 460 | 461 | def unregister_all_new_frame_listeners(self): 462 | for cb_handle, _, _ in self._callbacks.values(): 463 | c_api.oniStreamUnregisterNewFrameCallback(self._handle, cb_handle) 464 | self._callbacks.clear() 465 | 466 | def get_property(self, property_id, rettype): 467 | ret = rettype() 468 | size = ctypes.c_int(ctypes.sizeof(ret)) 469 | c_api.oniStreamGetProperty(self._handle, property_id, ctypes.byref(ret), ctypes.byref(size)) 470 | return ret 471 | def get_int_property(self, property_id): 472 | return self.get_property(property_id, ctypes.c_int).value 473 | def set_property(self, property_id, obj, size = None): 474 | obj, size = _py_to_ctype_obj(obj) 475 | if size is None: 476 | size = ctypes.sizeof(obj) 477 | c_api.oniStreamSetProperty(self._handle, property_id, ctypes.byref(obj), 
size) 478 | def is_property_supported(self, property_id): 479 | return bool(c_api.oniStreamIsPropertySupported(self._handle, property_id)) 480 | 481 | def invoke(self, command_id, data, size = None): 482 | data, size = _py_to_ctype_obj(data) 483 | if size is None: 484 | size = ctypes.sizeof(data) 485 | c_api.oniStreamInvoke(self._handle, command_id, data, size) 486 | def is_command_supported(self, command_id): 487 | return bool(c_api.oniStreamIsCommandSupported(self._handle, command_id)) 488 | 489 | def get_video_mode(self): 490 | return self.get_property(c_api.ONI_STREAM_PROPERTY_VIDEO_MODE, c_api.OniVideoMode) 491 | def set_video_mode(self, video_mode): 492 | self.set_property(c_api.ONI_STREAM_PROPERTY_VIDEO_MODE, video_mode) 493 | video_mode = property(get_video_mode, set_video_mode) 494 | 495 | def get_max_pixel_value(self): 496 | return self.get_int_property(c_api.ONI_STREAM_PROPERTY_MAX_VALUE) 497 | def get_min_pixel_value(self): 498 | return self.get_int_property(c_api.ONI_STREAM_PROPERTY_MIN_VALUE) 499 | 500 | def is_cropping_supported(self): 501 | return self.is_property_supported(c_api.ONI_STREAM_PROPERTY_CROPPING) 502 | def get_cropping(self): 503 | return self.get_property(c_api.ONI_STREAM_PROPERTY_CROPPING, c_api.OniCropping) 504 | def set_cropping(self, originX, originY, width, height): 505 | cropping = c_api.OniCropping(enabled = True, originX = originX, originY = originY, width = width, height = height) 506 | self.set_property(c_api.ONI_STREAM_PROPERTY_CROPPING, cropping) 507 | cropping = property(get_property, set_property) 508 | 509 | def reset_cropping(self): 510 | self.set_property(c_api.ONI_STREAM_PROPERTY_CROPPING, c_api.OniCropping(enabled = False)) 511 | 512 | def get_mirroring_enabled(self): 513 | return bool(self.get_property(c_api.ONI_STREAM_PROPERTY_MIRRORING, c_api.OniBool)) 514 | def set_mirroring_enabled(self, enabled): 515 | self.set_property(c_api.ONI_STREAM_PROPERTY_MIRRORING, enabled) 516 | mirroring_enabled = property(get_mirroring_enabled, set_mirroring_enabled) 517 | 518 | def get_horizontal_fov(self): 519 | return self.get_property(c_api.ONI_STREAM_PROPERTY_HORIZONTAL_FOV, ctypes.c_float).value 520 | def get_vertical_fov(self): 521 | return self.get_property(c_api.ONI_STREAM_PROPERTY_VERTICAL_FOV, ctypes.c_float).value 522 | 523 | def get_number_of_frames(self): 524 | return self.get_int_property(c_api.ONI_STREAM_PROPERTY_NUMBER_OF_FRAMES) 525 | 526 | def set_frame_buffers_allocator(self, allocator = None): 527 | if not allocator: 528 | c_api.oniStreamSetFrameBuffersAllocator(self._handle, None, None, None) 529 | else: 530 | return c_api.oniStreamSetFrameBuffersAllocator(self._handle, 531 | allocator._allocate_callback, allocator._free_callback, None) 532 | 533 | class FrameAllocator(object): 534 | def __init__(self): 535 | # keep reference to the methods (they are passed as callbacks) 536 | self._alloc_callback = c_api.OniFrameAllocBufferCallback(self._allocate_frame_buffer_callback) 537 | self._free_callback = c_api.OniFrameFreeBufferCallback(self._free_frame_buffer_callback) 538 | 539 | def allocate_frame_buffer(self, size): 540 | raise NotImplementedError() 541 | def free_frame_buffer(self, pdata): 542 | raise NotImplementedError() 543 | 544 | def _allocate_frame_buffer_callback(self, size, _): 545 | return self.allocate_frame_buffer(size) 546 | def _free_frame_buffer_callback(self, pdata, _): 547 | return self.free_frame_buffer(pdata) 548 | 549 | class Recorder(HandleObject): 550 | def __init__(self, filename): 551 | self.filename = filename 552 
| handle = c_api.OniRecorderHandle() 553 | c_api.oniCreateRecorder(filename, ctypes.byref(handle)) 554 | HandleObject.__init__(self, handle) 555 | _registered_recorders.add(self) 556 | 557 | def _close(self): 558 | if is_initialized(): 559 | c_api.oniRecorderDestroy(ctypes.byref(self._handle)) 560 | 561 | def attach(self, stream, allow_lossy_compression = False): 562 | c_api.oniRecorderAttachStream(self._handle, stream._handle, allow_lossy_compression) 563 | def start(self): 564 | c_api.oniRecorderStart(self._handle) 565 | def stop(self): 566 | c_api.oniRecorderStop(self._handle) 567 | 568 | 569 | def convert_world_to_depth(depthStream, worldX, worldY, worldZ): 570 | """const VideoStream& depthStream, float worldX, float worldY, float worldZ""" 571 | depthX = ctypes.c_float() 572 | depthY = ctypes.c_float() 573 | depthZ = ctypes.c_float() 574 | c_api.oniCoordinateConverterWorldToDepth(depthStream._handle, worldX, worldY, worldZ, 575 | ctypes.byref(depthX), ctypes.byref(depthY), ctypes.byref(depthZ)) 576 | return depthX.value, depthY.value, depthZ.value 577 | 578 | def convert_depth_to_world(depthStream, depthX, depthY, depthZ): 579 | """const VideoStream& depthStream, float depthX, float depthY, float depthZ, float* pWorldX, float* pWorldY, float* pWorldZ""" 580 | depthX = ctypes.c_float() 581 | depthY = ctypes.c_float() 582 | depthZ = ctypes.c_float() 583 | c_api.oniCoordinateConverterDepthToWorld(depthStream._handle, depthX, depthY, depthZ, 584 | ctypes.byref(depthX), ctypes.byref(depthY), ctypes.byref(depthZ)) 585 | return depthX.value, depthY.value, depthZ.value 586 | 587 | def convert_depth_to_color(depthStream, colorStream, depthX, depthY, depthZ): 588 | """const VideoStream& depthStream, const VideoStream& colorStream, int depthX, int depthY, DepthPixel depthZ, int* pColorX, int* pColorY""" 589 | colorX = ctypes.c_int() 590 | colorY = ctypes.c_int() 591 | c_api.oniCoordinateConverterDepthToColor(depthStream._handle, colorStream._handle, depthX, depthY, depthZ, 592 | ctypes.byref(colorX), ctypes.byref(colorY)) 593 | return colorX.value, colorY.value 594 | 595 | def get_bytes_per_pixel(format): 596 | c_api.oniFormatBytesPerPixel(format) 597 | 598 | ''' 599 | Inherit from this class to make your own listerner? Fill in the Implement Me functions? 
600 | ''' 601 | class DeviceListener(HandleObject): 602 | def __init__(self): 603 | handle = c_api.OniCallbackHandle() 604 | self._callbacks = c_api.OniDeviceCallbacks( 605 | deviceConnected = c_api.OniDeviceInfoCallback(self._on_connected), 606 | deviceDisconnected = c_api.OniDeviceInfoCallback(self._on_disconnected), 607 | deviceStateChanged = c_api.OniDeviceStateCallback(self._on_state_changed), 608 | ) 609 | c_api.oniRegisterDeviceCallbacks(self._callbacks, None, ctypes.byref(handle)) 610 | HandleObject.__init__(self, handle) 611 | _registered_device_listeners.add(self) 612 | self._connected_uris = set() 613 | self._disconnected_uris = set() 614 | 615 | def _close(self): 616 | if is_initialized(): 617 | c_api.oniUnregisterDeviceCallbacks(self._handle) 618 | 619 | def unregister(self): 620 | self.close() 621 | 622 | def _on_connected(self, pdevinfo, _): 623 | devinfo = pdevinfo[0] 624 | self._disconnected_uris.discard(devinfo.uri) 625 | if devinfo.uri in self._connected_uris: 626 | return 627 | self._connected_uris.add(devinfo.uri) 628 | self.on_connected(devinfo) 629 | 630 | def _on_disconnected(self, pdevinfo, _): 631 | devinfo = pdevinfo[0] 632 | self._connected_uris.discard(devinfo.uri) 633 | if devinfo.uri in self._disconnected_uris: 634 | return 635 | self._disconnected_uris.add(devinfo.uri) 636 | self.on_disconnected(devinfo) 637 | 638 | def _on_state_changed(self, pdevinfo, state, _): 639 | self.on_state_changed(pdevinfo[0], state) 640 | 641 | def on_connected(self, devinfo): 642 | """Implement me""" 643 | pass 644 | def on_disconnected(self, devinfo): 645 | """Implement me""" 646 | pass 647 | def on_state_changed(self, devinfo, state): 648 | """Implement me""" 649 | pass 650 | 651 | def get_log_filename(): 652 | buf = ctypes.create_string_buffer(1024) 653 | try: 654 | c_api.oniGetLogFileName(buf, ctypes.sizeof(buf)) 655 | except OpenNIError: 656 | # not logging to file 657 | return None 658 | else: 659 | return buf.value 660 | 661 | def configure_logging(directory = None, severity = None, console = None): 662 | """ 663 | severity: 0 - Verbose; 1 - Info; 2 - Warning; 3 - Error. 
Default - None 664 | """ 665 | if directory is not None: 666 | c_api.oniSetLogFileOutput(True) 667 | c_api.oniSetLogOutputFolder(directory) 668 | else: 669 | c_api.oniSetLogFileOutput(False) 670 | 671 | if severity is not None: 672 | c_api.oniSetLogMinSeverity(severity) 673 | 674 | if console is not None: 675 | c_api.oniSetLogConsoleOutput(bool(console)) 676 | 677 | 678 | -------------------------------------------------------------------------------- /primesense/openni2.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/leezl/OpenNi-Python/4a0731a1f76011260b0c044922e8d20ccb61adb6/primesense/openni2.pyc -------------------------------------------------------------------------------- /primesense/utils.py: -------------------------------------------------------------------------------- 1 | import ctypes 2 | 3 | 4 | class InitializationError(Exception): 5 | pass 6 | 7 | class OpenNIError(Exception): 8 | def __init__(self, code, message, logfile): 9 | self.code = code 10 | self.logfile = logfile 11 | Exception.__init__(self, code, message, logfile) 12 | 13 | class NiteError(Exception): 14 | def __init__(self, code): 15 | Exception.__init__(self, code) 16 | 17 | def inherit_properties(struct, attrname): 18 | def deco(cls): 19 | for name, _ in struct._fields_: 20 | def getter(self, name = name): 21 | return getattr(getattr(self, attrname), name) 22 | def setter(self, value, name = name): 23 | return setattr(getattr(self, attrname), name, value) 24 | setattr(cls, name, property(getter, setter)) 25 | return cls 26 | return deco 27 | 28 | class ClosedHandleError(Exception): 29 | pass 30 | class ClosedHandle(object): 31 | def __getattr__(self, name): 32 | raise ClosedHandleError("Invalid handle") 33 | def __bool__(self): 34 | return False 35 | __nonzero__ = __bool__ 36 | ClosedHandle = ClosedHandle() 37 | 38 | class HandleObject(object): 39 | __slots__ = ["_handle"] 40 | def __init__(self, handle): 41 | self._handle = handle 42 | def __del__(self): 43 | self.close() 44 | def __enter__(self): 45 | return self 46 | def __exit__(self, t, v, tb): 47 | self.close() 48 | def __bool__(self): 49 | return hasattr(self, "_handle") and bool(self._handle) 50 | __nonzero__ = __bool__ 51 | def close(self): 52 | if hasattr(self, "_handle") and self._handle: 53 | self._close() 54 | self._handle = ClosedHandle 55 | def _close(self): 56 | raise NotImplementedError() 57 | 58 | def _py_to_ctype_obj(obj): 59 | size = None 60 | if isinstance(obj, (int, bool)): 61 | obj = ctypes.c_int(obj) 62 | elif isinstance(obj, float): 63 | obj = ctypes.c_float(obj) 64 | elif isinstance(obj, str): 65 | obj = ctypes.create_string_buffer(obj) 66 | size = len(obj) 67 | return obj, size 68 | 69 | 70 | class CEnumMeta(type(ctypes.c_int)): 71 | def __new__(cls, name, bases, namespace): 72 | cls2 = type(ctypes.c_int).__new__(cls, name, bases, namespace) 73 | if namespace.get("__module__") != __name__: 74 | namespace["_values_"].clear() 75 | for name in namespace["_names_"].keys(): 76 | if name.startswith("_"): 77 | continue 78 | setattr(cls2, name, cls2(namespace[name])) 79 | namespace["_names_"][name] = namespace[name] 80 | namespace["_values_"][namespace[name]] = name 81 | return cls2 82 | 83 | def with_meta(meta, base = object): 84 | return meta("NewBase", (base,), {"__module__" : __name__}) 85 | 86 | class CEnum(with_meta(CEnumMeta, ctypes.c_int)): 87 | _names_ = {} 88 | _values_ = {} 89 | __slots__ = [] 90 | 91 | def __repr__(self): 92 | name = 
self._values_.get(self.value) 93 | if name is None: 94 | return "%s(%r)" % (self.__class__.__name__, self.val) 95 | else: 96 | return "%s.%s" % (self.__class__.__name__, name) 97 | @classmethod 98 | def from_param(cls, obj): 99 | return int(obj) 100 | @classmethod 101 | def from_name(cls, name): 102 | return cls._names_[name] 103 | @classmethod 104 | def from_value(cls, val): 105 | return getattr(self, cls._values_[val]) 106 | 107 | def __int__(self): 108 | return int(self.value) 109 | def __index__(self): 110 | return int(self) 111 | def __eq__(self, other): 112 | return int(self) == int(other) 113 | def __ne__(self, other): 114 | return int(self) != int(other) 115 | def __gt__(self, other): 116 | return int(self) > int(other) 117 | def __ge__(self, other): 118 | return int(self) >= int(other) 119 | def __lt__(self, other): 120 | return int(self) < int(other) 121 | def __le__(self, other): 122 | return int(self) <= int(other) 123 | def __hash__(self): 124 | return hash(int(self)) 125 | 126 | 127 | class DLLNotLoaded(Exception): 128 | pass 129 | 130 | class UnloadedDLL(object): 131 | __slots__ = [] 132 | def __bool__(self): 133 | return False 134 | __nonzero__ = __bool__ 135 | def __call__(self, *args, **kwargs): 136 | raise DLLNotLoaded("DLL is not loaded") 137 | def __getattr__(self, name): 138 | raise DLLNotLoaded("DLL is not loaded") 139 | 140 | UnloadedDLL = UnloadedDLL() 141 | 142 | 143 | -------------------------------------------------------------------------------- /primesense/utils.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/leezl/OpenNi-Python/4a0731a1f76011260b0c044922e8d20ccb61adb6/primesense/utils.pyc -------------------------------------------------------------------------------- /testPythonOpenni.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Short test file, may get longer 3 | ''' 4 | #from primesense import openni2 5 | #from primesense import _openni2 as c_api 6 | #numpy, for matrix manipulation fo the images 7 | #import numpy as np 8 | #matplotlib, for temporary display to check the images 9 | #import matplotlib.pyplot as plt 10 | #NOTE: Matplotlib will not display depth correctly: uint16 is made into uint8, so overflow cause stripes 11 | 12 | ''' 13 | Starter test program 14 | ''' 15 | 16 | from primesense import openni2 17 | from primesense import _openni2 as c_api 18 | import numpy as np 19 | import matplotlib.pyplot as plt 20 | #alternate to matplotlib: 21 | #import cv2 #opencv, which uses numpy arrays for images anyway: can handle uint16 depth 22 | 23 | #takes frame data, and the type it is and displays the image 24 | #frame_data = frame.get_buffer_as_blah(); thisType = numpy.someType 25 | def print_frame(frame_data, thisType): 26 | #need to know what format to get the buffer in: 27 | # if color pixel type is RGB888, then it must be uint8, 28 | #otherwise it will split the pixels incorrectly 29 | img = np.frombuffer(frame_data, dtype=thisType) 30 | whatisit = img.size 31 | #QVGA is what my camera defaulted to, so: 1 x 240 x 320 32 | #also order was weird (1, 240, 320) not (320, 240, 1) 33 | if whatisit == (320*240*1):#QVGA 34 | #shape it accordingly, that is, 1048576=1024*1024 35 | img.shape = (1, 240, 320)#small chance these may be reversed in certain apis...This order? Really? 
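        #(equivalent alternative, untested sketch: img = np.dstack(3 * [img.reshape(240, 320)])
        # gives the same (240, 320, 3) result, assuming the buffer is a single row-major frame)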
36 | #filling rgb channels with duplicates so matplotlib can draw it (expects rgb) 37 | img = np.concatenate((img, img, img), axis=0) 38 | #because the order is so weird, rearrange it (third dimension must be 3 or 4) 39 | img = np.swapaxes(img, 0, 2) 40 | img = np.swapaxes(img, 0, 1) 41 | elif whatisit == (320*240*3): 42 | #color is miraculously in this order 43 | img.shape = (240, 320, 3) 44 | else: 45 | print "Frames are of size: ",img.size 46 | 47 | #images still appear to be reflected, but I don't need them to be correct in that way 48 | print img.shape 49 | #need both of follwoing: plt.imShow adds image to plot 50 | plt.imshow(img) 51 | #plt.show shows all the currently added figures 52 | plt.show() 53 | 54 | openni2.initialize() # can also accept the path of the OpenNI redistribution 55 | 56 | dev = openni2.Device.open_any() 57 | print dev.get_sensor_info(openni2.SENSOR_DEPTH) 58 | 59 | depth_stream = dev.create_depth_stream() 60 | depth_stream.set_video_mode(c_api.OniVideoMode(pixelFormat = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_DEPTH_1_MM, resolutionX = 320, resolutionY = 240, fps = 30)) 61 | depth_stream.start() 62 | frame = depth_stream.read_frame() 63 | frame_data = frame.get_buffer_as_uint16() 64 | print_frame(frame_data, np.uint16) 65 | depth_stream.stop() 66 | 67 | depth_stream.set_video_mode(c_api.OniVideoMode(pixelFormat = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_DEPTH_100_UM, resolutionX = 320, resolutionY = 240, fps = 30)) 68 | depth_stream.start() 69 | frame = depth_stream.read_frame() 70 | frame_data = frame.get_buffer_as_uint16() 71 | print_frame(frame_data, np.uint16) 72 | depth_stream.stop() 73 | 74 | print "Testing Color " 75 | color_stream = dev.create_color_stream() 76 | color_stream.set_video_mode(c_api.OniVideoMode(pixelFormat = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_RGB888, resolutionX = 320, resolutionY = 240, fps = 30)) 77 | color_stream.start() 78 | frame = color_stream.read_frame() 79 | frame_data1 = frame.get_buffer_as_uint8() 80 | print_frame(frame_data1, np.uint8) 81 | color_stream.stop() 82 | 83 | openni2.unload() 84 | -------------------------------------------------------------------------------- /testPythonOpenniFull.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Short test file, may get longer 3 | ''' 4 | from primesense import openni2 5 | from primesense import _openni2 as c_api 6 | #numpy, for matrix manipulation fo the images 7 | import numpy as np 8 | #matplotlib, for temporary display to check the images 9 | import matplotlib.pyplot as plt 10 | #alternate: 11 | #import cv2 #can display uint16 images (correctly load and store depth) 12 | 13 | #takes frame data, and the type it is and displays the image 14 | #frame_data = frame.get_buffer_as_blah(); thisType = numpy.someType 15 | def print_frame(frame_data, thisType): 16 | #need to know what format to get the buffer in: 17 | # if color pixel type is RGB888, then it must be uint8, 18 | #otherwise it will split the pixels incorrectly 19 | img = np.frombuffer(frame_data, dtype=thisType) 20 | whatisit = img.size 21 | #QVGA is what my camera defaulted to, so: 1 x 480 x 640 22 | #also order was weird (1, 480, 640) not (640, 480, 1) 23 | if whatisit == (640*480*1):#QVGA 24 | img.shape = (1, 480, 640) 25 | #This order? Really? 
^ 26 | #shape it accordingly 27 | img = np.concatenate((img, img, img), axis=0) 28 | img = np.swapaxes(img, 0, 2) 29 | img = np.swapaxes(img, 0, 1) 30 | elif whatisit == (640*480*3): 31 | img.shape = (480, 640, 3) 32 | #these are, what is it, normalizsed? 33 | else: 34 | print "Frames are of size: ",img.size 35 | #images still appear to be reflected, but I don't need them to be correct in that way 36 | print img.shape 37 | #need both of follwoing: plt.imShow adds image to plot 38 | plt.imshow(img) 39 | #plt.show shows all the currently added figures 40 | #plt.show() 41 | plt.pause(0.1) 42 | plt.draw() 43 | plt.close() 44 | 45 | def print_frames(frame_data, frame_data2, thisType, thisType2): 46 | img = np.frombuffer(frame_data, dtype=thisType) 47 | whatisit = img.size 48 | print "Image size 1 ",whatisit 49 | if whatisit == (640*480*1): #QVGA, default 50 | img.shape = (1, 480, 640) 51 | #This order? Really? ^ 52 | #shape it accordingly 53 | img = np.concatenate((img, img, img), axis=0) 54 | img = np.swapaxes(img, 0, 2) 55 | img = np.swapaxes(img, 0, 1) 56 | elif whatisit == (640*480*3): 57 | img.shape = (480, 640, 3) 58 | #these are, what is it, normalizsed? 59 | else: 60 | print "Frames are of size: ",img.size 61 | print img.shape 62 | plt.imshow(img) 63 | plt.show() 64 | 65 | img1 = np.frombuffer(frame_data2, dtype=thisType2) 66 | whatisit = img1.size 67 | print "Image size 2 ",whatisit 68 | if whatisit == (640*480*1): #QVGA, default 69 | img1.shape = (1, 480, 640) 70 | #This order? Really? ^ 71 | #shape it accordingly 72 | img1 = np.concatenate((img1, img1, img1), axis=0) 73 | img1 = np.swapaxes(img1, 0, 2) 74 | img1 = np.swapaxes(img1, 0, 1) 75 | elif whatisit == (640*480*3): 76 | img1.shape = (480, 640, 3) 77 | img1 = np.swapaxes(img1, 0, 2) 78 | #these are, what is it, normalizsed? 79 | else: 80 | print "Frames are of size: ",img1.size 81 | print img1.shape 82 | plt.imshow(img1) 83 | 84 | plt.show() 85 | 86 | openni2.initialize() # can also accept the path of the OpenNI redistribution 87 | 88 | dev = openni2.Device.open_any() 89 | print dev.get_sensor_info(openni2.SENSOR_DEPTH) 90 | 91 | print "testing depth " 92 | depth_stream = dev.create_depth_stream() 93 | '''depth_stream.set_video_mode(c_api.OniVideoMode(pixelFormat = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_DEPTH_1_MM, resolutionX = 640, resolutionY = 480, fps = 30)) 94 | depth_stream.start() 95 | frame = depth_stream.read_frame() 96 | frame_data = frame.get_buffer_as_uint16() 97 | print_frame(frame_data, np.uint16) 98 | depth_stream.stop()''' 99 | 100 | '''print "Testing depth 2 " 101 | depth_stream.set_video_mode(c_api.OniVideoMode(pixelFormat = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_DEPTH_100_UM, resolutionX = 640, resolutionY = 480, fps = 30)) 102 | depth_stream.start() 103 | frame = depth_stream.read_frame() 104 | frame_data = frame.get_buffer_as_uint16() 105 | print_frame(frame_data, np.uint16) 106 | depth_stream.stop()''' 107 | 108 | print "Testing Color " 109 | color_stream = dev.create_color_stream() 110 | '''color_stream.set_video_mode(c_api.OniVideoMode(pixelFormat = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_RGB888, resolutionX = 640, resolutionY = 480, fps = 30)) 111 | color_stream.start() 112 | frame = color_stream.read_frame() 113 | frame_data1 = frame.get_buffer_as_uint8() 114 | print_frame(frame_data1, np.uint8) 115 | color_stream.stop()''' 116 | 117 | print "what?" 
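#The block below reconfigures both streams, starts them together and reads one
#depth + color frame pair. If the device supports it, hardware depth/color sync
#could be requested first (untested sketch; wraps oniDeviceEnableDepthColorSync):
#dev.depth_color_sync = True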
118 | depth_stream.set_video_mode(c_api.OniVideoMode(pixelFormat = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_DEPTH_100_UM, resolutionX = 640, resolutionY = 480, fps = 30)) 119 | color_stream.set_video_mode(c_api.OniVideoMode(pixelFormat = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_RGB888, resolutionX = 640, resolutionY = 480, fps = 30)) 120 | print "Starting Streams" 121 | depth_stream.start() 122 | color_stream.start() 123 | print "Read frames" 124 | frame = depth_stream.read_frame() 125 | frame1 = color_stream.read_frame() 126 | print "Getting Buffer" 127 | frame_data = frame.get_buffer_as_uint16() 128 | frame_data1 = frame1.get_buffer_as_uint8() 129 | print "Printing" 130 | print_frames(frame_data, frame_data1, np.uint16, np.uint8) 131 | depth_stream.stop() 132 | color_stream.stop() 133 | 134 | openni2.unload() 135 | -------------------------------------------------------------------------------- /testPythonOpenniQuarter.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Short test file, may get longer 3 | ''' 4 | from primesense import openni2 5 | from primesense import _openni2 as c_api 6 | #numpy, for matrix manipulation fo the images 7 | import numpy as np 8 | #matplotlib, for temporary display to check the images 9 | import matplotlib.pyplot as plt 10 | 11 | #takes frame data, and the type it is and displays the image 12 | #frame_data = frame.get_buffer_as_blah(); thisType = numpy.someType 13 | def print_frame(frame_data, thisType): 14 | #need to know what format to get the buffer in: 15 | # if color pixel type is RGB888, then it must be uint8, 16 | #otherwise it will split the pixels incorrectly 17 | img = np.frombuffer(frame_data, dtype=thisType) 18 | whatisit = img.size 19 | #QVGA is what my camera defaulted to, so: 1 x 240 x 320 20 | #also order was weird (1, 240, 320) not (320, 240, 1) 21 | if whatisit == (320*240*1):#QVGA 22 | img.shape = (1, 240, 320) 23 | #This order? Really? ^ 24 | #shape it accordingly 25 | img = np.concatenate((img, img, img), axis=0) 26 | img = np.swapaxes(img, 0, 2) 27 | img = np.swapaxes(img, 0, 1) 28 | elif whatisit == (320*240*3): 29 | img.shape = (240, 320, 3) 30 | #these are, what is it, normalizsed? 31 | elif whatisit == (640*480*1): 32 | img.shape = (1, 480, 640) 33 | #This order? Really? 
^ 34 | #shape it accordingly 35 | img = np.concatenate((img, img, img), axis=0) 36 | img = np.swapaxes(img, 0, 2) 37 | img = np.swapaxes(img, 0, 1) 38 | elif whatisit == (640*480*3): 39 | img.shape = (480,640,3) 40 | else: 41 | print "Frames are of size: ",img.size 42 | #images still appear to be reflected, but I don't need them to be correct in that way 43 | print img.shape 44 | #need both of follwoing: plt.imShow adds image to plot 45 | plt.imshow(img) 46 | #plt.show shows all the currently added figures 47 | #plt.show() 48 | plt.pause(0.1) 49 | plt.draw() 50 | plt.close() 51 | 52 | openni2.initialize() # can also accept the path of the OpenNI redistribution 53 | 54 | dev = openni2.Device.open_any() 55 | print dev.get_sensor_info(openni2.SENSOR_DEPTH) 56 | 57 | print "testing depth " 58 | depth_stream = dev.create_depth_stream() 59 | #depth_stream.set_video_mode(c_api.OniVideoMode(pixelFormat = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_DEPTH_1_MM, resolutionX = 320, resolutionY = 240, fps = 30)) 60 | depth_stream.set_video_mode(c_api.OniVideoMode(pixelFormat = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_DEPTH_1_MM, resolutionX = 640, resolutionY = 480, fps = 30)) 61 | 62 | print "Testing Color " 63 | color_stream = dev.create_color_stream() 64 | #color_stream.set_video_mode(c_api.OniVideoMode(pixelFormat = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_RGB888, resolutionX = 320, resolutionY = 240, fps = 30)) 65 | color_stream.set_video_mode(c_api.OniVideoMode(pixelFormat = c_api.OniPixelFormat.ONI_PIXEL_FORMAT_RGB888, resolutionX = 640, resolutionY = 480, fps = 30)) 66 | 67 | print "Starting Streams" 68 | depth_stream.start() 69 | color_stream.start() 70 | openni2.wait_for_any_stream([depth_stream]) 71 | openni2.wait_for_any_stream([color_stream]) 72 | print "Reading frames" 73 | frame_depth = depth_stream.read_frame() 74 | frame_color = color_stream.read_frame() 75 | print "Getting Buffers" 76 | frame_data_depth = frame_depth.get_buffer_as_uint16() 77 | frame_data_color = frame_color.get_buffer_as_uint8() 78 | print "Printing" 79 | print_frame(frame_data_depth, np.uint16) 80 | print_frame(frame_data_color, np.uint8) 81 | depth_stream.stop() 82 | color_stream.stop() 83 | 84 | openni2.unload() --------------------------------------------------------------------------------