├── .gitignore ├── LICENSE ├── ObjectTrackDemo.xcodeproj ├── project.pbxproj └── project.xcworkspace │ └── contents.xcworkspacedata ├── ObjectTrackDemo ├── AVCamPreviewView.h ├── AVCamPreviewView.m ├── AppDelegate.h ├── AppDelegate.m ├── Assets.xcassets │ └── AppIcon.appiconset │ │ └── Contents.json ├── Base.lproj │ └── LaunchScreen.storyboard ├── CVPixelBufferUtils.h ├── CVPixelBufferUtils.m ├── Info.plist ├── UIImage+Convert.h ├── UIImage+Convert.mm ├── UIImage+Detect.h ├── UIImage+Detect.m ├── UIImage+Orientation.h ├── UIImage+Orientation.m ├── ViewController.h ├── ViewController.mm └── main.m └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | # Xcode 2 | # 3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore 4 | 5 | ## Build generated 6 | build/ 7 | DerivedData/ 8 | 9 | ## Various settings 10 | *.pbxuser 11 | !default.pbxuser 12 | *.mode1v3 13 | !default.mode1v3 14 | *.mode2v3 15 | !default.mode2v3 16 | *.perspectivev3 17 | !default.perspectivev3 18 | xcuserdata/ 19 | 20 | ## Other 21 | *.moved-aside 22 | *.xccheckout 23 | *.xcscmblueprint 24 | 25 | ## Obj-C/Swift specific 26 | *.hmap 27 | *.ipa 28 | *.dSYM.zip 29 | *.dSYM 30 | 31 | # CocoaPods 32 | # 33 | # We recommend against adding the Pods directory to your .gitignore. However 34 | # you should judge for yourself, the pros and cons are mentioned at: 35 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control 36 | # 37 | # Pods/ 38 | 39 | # Carthage 40 | # 41 | # Add this line if you want to avoid checking in source code from Carthage dependencies. 42 | # Carthage/Checkouts 43 | 44 | Carthage/Build 45 | 46 | # fastlane 47 | # 48 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the 49 | # screenshots whenever they are needed. 
50 | # For more information about the recommended setup visit: 51 | # https://docs.fastlane.tools/best-practices/source-control/#source-control 52 | 53 | fastlane/report.xml 54 | fastlane/Preview.html 55 | fastlane/screenshots 56 | fastlane/test_output 57 | 58 | # Code Injection 59 | # 60 | # After new code Injection tools there's a generated folder /iOSInjectionProject 61 | # https://github.com/johnno1962/injectionforxcode 62 | 63 | iOSInjectionProject/ 64 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 baiya 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /ObjectTrackDemo.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 48; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 2D89B25F1F04F9F100E71E29 /* CVPixelBufferUtils.m in Sources */ = {isa = PBXBuildFile; fileRef = 2D89B25E1F04F9F100E71E29 /* CVPixelBufferUtils.m */; }; 11 | 2DF10B901F01FBD1003A9756 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 2DF10B8F1F01FBC9003A9756 /* Accelerate.framework */; }; 12 | 551895731EE97D9000556788 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 551895721EE97D8900556788 /* AVFoundation.framework */; }; 13 | 55B831761EE8EFE800A35064 /* Vision.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 55B831751EE8EFDB00A35064 /* Vision.framework */; }; 14 | 55B908681EE8E99C0085647F /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 55B908671EE8E99C0085647F /* AppDelegate.m */; }; 15 | 55B9086B1EE8E99C0085647F /* ViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 55B9086A1EE8E99C0085647F /* ViewController.mm */; }; 16 | 55B908701EE8E99C0085647F /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 55B9086F1EE8E99C0085647F /* Assets.xcassets */; }; 17 | 55B908731EE8E99C0085647F /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 55B908711EE8E99C0085647F /* LaunchScreen.storyboard */; }; 18 | 55B908761EE8E99C0085647F /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 55B908751EE8E99C0085647F /* main.m */; }; 19 | CC98BBA21EFBAE5000A7D8BA /* UIImage+Orientation.m in Sources */ = {isa = PBXBuildFile; fileRef = CC98BBA11EFBAE5000A7D8BA /* UIImage+Orientation.m */; }; 20 | CC98BBA51EFBAF4800A7D8BA /* UIImage+Detect.m in Sources */ = {isa = PBXBuildFile; fileRef = 
CC98BBA41EFBAF4800A7D8BA /* UIImage+Detect.m */; }; 21 | CC98BBA81EFBB1E800A7D8BA /* UIImage+Convert.mm in Sources */ = {isa = PBXBuildFile; fileRef = CC98BBA71EFBB1E800A7D8BA /* UIImage+Convert.mm */; }; 22 | CCFED2891EFA53B7006A747D /* AVCamPreviewView.m in Sources */ = {isa = PBXBuildFile; fileRef = CCFED2881EFA53B7006A747D /* AVCamPreviewView.m */; }; 23 | /* End PBXBuildFile section */ 24 | 25 | /* Begin PBXFileReference section */ 26 | 2D89B25D1F04F9F100E71E29 /* CVPixelBufferUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CVPixelBufferUtils.h; sourceTree = ""; }; 27 | 2D89B25E1F04F9F100E71E29 /* CVPixelBufferUtils.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CVPixelBufferUtils.m; sourceTree = ""; }; 28 | 2DDFE5301F00F6E10095DFBC /* frameworks */ = {isa = PBXFileReference; lastKnownFileType = text; name = frameworks; path = TextAndHorizonDetectionDemo/frameworks; sourceTree = ""; }; 29 | 2DDFE5311F00F7010095DFBC /* NextCV.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = NextCV.framework; path = frameworks/NextCV.framework; sourceTree = ""; }; 30 | 2DDFE5321F00F7010095DFBC /* YTHumanDetection.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = YTHumanDetection.framework; path = frameworks/YTHumanDetection.framework; sourceTree = ""; }; 31 | 2DDFE5331F00F7010095DFBC /* rapidnet_ios_32bit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = rapidnet_ios_32bit.framework; path = frameworks/rapidnet_ios_32bit.framework; sourceTree = ""; }; 32 | 2DDFE5341F00F7030095DFBC /* opencv2.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = opencv2.framework; path = frameworks/opencv2.framework; sourceTree = ""; }; 33 | 2DDFE5351F00F7030095DFBC /* rapidnet_ios.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = rapidnet_ios.framework; path = 
frameworks/rapidnet_ios.framework; sourceTree = ""; }; 34 | 2DDFE5361F00F7600095DFBC /* rapidnet_ios_32bit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = rapidnet_ios_32bit.framework; sourceTree = ""; }; 35 | 2DDFE5371F00F7600095DFBC /* NextCV.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = NextCV.framework; sourceTree = ""; }; 36 | 2DDFE5381F00F7600095DFBC /* opencv2.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = opencv2.framework; sourceTree = ""; }; 37 | 2DDFE5391F00F7600095DFBC /* rapidnet_ios.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = rapidnet_ios.framework; sourceTree = ""; }; 38 | 2DDFE53E1F00F7710095DFBC /* YTHumanDetection.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = YTHumanDetection.framework; sourceTree = ""; }; 39 | 2DF10B661F013C4F003A9756 /* YTHumanDetection.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = YTHumanDetection.framework; path = TextAndHorizonDetectionDemo/frameworks/YTHumanDetection.framework; sourceTree = ""; }; 40 | 2DF10B691F013C96003A9756 /* rapidnet_ios_32bit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = rapidnet_ios_32bit.framework; path = TextAndHorizonDetectionDemo/frameworks/rapidnet_ios_32bit.framework; sourceTree = ""; }; 41 | 2DF10B6A1F013C96003A9756 /* NextCV.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = NextCV.framework; path = TextAndHorizonDetectionDemo/frameworks/NextCV.framework; sourceTree = ""; }; 42 | 2DF10B6B1F013C97003A9756 /* opencv2.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = opencv2.framework; path = TextAndHorizonDetectionDemo/frameworks/opencv2.framework; sourceTree = ""; }; 43 | 2DF10B6C1F013C99003A9756 /* rapidnet_ios.framework */ = {isa = PBXFileReference; 
lastKnownFileType = wrapper.framework; name = rapidnet_ios.framework; path = TextAndHorizonDetectionDemo/frameworks/rapidnet_ios.framework; sourceTree = ""; }; 44 | 2DF10B8F1F01FBC9003A9756 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; 45 | 551895721EE97D8900556788 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; 46 | 55B831751EE8EFDB00A35064 /* Vision.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Vision.framework; path = System/Library/Frameworks/Vision.framework; sourceTree = SDKROOT; }; 47 | 55B908631EE8E99C0085647F /* ObjectTrackDemo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = ObjectTrackDemo.app; sourceTree = BUILT_PRODUCTS_DIR; }; 48 | 55B908661EE8E99C0085647F /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 49 | 55B908671EE8E99C0085647F /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 50 | 55B908691EE8E99C0085647F /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; 51 | 55B9086A1EE8E99C0085647F /* ViewController.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = ViewController.mm; sourceTree = ""; }; 52 | 55B9086F1EE8E99C0085647F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 53 | 55B908721EE8E99C0085647F /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = 
Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 54 | 55B908741EE8E99C0085647F /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 55 | 55B908751EE8E99C0085647F /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 56 | CC98BBA01EFBAE5000A7D8BA /* UIImage+Orientation.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "UIImage+Orientation.h"; sourceTree = ""; }; 57 | CC98BBA11EFBAE5000A7D8BA /* UIImage+Orientation.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = "UIImage+Orientation.m"; sourceTree = ""; }; 58 | CC98BBA31EFBAF4800A7D8BA /* UIImage+Detect.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "UIImage+Detect.h"; sourceTree = ""; }; 59 | CC98BBA41EFBAF4800A7D8BA /* UIImage+Detect.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = "UIImage+Detect.m"; sourceTree = ""; }; 60 | CC98BBA61EFBB1E800A7D8BA /* UIImage+Convert.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "UIImage+Convert.h"; sourceTree = ""; }; 61 | CC98BBA71EFBB1E800A7D8BA /* UIImage+Convert.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = "UIImage+Convert.mm"; sourceTree = ""; }; 62 | CCFED2871EFA53B7006A747D /* AVCamPreviewView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AVCamPreviewView.h; sourceTree = ""; }; 63 | CCFED2881EFA53B7006A747D /* AVCamPreviewView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AVCamPreviewView.m; sourceTree = ""; }; 64 | /* End PBXFileReference section */ 65 | 66 | /* Begin PBXFrameworksBuildPhase section */ 67 | 55B908601EE8E99C0085647F /* Frameworks */ = { 68 | isa = PBXFrameworksBuildPhase; 69 | buildActionMask = 2147483647; 70 | files = ( 71 | 2DF10B901F01FBD1003A9756 /* Accelerate.framework in Frameworks 
*/, 72 | 551895731EE97D9000556788 /* AVFoundation.framework in Frameworks */, 73 | 55B831761EE8EFE800A35064 /* Vision.framework in Frameworks */, 74 | ); 75 | runOnlyForDeploymentPostprocessing = 0; 76 | }; 77 | /* End PBXFrameworksBuildPhase section */ 78 | 79 | /* Begin PBXGroup section */ 80 | 55B831741EE8EFDB00A35064 /* Frameworks */ = { 81 | isa = PBXGroup; 82 | children = ( 83 | 2DF10B8F1F01FBC9003A9756 /* Accelerate.framework */, 84 | 2DF10B6C1F013C99003A9756 /* rapidnet_ios.framework */, 85 | 2DDFE53E1F00F7710095DFBC /* YTHumanDetection.framework */, 86 | 2DF10B661F013C4F003A9756 /* YTHumanDetection.framework */, 87 | 2DDFE5311F00F7010095DFBC /* NextCV.framework */, 88 | 2DDFE5371F00F7600095DFBC /* NextCV.framework */, 89 | 2DF10B6A1F013C96003A9756 /* NextCV.framework */, 90 | 2DDFE5341F00F7030095DFBC /* opencv2.framework */, 91 | 2DDFE5381F00F7600095DFBC /* opencv2.framework */, 92 | 2DF10B6B1F013C97003A9756 /* opencv2.framework */, 93 | 2DDFE5331F00F7010095DFBC /* rapidnet_ios_32bit.framework */, 94 | 2DDFE5361F00F7600095DFBC /* rapidnet_ios_32bit.framework */, 95 | 2DF10B691F013C96003A9756 /* rapidnet_ios_32bit.framework */, 96 | 2DDFE5351F00F7030095DFBC /* rapidnet_ios.framework */, 97 | 2DDFE5391F00F7600095DFBC /* rapidnet_ios.framework */, 98 | 2DDFE5321F00F7010095DFBC /* YTHumanDetection.framework */, 99 | 2DDFE5301F00F6E10095DFBC /* frameworks */, 100 | 551895721EE97D8900556788 /* AVFoundation.framework */, 101 | 55B831751EE8EFDB00A35064 /* Vision.framework */, 102 | ); 103 | name = Frameworks; 104 | sourceTree = ""; 105 | }; 106 | 55B9085A1EE8E99C0085647F = { 107 | isa = PBXGroup; 108 | children = ( 109 | 55B908651EE8E99C0085647F /* ObjectTrackDemo */, 110 | 55B908641EE8E99C0085647F /* Products */, 111 | 55B831741EE8EFDB00A35064 /* Frameworks */, 112 | ); 113 | sourceTree = ""; 114 | }; 115 | 55B908641EE8E99C0085647F /* Products */ = { 116 | isa = PBXGroup; 117 | children = ( 118 | 55B908631EE8E99C0085647F /* ObjectTrackDemo.app */, 119 | ); 120 | 
name = Products; 121 | sourceTree = ""; 122 | }; 123 | 55B908651EE8E99C0085647F /* ObjectTrackDemo */ = { 124 | isa = PBXGroup; 125 | children = ( 126 | 55B908661EE8E99C0085647F /* AppDelegate.h */, 127 | 55B908671EE8E99C0085647F /* AppDelegate.m */, 128 | 55B908691EE8E99C0085647F /* ViewController.h */, 129 | 55B9086A1EE8E99C0085647F /* ViewController.mm */, 130 | 55B9086F1EE8E99C0085647F /* Assets.xcassets */, 131 | 55B908711EE8E99C0085647F /* LaunchScreen.storyboard */, 132 | 55B908741EE8E99C0085647F /* Info.plist */, 133 | 55B908751EE8E99C0085647F /* main.m */, 134 | CCFED2871EFA53B7006A747D /* AVCamPreviewView.h */, 135 | CCFED2881EFA53B7006A747D /* AVCamPreviewView.m */, 136 | CC98BBA01EFBAE5000A7D8BA /* UIImage+Orientation.h */, 137 | CC98BBA11EFBAE5000A7D8BA /* UIImage+Orientation.m */, 138 | CC98BBA31EFBAF4800A7D8BA /* UIImage+Detect.h */, 139 | CC98BBA41EFBAF4800A7D8BA /* UIImage+Detect.m */, 140 | CC98BBA61EFBB1E800A7D8BA /* UIImage+Convert.h */, 141 | CC98BBA71EFBB1E800A7D8BA /* UIImage+Convert.mm */, 142 | 2D89B25D1F04F9F100E71E29 /* CVPixelBufferUtils.h */, 143 | 2D89B25E1F04F9F100E71E29 /* CVPixelBufferUtils.m */, 144 | ); 145 | path = ObjectTrackDemo; 146 | sourceTree = ""; 147 | }; 148 | /* End PBXGroup section */ 149 | 150 | /* Begin PBXNativeTarget section */ 151 | 55B908621EE8E99C0085647F /* ObjectTrackDemo */ = { 152 | isa = PBXNativeTarget; 153 | buildConfigurationList = 55B908791EE8E99C0085647F /* Build configuration list for PBXNativeTarget "ObjectTrackDemo" */; 154 | buildPhases = ( 155 | 55B9085F1EE8E99C0085647F /* Sources */, 156 | 55B908601EE8E99C0085647F /* Frameworks */, 157 | 55B908611EE8E99C0085647F /* Resources */, 158 | ); 159 | buildRules = ( 160 | ); 161 | dependencies = ( 162 | ); 163 | name = ObjectTrackDemo; 164 | productName = TextAndHorizonDetectionDemo; 165 | productReference = 55B908631EE8E99C0085647F /* ObjectTrackDemo.app */; 166 | productType = "com.apple.product-type.application"; 167 | }; 168 | /* End PBXNativeTarget 
section */ 169 | 170 | /* Begin PBXProject section */ 171 | 55B9085B1EE8E99C0085647F /* Project object */ = { 172 | isa = PBXProject; 173 | attributes = { 174 | LastUpgradeCheck = 0900; 175 | ORGANIZATIONNAME = Maxcw; 176 | TargetAttributes = { 177 | 55B908621EE8E99C0085647F = { 178 | CreatedOnToolsVersion = 9.0; 179 | ProvisioningStyle = Automatic; 180 | }; 181 | }; 182 | }; 183 | buildConfigurationList = 55B9085E1EE8E99C0085647F /* Build configuration list for PBXProject "ObjectTrackDemo" */; 184 | compatibilityVersion = "Xcode 8.0"; 185 | developmentRegion = en; 186 | hasScannedForEncodings = 0; 187 | knownRegions = ( 188 | en, 189 | Base, 190 | ); 191 | mainGroup = 55B9085A1EE8E99C0085647F; 192 | productRefGroup = 55B908641EE8E99C0085647F /* Products */; 193 | projectDirPath = ""; 194 | projectRoot = ""; 195 | targets = ( 196 | 55B908621EE8E99C0085647F /* ObjectTrackDemo */, 197 | ); 198 | }; 199 | /* End PBXProject section */ 200 | 201 | /* Begin PBXResourcesBuildPhase section */ 202 | 55B908611EE8E99C0085647F /* Resources */ = { 203 | isa = PBXResourcesBuildPhase; 204 | buildActionMask = 2147483647; 205 | files = ( 206 | 55B908731EE8E99C0085647F /* LaunchScreen.storyboard in Resources */, 207 | 55B908701EE8E99C0085647F /* Assets.xcassets in Resources */, 208 | ); 209 | runOnlyForDeploymentPostprocessing = 0; 210 | }; 211 | /* End PBXResourcesBuildPhase section */ 212 | 213 | /* Begin PBXSourcesBuildPhase section */ 214 | 55B9085F1EE8E99C0085647F /* Sources */ = { 215 | isa = PBXSourcesBuildPhase; 216 | buildActionMask = 2147483647; 217 | files = ( 218 | CC98BBA51EFBAF4800A7D8BA /* UIImage+Detect.m in Sources */, 219 | CC98BBA21EFBAE5000A7D8BA /* UIImage+Orientation.m in Sources */, 220 | 55B9086B1EE8E99C0085647F /* ViewController.mm in Sources */, 221 | 55B908761EE8E99C0085647F /* main.m in Sources */, 222 | CCFED2891EFA53B7006A747D /* AVCamPreviewView.m in Sources */, 223 | 2D89B25F1F04F9F100E71E29 /* CVPixelBufferUtils.m in Sources */, 224 | 
CC98BBA81EFBB1E800A7D8BA /* UIImage+Convert.mm in Sources */, 225 | 55B908681EE8E99C0085647F /* AppDelegate.m in Sources */, 226 | ); 227 | runOnlyForDeploymentPostprocessing = 0; 228 | }; 229 | /* End PBXSourcesBuildPhase section */ 230 | 231 | /* Begin PBXVariantGroup section */ 232 | 55B908711EE8E99C0085647F /* LaunchScreen.storyboard */ = { 233 | isa = PBXVariantGroup; 234 | children = ( 235 | 55B908721EE8E99C0085647F /* Base */, 236 | ); 237 | name = LaunchScreen.storyboard; 238 | sourceTree = ""; 239 | }; 240 | /* End PBXVariantGroup section */ 241 | 242 | /* Begin XCBuildConfiguration section */ 243 | 55B908771EE8E99C0085647F /* Debug */ = { 244 | isa = XCBuildConfiguration; 245 | buildSettings = { 246 | ALWAYS_SEARCH_USER_PATHS = NO; 247 | CLANG_ANALYZER_NONNULL = YES; 248 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 249 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 250 | CLANG_CXX_LIBRARY = "libc++"; 251 | CLANG_ENABLE_MODULES = YES; 252 | CLANG_ENABLE_OBJC_ARC = YES; 253 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 254 | CLANG_WARN_BOOL_CONVERSION = YES; 255 | CLANG_WARN_COMMA = YES; 256 | CLANG_WARN_CONSTANT_CONVERSION = YES; 257 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 258 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 259 | CLANG_WARN_EMPTY_BODY = YES; 260 | CLANG_WARN_ENUM_CONVERSION = YES; 261 | CLANG_WARN_INFINITE_RECURSION = YES; 262 | CLANG_WARN_INT_CONVERSION = YES; 263 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 264 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 265 | CLANG_WARN_STRICT_PROTOTYPES = YES; 266 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 267 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 268 | CLANG_WARN_UNREACHABLE_CODE = YES; 269 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 270 | CODE_SIGN_IDENTITY = "iPhone Developer"; 271 | COPY_PHASE_STRIP = NO; 272 | DEBUG_INFORMATION_FORMAT = dwarf; 273 | ENABLE_BITCODE = NO; 274 | ENABLE_STRICT_OBJC_MSGSEND = YES; 275 | ENABLE_TESTABILITY = YES; 276 | GCC_C_LANGUAGE_STANDARD = gnu11; 
277 | GCC_DYNAMIC_NO_PIC = NO; 278 | GCC_NO_COMMON_BLOCKS = YES; 279 | GCC_OPTIMIZATION_LEVEL = 0; 280 | GCC_PREPROCESSOR_DEFINITIONS = ( 281 | "DEBUG=1", 282 | "$(inherited)", 283 | ); 284 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 285 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 286 | GCC_WARN_UNDECLARED_SELECTOR = YES; 287 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 288 | GCC_WARN_UNUSED_FUNCTION = YES; 289 | GCC_WARN_UNUSED_VARIABLE = YES; 290 | IPHONEOS_DEPLOYMENT_TARGET = 11.0; 291 | MTL_ENABLE_DEBUG_INFO = YES; 292 | ONLY_ACTIVE_ARCH = YES; 293 | OTHER_LDFLAGS = ""; 294 | SDKROOT = iphoneos; 295 | }; 296 | name = Debug; 297 | }; 298 | 55B908781EE8E99C0085647F /* Release */ = { 299 | isa = XCBuildConfiguration; 300 | buildSettings = { 301 | ALWAYS_SEARCH_USER_PATHS = NO; 302 | CLANG_ANALYZER_NONNULL = YES; 303 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 304 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 305 | CLANG_CXX_LIBRARY = "libc++"; 306 | CLANG_ENABLE_MODULES = YES; 307 | CLANG_ENABLE_OBJC_ARC = YES; 308 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 309 | CLANG_WARN_BOOL_CONVERSION = YES; 310 | CLANG_WARN_COMMA = YES; 311 | CLANG_WARN_CONSTANT_CONVERSION = YES; 312 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 313 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 314 | CLANG_WARN_EMPTY_BODY = YES; 315 | CLANG_WARN_ENUM_CONVERSION = YES; 316 | CLANG_WARN_INFINITE_RECURSION = YES; 317 | CLANG_WARN_INT_CONVERSION = YES; 318 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 319 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 320 | CLANG_WARN_STRICT_PROTOTYPES = YES; 321 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 322 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 323 | CLANG_WARN_UNREACHABLE_CODE = YES; 324 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 325 | CODE_SIGN_IDENTITY = "iPhone Developer"; 326 | COPY_PHASE_STRIP = NO; 327 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 328 | ENABLE_BITCODE = NO; 329 | ENABLE_NS_ASSERTIONS = NO; 330 | 
ENABLE_STRICT_OBJC_MSGSEND = YES; 331 | GCC_C_LANGUAGE_STANDARD = gnu11; 332 | GCC_NO_COMMON_BLOCKS = YES; 333 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 334 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 335 | GCC_WARN_UNDECLARED_SELECTOR = YES; 336 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 337 | GCC_WARN_UNUSED_FUNCTION = YES; 338 | GCC_WARN_UNUSED_VARIABLE = YES; 339 | IPHONEOS_DEPLOYMENT_TARGET = 11.0; 340 | MTL_ENABLE_DEBUG_INFO = NO; 341 | OTHER_LDFLAGS = ""; 342 | SDKROOT = iphoneos; 343 | VALIDATE_PRODUCT = YES; 344 | }; 345 | name = Release; 346 | }; 347 | 55B9087A1EE8E99C0085647F /* Debug */ = { 348 | isa = XCBuildConfiguration; 349 | buildSettings = { 350 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 351 | CLANG_CXX_LIBRARY = "libc++"; 352 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 353 | CODE_SIGN_STYLE = Automatic; 354 | DEVELOPMENT_TEAM = JS6V648H48; 355 | FRAMEWORK_SEARCH_PATHS = ( 356 | "$(inherited)", 357 | "$(PROJECT_DIR)", 358 | "$(PROJECT_DIR)/TextAndHorizonDetectionDemo/frameworks", 359 | ); 360 | INFOPLIST_FILE = "$(SRCROOT)/ObjectTrackDemo/Info.plist"; 361 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 362 | PRODUCT_BUNDLE_IDENTIFIER = com.test.ObjectTrackDemo; 363 | PRODUCT_NAME = "$(TARGET_NAME)"; 364 | PROVISIONING_PROFILE_SPECIFIER = ""; 365 | TARGETED_DEVICE_FAMILY = "1,2"; 366 | VALID_ARCHS = arm64; 367 | }; 368 | name = Debug; 369 | }; 370 | 55B9087B1EE8E99C0085647F /* Release */ = { 371 | isa = XCBuildConfiguration; 372 | buildSettings = { 373 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 374 | CLANG_CXX_LIBRARY = "libc++"; 375 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 376 | CODE_SIGN_STYLE = Automatic; 377 | DEVELOPMENT_TEAM = JS6V648H48; 378 | FRAMEWORK_SEARCH_PATHS = ( 379 | "$(inherited)", 380 | "$(PROJECT_DIR)", 381 | "$(PROJECT_DIR)/TextAndHorizonDetectionDemo/frameworks", 382 | ); 383 | INFOPLIST_FILE = "$(SRCROOT)/ObjectTrackDemo/Info.plist"; 384 | 
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 385 | PRODUCT_BUNDLE_IDENTIFIER = com.test.ObjectTrackDemo; 386 | PRODUCT_NAME = "$(TARGET_NAME)"; 387 | PROVISIONING_PROFILE_SPECIFIER = ""; 388 | TARGETED_DEVICE_FAMILY = "1,2"; 389 | VALID_ARCHS = arm64; 390 | }; 391 | name = Release; 392 | }; 393 | /* End XCBuildConfiguration section */ 394 | 395 | /* Begin XCConfigurationList section */ 396 | 55B9085E1EE8E99C0085647F /* Build configuration list for PBXProject "ObjectTrackDemo" */ = { 397 | isa = XCConfigurationList; 398 | buildConfigurations = ( 399 | 55B908771EE8E99C0085647F /* Debug */, 400 | 55B908781EE8E99C0085647F /* Release */, 401 | ); 402 | defaultConfigurationIsVisible = 0; 403 | defaultConfigurationName = Release; 404 | }; 405 | 55B908791EE8E99C0085647F /* Build configuration list for PBXNativeTarget "ObjectTrackDemo" */ = { 406 | isa = XCConfigurationList; 407 | buildConfigurations = ( 408 | 55B9087A1EE8E99C0085647F /* Debug */, 409 | 55B9087B1EE8E99C0085647F /* Release */, 410 | ); 411 | defaultConfigurationIsVisible = 0; 412 | defaultConfigurationName = Release; 413 | }; 414 | /* End XCConfigurationList section */ 415 | }; 416 | rootObject = 55B9085B1EE8E99C0085647F /* Project object */; 417 | } 418 | -------------------------------------------------------------------------------- /ObjectTrackDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /ObjectTrackDemo/AVCamPreviewView.h: -------------------------------------------------------------------------------- 1 | // 2 | // AVCamPreviewView.h 3 | // ObjectTrackDemo 4 | // 5 | // Created by baiya on 2017/6/21. 6 | // Copyright © 2017年 Maxcw. All rights reserved. 
7 | // 8 | 9 | #import 10 | #import 11 | 12 | @interface AVCamPreviewView : UIView 13 | 14 | @property (nonatomic, readonly) AVCaptureVideoPreviewLayer *videoPreviewLayer; 15 | 16 | @property (nonatomic) AVCaptureSession *session; 17 | 18 | @end 19 | -------------------------------------------------------------------------------- /ObjectTrackDemo/AVCamPreviewView.m: -------------------------------------------------------------------------------- 1 | // 2 | // AVCamPreviewView.m 3 | // ObjectTrackDemo 4 | // 5 | // Created by baiya on 2017/6/21. 6 | // Copyright © 2017年 Maxcw. All rights reserved. 7 | // 8 | 9 | #import "AVCamPreviewView.h" 10 | #import 11 | 12 | @implementation AVCamPreviewView 13 | 14 | 15 | + (Class)layerClass 16 | { 17 | return [AVCaptureVideoPreviewLayer class]; 18 | } 19 | 20 | - (AVCaptureVideoPreviewLayer *)videoPreviewLayer 21 | { 22 | return (AVCaptureVideoPreviewLayer *)self.layer; 23 | } 24 | 25 | - (AVCaptureSession *)session 26 | { 27 | return self.videoPreviewLayer.session; 28 | } 29 | 30 | - (void)setSession:(AVCaptureSession *)session 31 | { 32 | self.videoPreviewLayer.session = session; 33 | } 34 | 35 | 36 | @end 37 | -------------------------------------------------------------------------------- /ObjectTrackDemo/AppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.h 3 | // ObjectTrackDemo 4 | // 5 | // Created by baiya on 2017/6/8. 6 | // Copyright © 2017年 Maxcw. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface AppDelegate : UIResponder 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | 16 | @end 17 | 18 | -------------------------------------------------------------------------------- /ObjectTrackDemo/AppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.m 3 | // ObjectTrackDemo 4 | // 5 | // Created by baiya on 2017/6/8. 6 | // Copyright © 2017年 Maxcw. 
All rights reserved. 7 | // 8 | 9 | #import "AppDelegate.h" 10 | #import "ViewController.h" 11 | 12 | @interface AppDelegate () 13 | 14 | @end 15 | 16 | @implementation AppDelegate 17 | 18 | 19 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 20 | // Override point for customization after application launch. 21 | self.window = [[UIWindow alloc] initWithFrame:[UIScreen mainScreen].bounds]; 22 | self.window.backgroundColor = [UIColor whiteColor]; 23 | 24 | ViewController* tempVc = [ViewController new]; 25 | UINavigationController* tempNv = [[UINavigationController alloc] initWithRootViewController:tempVc]; 26 | 27 | [self.window setRootViewController:tempNv]; 28 | [self.window makeKeyAndVisible]; 29 | 30 | return YES; 31 | } 32 | 33 | 34 | - (void)applicationWillResignActive:(UIApplication *)application { 35 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 36 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 37 | } 38 | 39 | 40 | - (void)applicationDidEnterBackground:(UIApplication *)application { 41 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 42 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 
43 | } 44 | 45 | 46 | - (void)applicationWillEnterForeground:(UIApplication *)application { 47 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 48 | } 49 | 50 | 51 | - (void)applicationDidBecomeActive:(UIApplication *)application { 52 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 53 | } 54 | 55 | 56 | - (void)applicationWillTerminate:(UIApplication *)application { 57 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 58 | } 59 | 60 | 61 | @end 62 | -------------------------------------------------------------------------------- /ObjectTrackDemo/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | 
}, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | }, 88 | { 89 | "idiom" : "ios-marketing", 90 | "size" : "1024x1024", 91 | "scale" : "1x" 92 | } 93 | ], 94 | "info" : { 95 | "version" : 1, 96 | "author" : "xcode" 97 | } 98 | } -------------------------------------------------------------------------------- /ObjectTrackDemo/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /ObjectTrackDemo/CVPixelBufferUtils.h: -------------------------------------------------------------------------------- 1 | // 2 | // CVPixelBufferUtils.h 3 | // ObjectTrackDemo 4 | // 5 | // Created by baiya on 2017/6/29. 6 | // Copyright © 2017年 Maxcw. All rights reserved. 
7 | // 8 | 9 | #import 10 | #import 11 | #import 12 | //#import 13 | 14 | @interface CVPixelBufferUtils : NSObject 15 | 16 | // 将CVPixelBufferRef 转化为cv::Mat,传参的buffer格式为BGRA,不过其他四通道格式应该也适用 17 | //+ (cv::Mat)matFromPixelBuffer:(CVPixelBufferRef)buffer; 18 | 19 | /* 20 | * 注意旋转SampleBuffer 为argb或者bgra格式,其他格式可能不支持 21 | * rotationConstant: 22 | * 0 -- rotate 0 degrees (simply copy the data from src to dest) 23 | * 1 -- rotate 90 degrees counterclockwise 24 | * 2 -- rotate 180 degress 25 | * 3 -- rotate 270 degrees counterclockwise 26 | */ 27 | + (CVPixelBufferRef)rotateBuffer:(CMSampleBufferRef)sampleBuffer withConstant:(uint8_t)rotationConstant; 28 | 29 | @end 30 | -------------------------------------------------------------------------------- /ObjectTrackDemo/CVPixelBufferUtils.m: -------------------------------------------------------------------------------- 1 | // 2 | // CVPixelBufferUtils.m 3 | // ObjectTrackDemo 4 | // 5 | // Created by baiya on 2017/6/29. 6 | // Copyright © 2017年 Maxcw. All rights reserved. 
//
//  CVPixelBufferUtils.m (implementation, header comment truncated in this chunk)
//

#import "CVPixelBufferUtils.h"

@implementation CVPixelBufferUtils


// Convert a CVPixelBufferRef into a cv::Mat. The buffer passed in is BGRA,
// but other four-channel formats should work as well. (Kept for reference.)
//+ (cv::Mat)matFromPixelBuffer:(CVPixelBufferRef)buffer
//{
//    CVPixelBufferLockBaseAddress(buffer, 0);
//
//    unsigned char *base = (unsigned char *)CVPixelBufferGetBaseAddress( buffer );
//    size_t width = CVPixelBufferGetWidth( buffer );
//    size_t height = CVPixelBufferGetHeight( buffer );
//    size_t stride = CVPixelBufferGetBytesPerRow( buffer );
//    OSType type = CVPixelBufferGetPixelFormatType(buffer);
//    size_t extendedWidth = stride / 4;  // each pixel is 4 bytes/32 bits
//    cv::Mat bgraImage = cv::Mat( (int)height, (int)extendedWidth, CV_8UC4, base );
//
//    CVPixelBufferUnlockBaseAddress(buffer,0);
//
//    return bgraImage;
//}


/*
 * Rotate a sample buffer. The buffer must be in an ARGB or BGRA 4-byte-per-pixel
 * format; other formats may not be supported.
 * rotationConstant:
 *   0 -- rotate 0 degrees (simply copy the data from src to dest)
 *   1 -- rotate 90 degrees counterclockwise
 *   2 -- rotate 180 degrees
 *   3 -- rotate 270 degrees counterclockwise
 *
 * Returns a new CVPixelBufferRef owned by the caller (release with
 * CVBufferRelease), or NULL on allocation/creation failure.
 */
+ (CVPixelBufferRef)rotateBuffer:(CMSampleBufferRef)sampleBuffer withConstant:(uint8_t)rotationConstant
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    OSType pixelFormatType = CVPixelBufferGetPixelFormatType(imageBuffer);

    // NSAssert(pixelFormatType == kCVPixelFormatType_32ARGB, @"Code works only with 32ARGB format. Test/adapt for other formats!");

    const size_t kAlignment_32ARGB = 32;
    const size_t kBytesPerPixel_32ARGB = 4;

    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // 90/270 degree rotations swap the output width and height.
    BOOL rotatePerpendicular = (rotationConstant == 1) || (rotationConstant == 3); // Use enumeration values here
    const size_t outWidth = rotatePerpendicular ? height : width;
    const size_t outHeight = rotatePerpendicular ? width : height;

    // Round the output row length up to a 32-pixel alignment boundary.
    size_t bytesPerRowOut = kBytesPerPixel_32ARGB * ceil(outWidth * 1.0 / kAlignment_32ARGB) * kAlignment_32ARGB;

    const size_t dstSize = bytesPerRowOut * outHeight;

    void *srcBuff = CVPixelBufferGetBaseAddress(imageBuffer);

    unsigned char *dstBuff = (unsigned char *)malloc(dstSize);
    if (NULL == dstBuff) {
        // BUG FIX: allocation failure used to fall through into vImage with a
        // NULL destination; bail out cleanly instead.
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return NULL;
    }

    vImage_Buffer inbuff = {srcBuff, height, width, bytesPerRow};
    vImage_Buffer outbuff = {dstBuff, outHeight, outWidth, bytesPerRowOut};

    uint8_t bgColor[4] = {0, 0, 0, 0};

    vImage_Error err = vImageRotate90_ARGB8888(&inbuff, &outbuff, rotationConstant, bgColor, 0);
    if (err != kvImageNoError)
    {
        NSLog(@"%ld", err);
    }

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    CVPixelBufferRef rotatedBuffer = NULL;
    CVReturn createResult = CVPixelBufferCreateWithBytes(NULL,
                                                         outWidth,
                                                         outHeight,
                                                         pixelFormatType,
                                                         outbuff.data,
                                                         bytesPerRowOut,
                                                         freePixelBufferDataAfterRelease,
                                                         NULL,
                                                         NULL,
                                                         &rotatedBuffer);
    if (createResult != kCVReturnSuccess) {
        // BUG FIX: on failure the release callback never runs, so the malloc'd
        // destination buffer leaked. Free it here and report failure to the caller.
        free(dstBuff);
        return NULL;
    }

    return rotatedBuffer;
}

// CVPixelBufferReleaseBytesCallback: frees the memory we malloced for the
// vImage rotation once the owning CVPixelBuffer is released.
void freePixelBufferDataAfterRelease(void *releaseRefCon, const void *baseAddress)
{
    free((void *)baseAddress);
}

@end
--------------------------------------------------------------------------------
/ObjectTrackDemo/Info.plist:
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | NSCameraUsageDescription 24 | 申请相机权限 25 | UILaunchStoryboardName 26 | LaunchScreen 27 | UIRequiredDeviceCapabilities 28 | 29 | armv7 30 | 31 | UISupportedInterfaceOrientations 32 | 33 | UIInterfaceOrientationPortrait 34 | UIInterfaceOrientationLandscapeLeft 35 | UIInterfaceOrientationLandscapeRight 36 | 37 | UISupportedInterfaceOrientations~ipad 38 | 39 | UIInterfaceOrientationPortrait 40 | UIInterfaceOrientationPortraitUpsideDown 41 | UIInterfaceOrientationLandscapeLeft 42 | UIInterfaceOrientationLandscapeRight 43 | 44 | pri 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /ObjectTrackDemo/UIImage+Convert.h: -------------------------------------------------------------------------------- 1 | // 2 | // UIImage+Convert.h 3 | // ObjectTrackDemo 4 | // 5 | // Created by baiya on 2017/6/22. 6 | // Copyright © 2017年 Maxcw. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface UIImage (Convert) 12 | 13 | + (UIImage*)imageWithImageBuffer:(CVImageBufferRef)imageBuffer; 14 | 15 | @end 16 | -------------------------------------------------------------------------------- /ObjectTrackDemo/UIImage+Convert.mm: -------------------------------------------------------------------------------- 1 | // 2 | // UIImage+Convert.m 3 | // ObjectTrackDemo 4 | // 5 | // Created by baiya on 2017/6/22. 6 | // Copyright © 2017年 Maxcw. All rights reserved. 
//
//  UIImage+Convert.mm (implementation, header comment truncated in this chunk)
//

#import "UIImage+Convert.h"

@implementation UIImage (Convert)


/**
 Creates a UIImage from a BGRA-formatted CVImageBufferRef.

 The buffer is drawn through a little-endian, premultiplied-first bitmap
 context (i.e. BGRA byte order) and the resulting image is tagged with
 UIImageOrientationRight so a portrait camera frame displays upright.

 @param imageBuffer a locked-capable pixel buffer in 32BGRA format
 @return an autoreleased UIImage, or nil if context creation fails
 */
+ (UIImage*)imageWithImageBuffer:(CVImageBufferRef)imageBuffer
{
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress,
                                                    width, height, 8, bytesPerRow, colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);

    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    // BUG FIX: the base address was locked above but never unlocked, leaving the
    // pixel buffer permanently locked for CPU access. Unlock once the CGImage
    // (which owns its own copy of the pixels) has been created.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];

    CGImageRelease(newImage);

    return image;
}

@end
--------------------------------------------------------------------------------
/ObjectTrackDemo/UIImage+Detect.h:
--------------------------------------------------------------------------------
//
//  UIImage+Detect.h
//  ObjectTrackDemo
//
//  Created by baiya on 2017/6/22.
//  Copyright © 2017年 Maxcw. All rights reserved.
//

#import <UIKit/UIKit.h>
#import <Vision/Vision.h>

@interface UIImage (Detect)

- (void)detectTextWithImage:(nullable VNRequestCompletionHandler)completionHandler;

@end
--------------------------------------------------------------------------------
/ObjectTrackDemo/UIImage+Detect.m:
--------------------------------------------------------------------------------
//
//  UIImage+Detect.m
//  ObjectTrackDemo
//
//  Created by baiya on 2017/6/22.
//  Copyright © 2017年 Maxcw. All rights reserved.
7 | // 8 | 9 | #import "UIImage+Detect.h" 10 | 11 | @implementation UIImage (Detect) 12 | 13 | // 使用image进行文字检测 14 | - (void)detectTextWithImage:(nullable VNRequestCompletionHandler)completionHandler 15 | { 16 | UIImage *image = self; 17 | if(nil == image) 18 | return; 19 | 20 | VNImageRequestHandler *handler = [[VNImageRequestHandler alloc] initWithCGImage:image.CGImage options:@{}]; 21 | VNDetectTextRectanglesRequest *request = [[VNDetectTextRectanglesRequest alloc] initWithCompletionHandler:completionHandler]; 22 | 23 | request.reportCharacterBoxes = YES; 24 | [handler performRequests:@[request] error:nil]; 25 | } 26 | 27 | @end 28 | -------------------------------------------------------------------------------- /ObjectTrackDemo/UIImage+Orientation.h: -------------------------------------------------------------------------------- 1 | // 2 | // UIImage+Orientation.h 3 | // ObjectTrackDemo 4 | // 5 | // Created by baiya on 2017/6/22. 6 | // Copyright © 2017年 Maxcw. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface UIImage (Orientation) 12 | 13 | - (UIImage *)fixOrientation; 14 | 15 | @end 16 | -------------------------------------------------------------------------------- /ObjectTrackDemo/UIImage+Orientation.m: -------------------------------------------------------------------------------- 1 | // 2 | // UIImage+Orientation.m 3 | // ObjectTrackDemo 4 | // 5 | // Created by baiya on 2017/6/22. 6 | // Copyright © 2017年 Maxcw. All rights reserved. 7 | // 8 | 9 | #import "UIImage+Orientation.h" 10 | 11 | @implementation UIImage (Orientation) 12 | 13 | 14 | - (UIImage *)fixOrientation { 15 | UIImage *aImage = self; 16 | if (aImage==nil || !aImage) { 17 | return nil; 18 | } 19 | // No-op if the orientation is already correct 20 | if (aImage.imageOrientation == UIImageOrientationUp) return aImage; 21 | // We need to calculate the proper transformation to make the image upright. 
22 | // We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored. 23 | CGAffineTransform transform = CGAffineTransformIdentity; 24 | UIImageOrientation orientation=aImage.imageOrientation; 25 | int orientation_=orientation; 26 | switch (orientation_) { 27 | case UIImageOrientationDown: 28 | case UIImageOrientationDownMirrored: 29 | transform = CGAffineTransformTranslate(transform, aImage.size.width, aImage.size.height); 30 | transform = CGAffineTransformRotate(transform, M_PI); 31 | break; 32 | case UIImageOrientationLeft: 33 | case UIImageOrientationLeftMirrored: 34 | transform = CGAffineTransformTranslate(transform, aImage.size.width, 0); 35 | transform = CGAffineTransformRotate(transform, M_PI_2); 36 | break; 37 | case UIImageOrientationRight: 38 | case UIImageOrientationRightMirrored: 39 | transform = CGAffineTransformTranslate(transform, 0, aImage.size.height); 40 | transform = CGAffineTransformRotate(transform, -M_PI_2); 41 | break; 42 | } 43 | switch (orientation_) { 44 | case UIImageOrientationUpMirrored: 45 | case UIImageOrientationDownMirrored: 46 | transform = CGAffineTransformTranslate(transform, aImage.size.width, 0); 47 | transform = CGAffineTransformScale(transform, -1, 1); 48 | break; 49 | case UIImageOrientationLeftMirrored: 50 | case UIImageOrientationRightMirrored: 51 | transform = CGAffineTransformTranslate(transform, aImage.size.height, 0); 52 | 53 | transform = CGAffineTransformScale(transform, -1, 1); 54 | break; 55 | 56 | } 57 | 58 | // Now we draw the underlying CGImage into a new context, applying the transform 59 | // calculated above. 
60 | CGContextRef ctx = CGBitmapContextCreate(NULL, aImage.size.width, aImage.size.height, 61 | 62 | CGImageGetBitsPerComponent(aImage.CGImage), 0, 63 | 64 | CGImageGetColorSpace(aImage.CGImage), 65 | 66 | CGImageGetBitmapInfo(aImage.CGImage)); 67 | 68 | CGContextConcatCTM(ctx, transform); 69 | switch (aImage.imageOrientation) { 70 | case UIImageOrientationLeft: 71 | case UIImageOrientationLeftMirrored: 72 | case UIImageOrientationRight: 73 | case UIImageOrientationRightMirrored: 74 | // Grr... 75 | CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.height,aImage.size.width), aImage.CGImage); 76 | break; 77 | default: 78 | CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.width,aImage.size.height), aImage.CGImage); 79 | break; 80 | 81 | } 82 | 83 | // And now we just create a new UIImage from the drawing context 84 | CGImageRef cgimg = CGBitmapContextCreateImage(ctx); 85 | UIImage *img = [UIImage imageWithCGImage:cgimg]; 86 | CGContextRelease(ctx); 87 | CGImageRelease(cgimg); 88 | 89 | aImage=img; 90 | img=nil; 91 | return aImage; 92 | } 93 | 94 | @end 95 | -------------------------------------------------------------------------------- /ObjectTrackDemo/ViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.h 3 | // ObjectTrackDemo 4 | // 5 | // Created by baiya on 2017/6/8. 6 | // Copyright © 2017年 Maxcw. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface ViewController : UIViewController 12 | 13 | @end 14 | 15 | -------------------------------------------------------------------------------- /ObjectTrackDemo/ViewController.mm: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.m 3 | // ObjectTrackDemo 4 | // 5 | // Created by baiya on 2017/6/8. 6 | // Copyright © 2017年 Maxcw. All rights reserved. 
7 | // 8 | 9 | #import "ViewController.h" 10 | #import 11 | #import 12 | #import 13 | #import 14 | #import 15 | #import 16 | #import 17 | #import 18 | #import 19 | #import 20 | #import "AVCamPreviewView.h" 21 | #import 22 | #import "UIImage+Convert.h" 23 | #import "UIImage+Orientation.h" 24 | #import "CVPixelBufferUtils.h" 25 | 26 | 27 | 28 | typedef NS_ENUM(uint8_t, MOVRotateDirection) 29 | { 30 | MOVRotateDirectionNone = 0, 31 | MOVRotateDirectionCounterclockwise90, 32 | MOVRotateDirectionCounterclockwise180, 33 | MOVRotateDirectionCounterclockwise270, 34 | MOVRotateDirectionUnknown 35 | }; 36 | 37 | @interface ViewController () 38 | 39 | //@property (nullable, nonatomic, strong) UIImageView *imageView1; 40 | @property (nullable, nonatomic, strong) UIImageView *highlightView; 41 | @property (nullable, nonatomic, strong) UIImageView *bgImgView; 42 | 43 | @property (nonatomic, retain) AVCaptureSession *captureSession; 44 | @property (nonatomic, retain) AVCamPreviewView *preView; 45 | 46 | @property (nonatomic, strong) UILabel *infoLabel; 47 | 48 | @property (nonatomic, assign) NSUInteger counter; // 计数器 49 | 50 | @property (nonatomic, retain) VNSequenceRequestHandler *sequenceHandler; 51 | //@property (atomic, retain) VNDetectedObjectObservation *lastObsercation; 52 | 53 | @property (nonatomic, retain) NSMutableDictionary *lastObsercationsDic; 54 | 55 | @property (nonatomic, strong) dispatch_queue_t queue; 56 | 57 | @end 58 | 59 | @implementation ViewController 60 | 61 | - (void)viewDidLoad { 62 | [super viewDidLoad]; 63 | // Do any additional setup after loading the view, typically from a nib. 
64 | 65 | 66 | 67 | AVCamPreviewView *preView = [[AVCamPreviewView alloc] initWithFrame:self.view.bounds]; 68 | [self.view addSubview:preView]; 69 | self.preView = preView; 70 | 71 | self.bgImgView = [[UIImageView alloc] initWithFrame:self.view.bounds]; 72 | self.bgImgView.contentMode = UIViewContentModeScaleAspectFit; 73 | self.bgImgView.backgroundColor = [UIColor clearColor]; 74 | [self.view addSubview:self.bgImgView]; 75 | 76 | self.highlightView = [[UIImageView alloc] initWithFrame:self.view.bounds]; 77 | self.highlightView.contentMode = UIViewContentModeScaleAspectFit; 78 | self.highlightView.backgroundColor = [UIColor clearColor]; 79 | [self.view addSubview:self.highlightView]; 80 | 81 | 82 | self.infoLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, self.view.bounds.size.height - 100, self.view.bounds.size.width, 100)]; 83 | [self.view addSubview:self.infoLabel]; 84 | self.infoLabel.backgroundColor = [UIColor colorWithRed:0x00 green:0x00 blue:0x00 alpha:0.4]; 85 | self.infoLabel.textColor = [UIColor whiteColor]; 86 | self.infoLabel.numberOfLines = 0; 87 | 88 | self.counter = 0; 89 | 90 | [self initCapture]; 91 | 92 | self.navigationItem.rightBarButtonItem = [[UIBarButtonItem alloc] initWithTitle:@"reset" style:UIBarButtonItemStylePlain target:self action:@selector(reset)]; 93 | } 94 | 95 | - (void)reset 96 | { 97 | 98 | [self.lastObsercationsDic removeAllObjects]; 99 | self.lastObsercationsDic = nil; 100 | 101 | self.sequenceHandler = nil; 102 | 103 | } 104 | 105 | - (void)initCapture { 106 | AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] error:nil]; 107 | AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init]; 108 | captureOutput.alwaysDiscardsLateVideoFrames = YES; 109 | //captureOutput.minFrameDuration = CMTimeMake(1, 10); 110 | 111 | dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL); 112 | self.queue = queue; 113 
| [captureOutput setSampleBufferDelegate:self queue:queue]; 114 | NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey; 115 | NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]; 116 | NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key]; 117 | [captureOutput setVideoSettings:videoSettings]; 118 | self.captureSession = [[AVCaptureSession alloc] init]; 119 | [self.captureSession addInput:captureInput]; 120 | [self.captureSession addOutput:captureOutput]; 121 | [self.captureSession startRunning]; 122 | 123 | self.preView.session = self.captureSession; 124 | } 125 | 126 | #pragma mark AVCaptureSession delegate 127 | - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 128 | { 129 | if (self.counter % 10 != 0) { 130 | self.counter ++; 131 | return; 132 | } 133 | self.counter = 0; 134 | 135 | 136 | CVPixelBufferRef rotateBuffer = [CVPixelBufferUtils rotateBuffer:sampleBuffer withConstant:MOVRotateDirectionCounterclockwise270]; 137 | 138 | // [self detectTextWithPixelBuffer:rotateBuffer]; 139 | [self objectTrackWithPixelBuffer:rotateBuffer]; 140 | 141 | CVBufferRelease(rotateBuffer); 142 | 143 | } 144 | 145 | // 使用pixelBuffer进行文字检测 146 | //- (void)detectTextWithPixelBuffer:(CVPixelBufferRef)pixelBuffer 147 | //{ 148 | // void (^ VNRequestCompletionHandler)(VNRequest *request, NSError * _Nullable error) = ^(VNRequest *request, NSError * _Nullable error) 149 | // { 150 | // if (nil == error) { 151 | // 152 | // size_t width = CVPixelBufferGetWidth(pixelBuffer); 153 | // size_t height = CVPixelBufferGetHeight(pixelBuffer); 154 | // CGSize size = CGSizeMake(width, height); 155 | // void (^UIGraphicsImageDrawingActions)(UIGraphicsImageRendererContext *rendererContext) = ^(UIGraphicsImageRendererContext *rendererContext) 156 | // { 157 | // //vision框架使用的坐标是为 0 -》 1, 原点为屏幕的左下角(跟UIKit不同),向右向上增加,妈蛋其实就是Opengl的纹理坐标系。 
158 | // CGAffineTransform transform= CGAffineTransformIdentity; 159 | // transform = CGAffineTransformScale(transform, size.width, -size.height); 160 | // transform = CGAffineTransformTranslate(transform, 0, -1); 161 | // 162 | // for (VNTextObservation *textObservation in request.results) 163 | // { 164 | // [[UIColor redColor] setStroke]; 165 | // [[UIBezierPath bezierPathWithRect:CGRectApplyAffineTransform(textObservation.boundingBox, transform)] stroke]; 166 | // for (VNRectangleObservation *rectangleObservation in textObservation.characterBoxes) 167 | // { 168 | // [[UIColor blueColor] setStroke]; 169 | // [[UIBezierPath bezierPathWithRect:CGRectApplyAffineTransform(rectangleObservation.boundingBox, transform)] stroke]; 170 | // } 171 | // } 172 | // }; 173 | // 174 | // UIGraphicsImageRenderer *renderer = [[UIGraphicsImageRenderer alloc] initWithSize:size]; 175 | // UIImage *overlayImage = [renderer imageWithActions:UIGraphicsImageDrawingActions]; 176 | // 177 | // dispatch_async(dispatch_get_main_queue(), ^{ 178 | // self.highlightView.image = overlayImage; 179 | // }); 180 | // } 181 | // }; 182 | // 183 | // VNImageRequestHandler *handler = [[VNImageRequestHandler alloc] initWithCVPixelBuffer:pixelBuffer options:@{}]; 184 | // VNDetectTextRectanglesRequest *request = [[VNDetectTextRectanglesRequest alloc] initWithCompletionHandler:VNRequestCompletionHandler]; 185 | // 186 | // request.reportCharacterBoxes = YES; 187 | // [handler performRequests:@[request] error:nil]; 188 | //} 189 | 190 | 191 | 192 | // 物体检测 193 | - (void)detectObjectWithPixelBuffer:(CVPixelBufferRef)pixelBuffer 194 | { 195 | if (!self.lastObsercationsDic) { 196 | self.lastObsercationsDic = [NSMutableDictionary dictionary]; 197 | } 198 | CFAbsoluteTime start = CFAbsoluteTimeGetCurrent(); 199 | 200 | void (^ VNRequestCompletionHandler)(VNRequest *request, NSError * _Nullable error) = ^(VNRequest *request, NSError * _Nullable error) 201 | { 202 | CFAbsoluteTime end = 
CFAbsoluteTimeGetCurrent(); 203 | 204 | NSLog(@"检测耗时: %f", end - start); 205 | if (!error && request.results.count > 0) { 206 | for (VNDetectedObjectObservation *observation in request.results) { 207 | [self.lastObsercationsDic setObject:observation forKey:observation.uuid.UUIDString]; 208 | } 209 | 210 | [self objectTrackWithPixelBuffer:pixelBuffer]; 211 | 212 | return ; 213 | } 214 | }; 215 | 216 | VNImageRequestHandler *handler = [[VNImageRequestHandler alloc] initWithCVPixelBuffer:pixelBuffer options:@{}]; 217 | VNDetectRectanglesRequest *request = [[VNDetectRectanglesRequest alloc] initWithCompletionHandler:VNRequestCompletionHandler]; 218 | request.minimumAspectRatio = 0.1; 219 | request.maximumObservations = 0; 220 | [handler performRequests:@[request] error:nil]; 221 | } 222 | 223 | // 物体跟踪 224 | - (void)objectTrackWithPixelBuffer:(CVPixelBufferRef)pixelBuffer 225 | { 226 | if (self.lastObsercationsDic.count == 0 ) { 227 | [self detectObjectWithPixelBuffer:pixelBuffer]; 228 | return; 229 | } 230 | 231 | if (!self.sequenceHandler) { 232 | self.sequenceHandler = [[VNSequenceRequestHandler alloc] init]; 233 | } 234 | 235 | NSArray *obsercationKeys = self.lastObsercationsDic.allKeys; 236 | 237 | NSMutableArray *obsercationRequest = [NSMutableArray array]; 238 | 239 | size_t width = CVPixelBufferGetWidth(pixelBuffer); 240 | size_t height = CVPixelBufferGetHeight(pixelBuffer); 241 | CGSize size = CGSizeMake(width, height); 242 | 243 | CFAbsoluteTime start = CFAbsoluteTimeGetCurrent(); 244 | for (NSString *key in obsercationKeys) { 245 | 246 | VNDetectedObjectObservation *obsercation = self.lastObsercationsDic[key]; 247 | 248 | VNTrackObjectRequest *trackObjectRequest = [[VNTrackObjectRequest alloc] initWithDetectedObjectObservation:obsercation completionHandler:^(VNRequest * _Nonnull request, NSError * _Nullable error) { 249 | 250 | CFAbsoluteTime end = CFAbsoluteTimeGetCurrent(); 251 | NSLog(@"跟踪耗时: %f", end - start); 252 | 253 | if (nil == error && 
request.results.count > 0) { 254 | 255 | NSArray *results = request.results; 256 | VNDetectedObjectObservation *rectangleObservation = results.firstObject; 257 | if (rectangleObservation.confidence < 0.3) { 258 | [self.lastObsercationsDic removeObjectForKey:rectangleObservation.uuid.UUIDString]; 259 | return; 260 | } 261 | 262 | [self.lastObsercationsDic setObject:rectangleObservation forKey:rectangleObservation.uuid.UUIDString]; 263 | 264 | [self overlayImageWithSize:size]; 265 | 266 | 267 | } else { 268 | dispatch_async(dispatch_get_main_queue(), ^{ 269 | // 识别失败,移除物体跟踪队列 270 | 271 | [self.lastObsercationsDic removeObjectForKey:key]; 272 | [self overlayImageWithSize:size]; 273 | 274 | }); 275 | 276 | } 277 | }]; 278 | trackObjectRequest.trackingLevel = VNRequestTrackingLevelAccurate; 279 | 280 | [obsercationRequest addObject:trackObjectRequest]; 281 | } 282 | 283 | 284 | NSError *error = nil; 285 | [self.sequenceHandler performRequests:obsercationRequest onCVPixelBuffer:pixelBuffer error:&error]; 286 | 287 | } 288 | 289 | 290 | - (void)overlayImageWithSize:(CGSize)size 291 | { 292 | 293 | NSDictionary *lastObsercationDicCopy = [NSDictionary dictionaryWithDictionary:self.lastObsercationsDic]; 294 | NSArray *keyArr = [lastObsercationDicCopy allKeys]; 295 | 296 | 297 | UIGraphicsImageRenderer *renderer = [[UIGraphicsImageRenderer alloc] initWithSize:CGSizeMake(size.width, size.height)]; 298 | 299 | void (^UIGraphicsImageDrawingActions)(UIGraphicsImageRendererContext *rendererContext) = ^(UIGraphicsImageRendererContext *rendererContext) 300 | { 301 | CGAffineTransform transform = CGAffineTransformIdentity; 302 | transform = CGAffineTransformScale(transform, size.width, -size.height); 303 | transform = CGAffineTransformTranslate(transform, 0, -1); 304 | 305 | for (NSString *uuid in keyArr) { 306 | VNDetectedObjectObservation *rectangleObservation = lastObsercationDicCopy[uuid]; 307 | 308 | [[UIColor redColor] setStroke]; 309 | UIBezierPath *path = [UIBezierPath 
bezierPathWithRect:CGRectApplyAffineTransform(rectangleObservation.boundingBox, transform)]; 310 | path.lineWidth = 4.0f; 311 | [path stroke]; 312 | 313 | } 314 | }; 315 | 316 | UIImage *overlayImage = [renderer imageWithActions:UIGraphicsImageDrawingActions]; 317 | 318 | NSMutableString *trackInfoStr = [NSMutableString string]; 319 | 320 | for (NSString *uuid in keyArr) { 321 | VNDetectedObjectObservation *rectangleObservation = lastObsercationDicCopy[uuid]; 322 | 323 | [trackInfoStr appendFormat:@"置信度 : %.2f \n", rectangleObservation.confidence]; 324 | } 325 | 326 | dispatch_async(dispatch_get_main_queue(), ^{ 327 | 328 | self.highlightView.image = overlayImage; 329 | 330 | self.infoLabel.text = trackInfoStr; 331 | }); 332 | } 333 | 334 | 335 | @end 336 | 337 | -------------------------------------------------------------------------------- /ObjectTrackDemo/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // TextAndHorizonDetectionDemo 4 | // 5 | // Created by maxcwfeng on 2017/6/8. 6 | // Copyright © 2017年 Maxcw. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "AppDelegate.h" 11 | 12 | int main(int argc, char * argv[]) { 13 | @autoreleasepool { 14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ios11-ObjectTrackDemo 2 | 3 | 4 | ios 11 新出了[Vision](https://developer.apple.com/documentation/vision) 框架,提供了人脸识别、物体检测、物体跟踪等技术。本文将通过一个Demo简单介绍如何使用Vision框架进行物体检测和物体跟踪。本文Demo可以在[Github](https://github.com/WhiteTeeth/ios11-ObjectTrackDemo)上下载。 5 | 6 | 7 | 8 | 9 | # 1. 
关于Vision框架 10 | 11 | Vision 是伴随ios 11 推出的基于CoreML的图形处理框架。运用高性能图形处理和视觉技术,可以对图像和视频进行人脸检测、特征点检测和场景识别等。 12 | 13 | ![image](http://7punko.com1.z0.glb.clouddn.com/blog/ios11%E4%BD%BF%E7%94%A8vision%E5%BC%80%E5%A7%8B%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AA/vision%E6%A1%86%E6%9E%B6.jpg) 14 | 15 | 16 | # 2. 使用vision 进行物体识别 17 | 18 | ## 环境 19 | 20 | Xcode 9 + ios 11 21 | 22 | 23 | ## 获取图像数据 24 | 25 | 该步骤假设你已经调起系统相机,并获得 `CMSampleBufferRef` 数据。注意返回的simpleBuffer 方向和UIView 显示方向不一致,所以先对simpleBuffer 旋转到正确的方向。 26 | 27 | 当然也可以不进行旋转,但是要保证后续坐标转换的一致性。 28 | 29 | 30 | ``` 31 | /* 32 | * 注意旋转SampleBuffer 为argb或者bgra格式,其他格式可能不支持 33 | * rotationConstant: 34 | * 0 -- rotate 0 degrees (simply copy the data from src to dest) 35 | * 1 -- rotate 90 degrees counterclockwise 36 | * 2 -- rotate 180 degress 37 | * 3 -- rotate 270 degrees counterclockwise 38 | */ 39 | + (CVPixelBufferRef)rotateBuffer:(CMSampleBufferRef)sampleBuffer withConstant:(uint8_t)rotationConstant 40 | { 41 | CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 42 | CVPixelBufferLockBaseAddress(imageBuffer, 0); 43 | 44 | OSType pixelFormatType = CVPixelBufferGetPixelFormatType(imageBuffer); 45 | 46 | // NSAssert(pixelFormatType == kCVPixelFormatType_32ARGB, @"Code works only with 32ARGB format. Test/adapt for other formats!"); 47 | 48 | const size_t kAlignment_32ARGB = 32; 49 | const size_t kBytesPerPixel_32ARGB = 4; 50 | 51 | size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 52 | size_t width = CVPixelBufferGetWidth(imageBuffer); 53 | size_t height = CVPixelBufferGetHeight(imageBuffer); 54 | 55 | BOOL rotatePerpendicular = (rotationConstant == 1) || (rotationConstant == 3); // Use enumeration values here 56 | const size_t outWidth = rotatePerpendicular ? height : width; 57 | const size_t outHeight = rotatePerpendicular ? 
width : height; 58 | 59 | size_t bytesPerRowOut = kBytesPerPixel_32ARGB * ceil(outWidth * 1.0 / kAlignment_32ARGB) * kAlignment_32ARGB; 60 | 61 | const size_t dstSize = bytesPerRowOut * outHeight * sizeof(unsigned char); 62 | 63 | void *srcBuff = CVPixelBufferGetBaseAddress(imageBuffer); 64 | 65 | unsigned char *dstBuff = (unsigned char *)malloc(dstSize); 66 | 67 | vImage_Buffer inbuff = {srcBuff, height, width, bytesPerRow}; 68 | vImage_Buffer outbuff = {dstBuff, outHeight, outWidth, bytesPerRowOut}; 69 | 70 | uint8_t bgColor[4] = {0, 0, 0, 0}; 71 | 72 | vImage_Error err = vImageRotate90_ARGB8888(&inbuff, &outbuff, rotationConstant, bgColor, 0); 73 | if (err != kvImageNoError) 74 | { 75 | NSLog(@"%ld", err); 76 | } 77 | 78 | CVPixelBufferUnlockBaseAddress(imageBuffer, 0); 79 | 80 | CVPixelBufferRef rotatedBuffer = NULL; 81 | CVPixelBufferCreateWithBytes(NULL, 82 | outWidth, 83 | outHeight, 84 | pixelFormatType, 85 | outbuff.data, 86 | bytesPerRowOut, 87 | freePixelBufferDataAfterRelease, 88 | NULL, 89 | NULL, 90 | &rotatedBuffer); 91 | 92 | return rotatedBuffer; 93 | } 94 | 95 | void freePixelBufferDataAfterRelease(void *releaseRefCon, const void *baseAddress) 96 | { 97 | // Free the memory we malloced for the vImage rotation 98 | free((void *)baseAddress); 99 | } 100 | 101 | 102 | ``` 103 | 104 | 105 | ## 物体检测 106 | 107 | 拿到图像数据后就可以进行物体检测,物体检测流程很简单: 108 | 109 | 1. 创建一个物体检测请求 VNDetectRectanglesRequest 110 | 2. 根据数据源(pixelBuffer 或者 UIImage)创建一个 VNImageRequestHandler 111 | 3. 
调用[VNImageRequestHandler performRequests] 执行检测 112 | 113 | 114 | ``` 115 | 116 | - (void)detectObjectWithPixelBuffer:(CVPixelBufferRef)pixelBuffer 117 | { 118 | CFAbsoluteTime start = CFAbsoluteTimeGetCurrent(); 119 | 120 | void (^ VNRequestCompletionHandler)(VNRequest *request, NSError * _Nullable error) = ^(VNRequest *request, NSError * _Nullable error) 121 | { 122 | CFAbsoluteTime end = CFAbsoluteTimeGetCurrent(); 123 | 124 | NSLog(@"检测耗时: %f", end - start); 125 | if (!error && request.results.count > 0) { 126 | // TODO 这里处理检测结果 127 | return ; 128 | } 129 | }; 130 | 131 | VNImageRequestHandler *handler = [[VNImageRequestHandler alloc] initWithCVPixelBuffer:pixelBuffer options:@{}]; 132 | VNDetectRectanglesRequest *request = [[VNDetectRectanglesRequest alloc] initWithCompletionHandler:VNRequestCompletionHandler]; 133 | request.minimumAspectRatio = 0.1; // 最小长宽比设为0.1 134 | request.maximumObservations = 0; // 不限制检测结果 135 | [handler performRequests:@[request] error:nil]; 136 | } 137 | 138 | ``` 139 | 140 | 141 | ## 显示检测结果 142 | 143 | 144 | 物体检测返回结果是一个 `VNDetectedObjectObservation` 的结果集,包含`confidence`, `uuid` 和 `boundingBox`三种属性。 因为vision坐标系类似opengl的纹理坐标系,以屏幕左下角为坐标原点,并做了归一化。所以将显示结果投影到屏幕时,还需要进行坐标系的转换。 145 | 146 | 三种坐标系的区别: 147 | 148 | 坐标系 | 原点 | 长宽 149 | ---- | ---- | ------ 150 | UIKit坐标系 | 左上角 | 屏幕大小 151 | AVFoundation坐标系 | 左上角 | 0 - 1 152 | Vision坐标系 | 左下角 | 0 - 1 153 | 154 | 155 | 显示代码如下,使用`CGAffineTransform `进行坐标转换,并根据转换后矩形绘制红色边框。同时打印`confidence`信息到屏幕上。 156 | 157 | 158 | ``` 159 | 160 | - (void)overlayImageWithSize:(CGSize)size 161 | { 162 | 163 | NSDictionary *lastObsercationDicCopy = [NSDictionary dictionaryWithDictionary:self.lastObsercationsDic]; 164 | NSArray *keyArr = [lastObsercationDicCopy allKeys]; 165 | 166 | UIGraphicsImageRenderer *renderer = [[UIGraphicsImageRenderer alloc] initWithSize:CGSizeMake(size.width, size.height)]; 167 | 168 | void (^UIGraphicsImageDrawingActions)(UIGraphicsImageRendererContext *rendererContext) = 
^(UIGraphicsImageRendererContext *rendererContext) 169 | { 170 | // 将vision坐标转换为屏幕坐标 171 | CGAffineTransform transform = CGAffineTransformIdentity; 172 | transform = CGAffineTransformScale(transform, size.width, -size.height); 173 | transform = CGAffineTransformTranslate(transform, 0, -1); 174 | 175 | for (NSString *uuid in keyArr) { 176 | VNDetectedObjectObservation *rectangleObservation = lastObsercationDicCopy[uuid]; 177 | 178 | // 绘制红框 179 | [[UIColor redColor] setStroke]; 180 | UIBezierPath *path = [UIBezierPath bezierPathWithRect:CGRectApplyAffineTransform(rectangleObservation.boundingBox, transform)]; 181 | path.lineWidth = 4.0f; 182 | [path stroke]; 183 | 184 | } 185 | }; 186 | 187 | UIImage *overlayImage = [renderer imageWithActions:UIGraphicsImageDrawingActions]; 188 | 189 | NSMutableString *trackInfoStr = [NSMutableString string]; 190 | 191 | for (NSString *uuid in keyArr) { 192 | VNDetectedObjectObservation *rectangleObservation = lastObsercationDicCopy[uuid]; 193 | 194 | [trackInfoStr appendFormat:@"置信度 : %.2f \n", rectangleObservation.confidence]; 195 | } 196 | 197 | dispatch_async(dispatch_get_main_queue(), ^{ 198 | 199 | self.highlightView.image = overlayImage; 200 | 201 | self.infoLabel.text = trackInfoStr; 202 | }); 203 | } 204 | 205 | 206 | ``` 207 | 208 | 209 | # 3. 
物体跟踪 210 | 211 | 物体跟踪需要处理连续的视频帧,所以需要创建`VNSequenceRequestHandler`处理多帧图像。同时还需要一个`VNDetectedObjectObservation`对象作为参考源。你可以使用物体检测的结果,或者指定一个矩形作为物体跟踪的参考源。注意因为坐标系不同,如果直接指定矩形作为参考源时,需要事先进行正确的坐标转换。 212 | 213 | 跟踪多物体时,可以使用`VNDetectedObjectObservation.uuid`区分跟踪对象,并做相应处理。 214 | 215 | 216 | ``` 217 | 218 | - (void)objectTrackWithPixelBuffer:(CVPixelBufferRef)pixelBuffer 219 | { 220 | 221 | if (!self.sequenceHandler) { 222 | self.sequenceHandler = [[VNSequenceRequestHandler alloc] init]; 223 | } 224 | 225 | NSArray *obsercationKeys = self.lastObsercationsDic.allKeys; 226 | 227 | NSMutableArray *obsercationRequest = [NSMutableArray array]; 228 | 229 | CFAbsoluteTime start = CFAbsoluteTimeGetCurrent(); 230 | for (NSString *key in obsercationKeys) { 231 | 232 | VNDetectedObjectObservation *obsercation = self.lastObsercationsDic[key]; 233 | 234 | VNTrackObjectRequest *trackObjectRequest = [[VNTrackObjectRequest alloc] initWithDetectedObjectObservation:obsercation completionHandler:^(VNRequest * _Nonnull request, NSError * _Nullable error) { 235 | 236 | CFAbsoluteTime end = CFAbsoluteTimeGetCurrent(); 237 | NSLog(@"跟踪耗时: %f", end - start); 238 | 239 | if (nil == error && request.results.count > 0) { 240 | 241 | // TODO 处理跟踪结果 242 | 243 | 244 | } else { 245 | // 跟踪失败处理 246 | 247 | } 248 | }]; 249 | trackObjectRequest.trackingLevel = VNRequestTrackingLevelAccurate; 250 | 251 | [obsercationRequest addObject:trackObjectRequest]; 252 | } 253 | 254 | 255 | NSError *error = nil; 256 | [self.sequenceHandler performRequests:obsercationRequest onCVPixelBuffer:pixelBuffer error:&error]; 257 | 258 | } 259 | 260 | ``` 261 | 262 | 263 | 264 | ## 效果图 265 | 266 | ![image](http://7punko.com1.z0.glb.clouddn.com/blog/ios11%E4%BD%BF%E7%94%A8vision%E5%BC%80%E5%A7%8B%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AA/%E6%95%88%E6%9E%9C%E5%9B%BE.jpg) 267 | 268 | 269 | 270 | # 4. 
性能 271 | 272 | ## 测试机型 273 | 274 | iPhone 6 Plus,iOS 11.0 (15A5318g) 275 | 276 | 取帧率为 1/10(每 10 帧取 1 帧进行处理) 277 | 278 | 279 | ## 物体检测 280 | 281 | ### 内存 282 | 283 | 稳定在40MB左右 284 | 285 | ![image](http://7punko.com1.z0.glb.clouddn.com/blog/ios11%E4%BD%BF%E7%94%A8vision%E5%BC%80%E5%A7%8B%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AA/%E7%89%A9%E4%BD%93%E6%A3%80%E6%B5%8B%E5%86%85%E5%AD%98_iphone6p.png) 286 | 287 | ### CPU 288 | 289 | 达到了125%的使用量 290 | 291 | 292 | ![image](http://7punko.com1.z0.glb.clouddn.com/blog/ios11%E4%BD%BF%E7%94%A8vision%E5%BC%80%E5%A7%8B%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AA/%E7%89%A9%E4%BD%93%E6%A3%80%E6%B5%8BCPU_iphone6p.png) 293 | 294 | ### 电量 295 | 296 | ![image](http://7punko.com1.z0.glb.clouddn.com/blog/ios11%E4%BD%BF%E7%94%A8vision%E5%BC%80%E5%A7%8B%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AA/%E7%89%A9%E4%BD%93%E6%A3%80%E6%B5%8B%E7%94%B5%E9%87%8F_iphone6p.png) 297 | 298 | 299 | ### 耗时 300 | 301 | 平均在50ms左右 302 | 303 | ![image](http://7punko.com1.z0.glb.clouddn.com/blog/ios11%E4%BD%BF%E7%94%A8vision%E5%BC%80%E5%A7%8B%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AA/%E7%89%A9%E4%BD%93%E6%A3%80%E6%B5%8B%E8%80%97%E6%97%B6_iphone6p.png) 304 | 305 | 306 | ## 物体跟踪 307 | 308 | ### 内存 309 | 310 | 和物体检测一样在40MB左右 311 | 312 | ![image](http://7punko.com1.z0.glb.clouddn.com/blog/ios11%E4%BD%BF%E7%94%A8vision%E5%BC%80%E5%A7%8B%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AA/%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AA%E5%86%85%E5%AD%98_iphone6p.png) 313 | 314 | ### CPU 315 | 316 | 相对低些,但也有100%的使用率 317 | 318 | ![image](http://7punko.com1.z0.glb.clouddn.com/blog/ios11%E4%BD%BF%E7%94%A8vision%E5%BC%80%E5%A7%8B%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AA/%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AACPU_iphone6p.png) 319 | 320 | ### 电量 321 | 322 | ![image](http://7punko.com1.z0.glb.clouddn.com/blog/ios11%E4%BD%BF%E7%94%A8vision%E5%BC%80%E5%A7%8B%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AA/%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AA%E7%94%B5%E9%87%8F_iphone6p.png) 323 | 324 | ### 耗时 325 | 326 | 相对低些,20-40ms不等 327 | 328 | 
![image](http://7punko.com1.z0.glb.clouddn.com/blog/ios11%E4%BD%BF%E7%94%A8vision%E5%BC%80%E5%A7%8B%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AA/%E7%89%A9%E4%BD%93%E8%B7%9F%E8%B8%AA%E8%80%97%E6%97%B6iphone6p.png) 329 | 330 | 331 | # 5. 总结 332 | 333 | Vision是一个比较好用的框架,性能也不错。除了物体跟踪,Vision还提供**图像分类**、**人脸识别**、**人脸特征提取**、**人脸追踪**、**文字识别**等功能,使用方法和物体检测类似,本文就不再进行过多描述。 334 | 335 | 336 | 337 | ## 参考文档 338 | 339 | [Getting Started with Vision](https://github.com/jeffreybergier/Blog-Getting-Started-with-Vision) 340 | 341 | 342 | --------------------------------------------------------------------------------