├── .gitignore
├── README.md
├── openCV-Practice.xcodeproj
│   ├── project.pbxproj
│   ├── project.xcworkspace
│   │   ├── contents.xcworkspacedata
│   │   └── xcuserdata
│   │       └── Realank.xcuserdatad
│   │           └── UserInterfaceState.xcuserstate
│   └── xcuserdata
│       └── Realank.xcuserdatad
│           ├── xcdebugger
│           │   └── Breakpoints_v2.xcbkptlist
│           └── xcschemes
│               ├── openCV-Practice.xcscheme
│               └── xcschememanagement.plist
├── openCV-Practice
│   ├── AppDelegate.h
│   ├── AppDelegate.m
│   ├── Assets.xcassets
│   │   ├── AppIcon.appiconset
│   │   │   └── Contents.json
│   │   ├── Contents.json
│   │   ├── face.imageset
│   │   │   ├── Contents.json
│   │   │   └── IMG_0165.PNG
│   │   ├── img.imageset
│   │   │   ├── Contents.json
│   │   │   └── IMG_3705.PNG
│   │   ├── lane1.imageset
│   │   │   ├── Contents.json
│   │   │   └── lane1.jpeg
│   │   └── lane2.imageset
│   │       ├── Contents.json
│   │       └── lane2.jpg
│   ├── Base.lproj
│   │   ├── LaunchScreen.storyboard
│   │   └── Main.storyboard
│   ├── Info.plist
│   ├── LaneDetectViewController.h
│   ├── LaneDetectViewController.m
│   ├── LaneDetectViewController.xib
│   ├── VideoViewController.h
│   ├── VideoViewController.m
│   ├── VideoViewController.xib
│   ├── ViewController.h
│   ├── ViewController.m
│   ├── haarcascade_eye_tree_eyeglasses.xml
│   ├── haarcascade_frontalface_alt.xml
│   ├── haarcascade_frontalface_alt2.xml
│   ├── linefinder.h
│   ├── main.m
│   ├── openCVUtil.h
│   └── openCVUtil.mm
├── openCV-PracticeTests
│   ├── Info.plist
│   └── openCV_PracticeTests.m
└── openCV-PracticeUITests
    ├── Info.plist
    └── openCV_PracticeUITests.m

/.gitignore:
--------------------------------------------------------------------------------
1 | opencv2.framework
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # openCV-Practice
2 | 
3 | an OpenCV demo
4 | 
5 | you should download opencv2.framework from http://opencv.org yourself and add it to the project
--------------------------------------------------------------------------------
/openCV-Practice.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 8C754D3E1CD89F08006135CB /* VideoViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C754D3C1CD89F08006135CB /* VideoViewController.m */; }; 11 | 8C754D3F1CD89F08006135CB /* VideoViewController.xib in Resources */ = {isa = PBXBuildFile; fileRef = 8C754D3D1CD89F08006135CB /* VideoViewController.xib */; }; 12 | 8C75BF6C1CD9C79000E6EE44 /* haarcascade_frontalface_alt2.xml in Resources */ = {isa = PBXBuildFile; fileRef = 8C75BF6B1CD9C79000E6EE44 /* haarcascade_frontalface_alt2.xml */; }; 13 | 8CB609191DB6DB34003428E2 /* LaneDetectViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 8CB609171DB6DB34003428E2 /* LaneDetectViewController.m */; }; 14 | 8CB6091A1DB6DB34003428E2 /* LaneDetectViewController.xib in Resources */ = {isa = PBXBuildFile; fileRef = 8CB609181DB6DB34003428E2 /* LaneDetectViewController.xib */; }; 15 | 8CFF3F121CD337450037103C /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 8CFF3F111CD337450037103C /* main.m */; }; 16 | 8CFF3F151CD337450037103C /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 8CFF3F141CD337450037103C /* AppDelegate.m */; }; 17 | 8CFF3F181CD337450037103C /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 8CFF3F171CD337450037103C /* ViewController.m */; }; 18 | 8CFF3F1B1CD337450037103C /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 8CFF3F191CD337450037103C /* Main.storyboard */; }; 19 | 8CFF3F1D1CD337450037103C /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8CFF3F1C1CD337450037103C /* Assets.xcassets */; }; 20 | 8CFF3F201CD337450037103C /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 8CFF3F1E1CD337450037103C /* LaunchScreen.storyboard */; }; 21 | 8CFF3F2B1CD337450037103C /* openCV_PracticeTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 8CFF3F2A1CD337450037103C /* openCV_PracticeTests.m */; }; 22 | 8CFF3F361CD337450037103C /* openCV_PracticeUITests.m in Sources */ = {isa = PBXBuildFile; fileRef = 8CFF3F351CD337450037103C /* openCV_PracticeUITests.m */; }; 23 | 8CFF3F461CD337AE0037103C /* opencv2.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8CFF3F451CD337AE0037103C /* opencv2.framework */; }; 24 | 8CFF3F491CD339930037103C /* OpenCVUtil.mm in Sources */ = {isa = PBXBuildFile; fileRef = 8CFF3F481CD339930037103C /* OpenCVUtil.mm */; }; 25 | 8CFF3F4C1CD358480037103C /* haarcascade_eye_tree_eyeglasses.xml in Resources */ = {isa = PBXBuildFile; fileRef = 8CFF3F4A1CD358480037103C /* haarcascade_eye_tree_eyeglasses.xml */; }; 26 | 8CFF3F4D1CD358480037103C /* haarcascade_frontalface_alt.xml in Resources */ = {isa = PBXBuildFile; fileRef = 8CFF3F4B1CD358480037103C /* haarcascade_frontalface_alt.xml */; }; 27 | /* End PBXBuildFile section */ 28 | 29 | /* Begin PBXContainerItemProxy section */ 30 | 8CFF3F271CD337450037103C /* PBXContainerItemProxy */ = { 31 | isa = PBXContainerItemProxy; 32 | containerPortal = 8CFF3F051CD337450037103C /* Project object */; 33 | proxyType = 1; 34 | remoteGlobalIDString = 8CFF3F0C1CD337450037103C; 35 | remoteInfo = "openCV-Practice"; 36 | }; 37 | 8CFF3F321CD337450037103C /* PBXContainerItemProxy */ = { 38 | isa = PBXContainerItemProxy; 39 | containerPortal = 8CFF3F051CD337450037103C /* Project object */; 40 | proxyType = 1; 41 | remoteGlobalIDString = 8CFF3F0C1CD337450037103C; 42 | remoteInfo = "openCV-Practice"; 43 | }; 44 | /* End PBXContainerItemProxy 
section */ 45 | 46 | /* Begin PBXFileReference section */ 47 | 8C754D3B1CD89F08006135CB /* VideoViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = VideoViewController.h; sourceTree = ""; }; 48 | 8C754D3C1CD89F08006135CB /* VideoViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = VideoViewController.m; sourceTree = ""; }; 49 | 8C754D3D1CD89F08006135CB /* VideoViewController.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = VideoViewController.xib; sourceTree = ""; }; 50 | 8C75BF6B1CD9C79000E6EE44 /* haarcascade_frontalface_alt2.xml */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xml; path = haarcascade_frontalface_alt2.xml; sourceTree = ""; }; 51 | 8CB609161DB6DB34003428E2 /* LaneDetectViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = LaneDetectViewController.h; sourceTree = ""; }; 52 | 8CB609171DB6DB34003428E2 /* LaneDetectViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = LaneDetectViewController.m; sourceTree = ""; }; 53 | 8CB609181DB6DB34003428E2 /* LaneDetectViewController.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = LaneDetectViewController.xib; sourceTree = ""; }; 54 | 8CB6091B1DB6DE53003428E2 /* linefinder.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = linefinder.h; sourceTree = ""; }; 55 | 8CFF3F0D1CD337450037103C /* openCV-Practice.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "openCV-Practice.app"; sourceTree = BUILT_PRODUCTS_DIR; }; 56 | 8CFF3F111CD337450037103C /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 57 | 8CFF3F131CD337450037103C /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 58 | 8CFF3F141CD337450037103C /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 59 | 8CFF3F161CD337450037103C /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; 60 | 8CFF3F171CD337450037103C /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = ""; }; 61 | 8CFF3F1A1CD337450037103C /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 62 | 8CFF3F1C1CD337450037103C /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 63 | 8CFF3F1F1CD337450037103C /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 64 | 8CFF3F211CD337450037103C /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 65 | 8CFF3F261CD337450037103C /* openCV-PracticeTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "openCV-PracticeTests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; }; 66 | 8CFF3F2A1CD337450037103C /* openCV_PracticeTests.m */ = {isa = PBXFileReference; 
lastKnownFileType = sourcecode.c.objc; path = openCV_PracticeTests.m; sourceTree = ""; }; 67 | 8CFF3F2C1CD337450037103C /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 68 | 8CFF3F311CD337450037103C /* openCV-PracticeUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "openCV-PracticeUITests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; }; 69 | 8CFF3F351CD337450037103C /* openCV_PracticeUITests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = openCV_PracticeUITests.m; sourceTree = ""; }; 70 | 8CFF3F371CD337450037103C /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 71 | 8CFF3F451CD337AE0037103C /* opencv2.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = opencv2.framework; sourceTree = ""; }; 72 | 8CFF3F471CD339930037103C /* OpenCVUtil.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OpenCVUtil.h; sourceTree = ""; }; 73 | 8CFF3F481CD339930037103C /* OpenCVUtil.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = OpenCVUtil.mm; sourceTree = ""; }; 74 | 8CFF3F4A1CD358480037103C /* haarcascade_eye_tree_eyeglasses.xml */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xml; path = haarcascade_eye_tree_eyeglasses.xml; sourceTree = ""; }; 75 | 8CFF3F4B1CD358480037103C /* haarcascade_frontalface_alt.xml */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xml; path = haarcascade_frontalface_alt.xml; sourceTree = ""; }; 76 | /* End PBXFileReference section */ 77 | 78 | /* Begin PBXFrameworksBuildPhase section */ 79 | 8CFF3F0A1CD337450037103C /* Frameworks */ = { 80 | isa = PBXFrameworksBuildPhase; 81 | buildActionMask = 2147483647; 82 | files = ( 83 | 8CFF3F461CD337AE0037103C /* opencv2.framework in Frameworks */, 84 | ); 85 | runOnlyForDeploymentPostprocessing = 0; 86 | }; 87 | 8CFF3F231CD337450037103C /* Frameworks */ = { 88 | isa = PBXFrameworksBuildPhase; 89 | buildActionMask = 2147483647; 90 | files = ( 91 | ); 92 | runOnlyForDeploymentPostprocessing = 0; 93 | }; 94 | 8CFF3F2E1CD337450037103C /* Frameworks */ = { 95 | isa = PBXFrameworksBuildPhase; 96 | buildActionMask = 2147483647; 97 | files = ( 98 | ); 99 | runOnlyForDeploymentPostprocessing = 0; 100 | }; 101 | /* End PBXFrameworksBuildPhase section */ 102 | 103 | /* Begin PBXGroup section */ 104 | 8CFF3F041CD337450037103C = { 105 | isa = PBXGroup; 106 | children = ( 107 | 8CFF3F0F1CD337450037103C /* openCV-Practice */, 108 | 8CFF3F291CD337450037103C /* openCV-PracticeTests */, 109 | 8CFF3F341CD337450037103C /* openCV-PracticeUITests */, 110 | 8CFF3F0E1CD337450037103C /* Products */, 111 | ); 112 | sourceTree = ""; 113 | }; 114 | 8CFF3F0E1CD337450037103C /* Products */ = { 115 | isa = PBXGroup; 116 | children = ( 117 | 8CFF3F0D1CD337450037103C /* openCV-Practice.app */, 118 | 8CFF3F261CD337450037103C /* openCV-PracticeTests.xctest */, 119 | 8CFF3F311CD337450037103C /* openCV-PracticeUITests.xctest */, 120 | ); 121 | name = Products; 122 | sourceTree = ""; 123 | }; 124 | 8CFF3F0F1CD337450037103C /* openCV-Practice */ = { 125 | isa = PBXGroup; 126 | children = ( 127 | 8CFF3F131CD337450037103C /* AppDelegate.h */, 128 | 8CFF3F141CD337450037103C /* AppDelegate.m */, 129 | 8CFF3F161CD337450037103C /* ViewController.h */, 130 | 8CFF3F171CD337450037103C /* 
ViewController.m */, 131 | 8C754D3B1CD89F08006135CB /* VideoViewController.h */, 132 | 8C754D3C1CD89F08006135CB /* VideoViewController.m */, 133 | 8C754D3D1CD89F08006135CB /* VideoViewController.xib */, 134 | 8CB609161DB6DB34003428E2 /* LaneDetectViewController.h */, 135 | 8CB609171DB6DB34003428E2 /* LaneDetectViewController.m */, 136 | 8CB609181DB6DB34003428E2 /* LaneDetectViewController.xib */, 137 | 8CB6091B1DB6DE53003428E2 /* linefinder.h */, 138 | 8CFF3F471CD339930037103C /* OpenCVUtil.h */, 139 | 8CFF3F481CD339930037103C /* OpenCVUtil.mm */, 140 | 8CFF3F191CD337450037103C /* Main.storyboard */, 141 | 8CFF3F1C1CD337450037103C /* Assets.xcassets */, 142 | 8CFF3F1E1CD337450037103C /* LaunchScreen.storyboard */, 143 | 8CFF3F211CD337450037103C /* Info.plist */, 144 | 8CFF3F101CD337450037103C /* Supporting Files */, 145 | ); 146 | path = "openCV-Practice"; 147 | sourceTree = ""; 148 | }; 149 | 8CFF3F101CD337450037103C /* Supporting Files */ = { 150 | isa = PBXGroup; 151 | children = ( 152 | 8C75BF6B1CD9C79000E6EE44 /* haarcascade_frontalface_alt2.xml */, 153 | 8CFF3F4A1CD358480037103C /* haarcascade_eye_tree_eyeglasses.xml */, 154 | 8CFF3F4B1CD358480037103C /* haarcascade_frontalface_alt.xml */, 155 | 8CFF3F451CD337AE0037103C /* opencv2.framework */, 156 | 8CFF3F111CD337450037103C /* main.m */, 157 | ); 158 | name = "Supporting Files"; 159 | sourceTree = ""; 160 | }; 161 | 8CFF3F291CD337450037103C /* openCV-PracticeTests */ = { 162 | isa = PBXGroup; 163 | children = ( 164 | 8CFF3F2A1CD337450037103C /* openCV_PracticeTests.m */, 165 | 8CFF3F2C1CD337450037103C /* Info.plist */, 166 | ); 167 | path = "openCV-PracticeTests"; 168 | sourceTree = ""; 169 | }; 170 | 8CFF3F341CD337450037103C /* openCV-PracticeUITests */ = { 171 | isa = PBXGroup; 172 | children = ( 173 | 8CFF3F351CD337450037103C /* openCV_PracticeUITests.m */, 174 | 8CFF3F371CD337450037103C /* Info.plist */, 175 | ); 176 | path = "openCV-PracticeUITests"; 177 | sourceTree = ""; 178 | }; 179 | /* End PBXGroup section */ 180 | 181 | /* Begin PBXNativeTarget section */ 182 | 8CFF3F0C1CD337450037103C /* openCV-Practice */ = { 183 | isa = PBXNativeTarget; 184 | buildConfigurationList = 8CFF3F3A1CD337450037103C /* Build configuration list for PBXNativeTarget "openCV-Practice" */; 185 | buildPhases = ( 186 | 8CFF3F091CD337450037103C /* Sources */, 187 | 8CFF3F0A1CD337450037103C /* Frameworks */, 188 | 8CFF3F0B1CD337450037103C /* Resources */, 189 | ); 190 | buildRules = ( 191 | ); 192 | dependencies = ( 193 | ); 194 | name = "openCV-Practice"; 195 | productName = "openCV-Practice"; 196 | productReference = 8CFF3F0D1CD337450037103C /* openCV-Practice.app */; 197 | productType = "com.apple.product-type.application"; 198 | }; 199 | 8CFF3F251CD337450037103C /* openCV-PracticeTests */ = { 200 | isa = PBXNativeTarget; 201 | buildConfigurationList = 8CFF3F3D1CD337450037103C /* Build configuration list for PBXNativeTarget "openCV-PracticeTests" */; 202 | buildPhases = ( 203 | 8CFF3F221CD337450037103C /* Sources */, 204 | 8CFF3F231CD337450037103C /* Frameworks */, 205 | 8CFF3F241CD337450037103C /* Resources */, 206 | ); 207 | buildRules = ( 208 | ); 209 | dependencies = ( 210 | 8CFF3F281CD337450037103C /* PBXTargetDependency */, 211 | ); 212 | name = "openCV-PracticeTests"; 213 | productName = "openCV-PracticeTests"; 214 | productReference = 8CFF3F261CD337450037103C /* openCV-PracticeTests.xctest */; 215 | productType = "com.apple.product-type.bundle.unit-test"; 216 | }; 217 | 8CFF3F301CD337450037103C /* openCV-PracticeUITests */ = { 218 | isa = 
PBXNativeTarget; 219 | buildConfigurationList = 8CFF3F401CD337450037103C /* Build configuration list for PBXNativeTarget "openCV-PracticeUITests" */; 220 | buildPhases = ( 221 | 8CFF3F2D1CD337450037103C /* Sources */, 222 | 8CFF3F2E1CD337450037103C /* Frameworks */, 223 | 8CFF3F2F1CD337450037103C /* Resources */, 224 | ); 225 | buildRules = ( 226 | ); 227 | dependencies = ( 228 | 8CFF3F331CD337450037103C /* PBXTargetDependency */, 229 | ); 230 | name = "openCV-PracticeUITests"; 231 | productName = "openCV-PracticeUITests"; 232 | productReference = 8CFF3F311CD337450037103C /* openCV-PracticeUITests.xctest */; 233 | productType = "com.apple.product-type.bundle.ui-testing"; 234 | }; 235 | /* End PBXNativeTarget section */ 236 | 237 | /* Begin PBXProject section */ 238 | 8CFF3F051CD337450037103C /* Project object */ = { 239 | isa = PBXProject; 240 | attributes = { 241 | LastUpgradeCheck = 0800; 242 | ORGANIZATIONNAME = realank; 243 | TargetAttributes = { 244 | 8CFF3F0C1CD337450037103C = { 245 | CreatedOnToolsVersion = 7.3; 246 | DevelopmentTeam = 275X96KVEB; 247 | }; 248 | 8CFF3F251CD337450037103C = { 249 | CreatedOnToolsVersion = 7.3; 250 | TestTargetID = 8CFF3F0C1CD337450037103C; 251 | }; 252 | 8CFF3F301CD337450037103C = { 253 | CreatedOnToolsVersion = 7.3; 254 | TestTargetID = 8CFF3F0C1CD337450037103C; 255 | }; 256 | }; 257 | }; 258 | buildConfigurationList = 8CFF3F081CD337450037103C /* Build configuration list for PBXProject "openCV-Practice" */; 259 | compatibilityVersion = "Xcode 3.2"; 260 | developmentRegion = English; 261 | hasScannedForEncodings = 0; 262 | knownRegions = ( 263 | en, 264 | Base, 265 | ); 266 | mainGroup = 8CFF3F041CD337450037103C; 267 | productRefGroup = 8CFF3F0E1CD337450037103C /* Products */; 268 | projectDirPath = ""; 269 | projectRoot = ""; 270 | targets = ( 271 | 8CFF3F0C1CD337450037103C /* openCV-Practice */, 272 | 8CFF3F251CD337450037103C /* openCV-PracticeTests */, 273 | 8CFF3F301CD337450037103C /* openCV-PracticeUITests */, 274 | ); 275 | }; 276 | /* End PBXProject section */ 277 | 278 | /* Begin PBXResourcesBuildPhase section */ 279 | 8CFF3F0B1CD337450037103C /* Resources */ = { 280 | isa = PBXResourcesBuildPhase; 281 | buildActionMask = 2147483647; 282 | files = ( 283 | 8CFF3F4D1CD358480037103C /* haarcascade_frontalface_alt.xml in Resources */, 284 | 8C75BF6C1CD9C79000E6EE44 /* haarcascade_frontalface_alt2.xml in Resources */, 285 | 8CFF3F201CD337450037103C /* LaunchScreen.storyboard in Resources */, 286 | 8C754D3F1CD89F08006135CB /* VideoViewController.xib in Resources */, 287 | 8CFF3F1D1CD337450037103C /* Assets.xcassets in Resources */, 288 | 8CB6091A1DB6DB34003428E2 /* LaneDetectViewController.xib in Resources */, 289 | 8CFF3F1B1CD337450037103C /* Main.storyboard in Resources */, 290 | 8CFF3F4C1CD358480037103C /* haarcascade_eye_tree_eyeglasses.xml in Resources */, 291 | ); 292 | runOnlyForDeploymentPostprocessing = 0; 293 | }; 294 | 8CFF3F241CD337450037103C /* Resources */ = { 295 | isa = PBXResourcesBuildPhase; 296 | buildActionMask = 2147483647; 297 | files = ( 298 | ); 299 | runOnlyForDeploymentPostprocessing = 0; 300 | }; 301 | 8CFF3F2F1CD337450037103C /* Resources */ = { 302 | isa = PBXResourcesBuildPhase; 303 | buildActionMask = 2147483647; 304 | files = ( 305 | ); 306 | runOnlyForDeploymentPostprocessing = 0; 307 | }; 308 | /* End PBXResourcesBuildPhase section */ 309 | 310 | /* Begin PBXSourcesBuildPhase section */ 311 | 8CFF3F091CD337450037103C /* Sources */ = { 312 | isa = PBXSourcesBuildPhase; 313 | buildActionMask = 2147483647; 314 | files 
= ( 315 | 8C754D3E1CD89F08006135CB /* VideoViewController.m in Sources */, 316 | 8CFF3F491CD339930037103C /* OpenCVUtil.mm in Sources */, 317 | 8CFF3F181CD337450037103C /* ViewController.m in Sources */, 318 | 8CB609191DB6DB34003428E2 /* LaneDetectViewController.m in Sources */, 319 | 8CFF3F151CD337450037103C /* AppDelegate.m in Sources */, 320 | 8CFF3F121CD337450037103C /* main.m in Sources */, 321 | ); 322 | runOnlyForDeploymentPostprocessing = 0; 323 | }; 324 | 8CFF3F221CD337450037103C /* Sources */ = { 325 | isa = PBXSourcesBuildPhase; 326 | buildActionMask = 2147483647; 327 | files = ( 328 | 8CFF3F2B1CD337450037103C /* openCV_PracticeTests.m in Sources */, 329 | ); 330 | runOnlyForDeploymentPostprocessing = 0; 331 | }; 332 | 8CFF3F2D1CD337450037103C /* Sources */ = { 333 | isa = PBXSourcesBuildPhase; 334 | buildActionMask = 2147483647; 335 | files = ( 336 | 8CFF3F361CD337450037103C /* openCV_PracticeUITests.m in Sources */, 337 | ); 338 | runOnlyForDeploymentPostprocessing = 0; 339 | }; 340 | /* End PBXSourcesBuildPhase section */ 341 | 342 | /* Begin PBXTargetDependency section */ 343 | 8CFF3F281CD337450037103C /* PBXTargetDependency */ = { 344 | isa = PBXTargetDependency; 345 | target = 8CFF3F0C1CD337450037103C /* openCV-Practice */; 346 | targetProxy = 8CFF3F271CD337450037103C /* PBXContainerItemProxy */; 347 | }; 348 | 8CFF3F331CD337450037103C /* PBXTargetDependency */ = { 349 | isa = PBXTargetDependency; 350 | target = 8CFF3F0C1CD337450037103C /* openCV-Practice */; 351 | targetProxy = 8CFF3F321CD337450037103C /* PBXContainerItemProxy */; 352 | }; 353 | /* End PBXTargetDependency section */ 354 | 355 | /* Begin PBXVariantGroup section */ 356 | 8CFF3F191CD337450037103C /* Main.storyboard */ = { 357 | isa = PBXVariantGroup; 358 | children = ( 359 | 8CFF3F1A1CD337450037103C /* Base */, 360 | ); 361 | name = Main.storyboard; 362 | sourceTree = ""; 363 | }; 364 | 8CFF3F1E1CD337450037103C /* LaunchScreen.storyboard */ = { 365 | isa = PBXVariantGroup; 366 | children = ( 367 | 8CFF3F1F1CD337450037103C /* Base */, 368 | ); 369 | name = LaunchScreen.storyboard; 370 | sourceTree = ""; 371 | }; 372 | /* End PBXVariantGroup section */ 373 | 374 | /* Begin XCBuildConfiguration section */ 375 | 8CFF3F381CD337450037103C /* Debug */ = { 376 | isa = XCBuildConfiguration; 377 | buildSettings = { 378 | ALWAYS_SEARCH_USER_PATHS = NO; 379 | CLANG_ANALYZER_NONNULL = YES; 380 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 381 | CLANG_CXX_LIBRARY = "libc++"; 382 | CLANG_ENABLE_MODULES = YES; 383 | CLANG_ENABLE_OBJC_ARC = YES; 384 | CLANG_WARN_BOOL_CONVERSION = YES; 385 | CLANG_WARN_CONSTANT_CONVERSION = YES; 386 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 387 | CLANG_WARN_EMPTY_BODY = YES; 388 | CLANG_WARN_ENUM_CONVERSION = YES; 389 | CLANG_WARN_INFINITE_RECURSION = YES; 390 | CLANG_WARN_INT_CONVERSION = YES; 391 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 392 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 393 | CLANG_WARN_UNREACHABLE_CODE = YES; 394 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 395 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 396 | COPY_PHASE_STRIP = NO; 397 | DEBUG_INFORMATION_FORMAT = dwarf; 398 | ENABLE_STRICT_OBJC_MSGSEND = YES; 399 | ENABLE_TESTABILITY = YES; 400 | GCC_C_LANGUAGE_STANDARD = gnu99; 401 | GCC_DYNAMIC_NO_PIC = NO; 402 | GCC_NO_COMMON_BLOCKS = YES; 403 | GCC_OPTIMIZATION_LEVEL = 0; 404 | GCC_PREPROCESSOR_DEFINITIONS = ( 405 | "DEBUG=1", 406 | "$(inherited)", 407 | ); 408 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 409 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 410 | 
GCC_WARN_UNDECLARED_SELECTOR = YES; 411 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 412 | GCC_WARN_UNUSED_FUNCTION = YES; 413 | GCC_WARN_UNUSED_VARIABLE = YES; 414 | IPHONEOS_DEPLOYMENT_TARGET = 9.3; 415 | MTL_ENABLE_DEBUG_INFO = YES; 416 | ONLY_ACTIVE_ARCH = YES; 417 | SDKROOT = iphoneos; 418 | }; 419 | name = Debug; 420 | }; 421 | 8CFF3F391CD337450037103C /* Release */ = { 422 | isa = XCBuildConfiguration; 423 | buildSettings = { 424 | ALWAYS_SEARCH_USER_PATHS = NO; 425 | CLANG_ANALYZER_NONNULL = YES; 426 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 427 | CLANG_CXX_LIBRARY = "libc++"; 428 | CLANG_ENABLE_MODULES = YES; 429 | CLANG_ENABLE_OBJC_ARC = YES; 430 | CLANG_WARN_BOOL_CONVERSION = YES; 431 | CLANG_WARN_CONSTANT_CONVERSION = YES; 432 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 433 | CLANG_WARN_EMPTY_BODY = YES; 434 | CLANG_WARN_ENUM_CONVERSION = YES; 435 | CLANG_WARN_INFINITE_RECURSION = YES; 436 | CLANG_WARN_INT_CONVERSION = YES; 437 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 438 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 439 | CLANG_WARN_UNREACHABLE_CODE = YES; 440 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 441 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 442 | COPY_PHASE_STRIP = NO; 443 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 444 | ENABLE_NS_ASSERTIONS = NO; 445 | ENABLE_STRICT_OBJC_MSGSEND = YES; 446 | GCC_C_LANGUAGE_STANDARD = gnu99; 447 | GCC_NO_COMMON_BLOCKS = YES; 448 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 449 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 450 | GCC_WARN_UNDECLARED_SELECTOR = YES; 451 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 452 | GCC_WARN_UNUSED_FUNCTION = YES; 453 | GCC_WARN_UNUSED_VARIABLE = YES; 454 | IPHONEOS_DEPLOYMENT_TARGET = 9.3; 455 | MTL_ENABLE_DEBUG_INFO = NO; 456 | SDKROOT = iphoneos; 457 | VALIDATE_PRODUCT = YES; 458 | }; 459 | name = Release; 460 | }; 461 | 8CFF3F3B1CD337450037103C /* Debug */ = { 462 | isa = XCBuildConfiguration; 463 | buildSettings = { 464 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 465 | DEVELOPMENT_TEAM = 275X96KVEB; 466 | FRAMEWORK_SEARCH_PATHS = ( 467 | "$(inherited)", 468 | "$(PROJECT_DIR)/openCV-Practice", 469 | ); 470 | INFOPLIST_FILE = "openCV-Practice/Info.plist"; 471 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 472 | PRODUCT_BUNDLE_IDENTIFIER = "realank.openCV-Practice"; 473 | PRODUCT_NAME = "$(TARGET_NAME)"; 474 | TARGETED_DEVICE_FAMILY = "1,2"; 475 | }; 476 | name = Debug; 477 | }; 478 | 8CFF3F3C1CD337450037103C /* Release */ = { 479 | isa = XCBuildConfiguration; 480 | buildSettings = { 481 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 482 | DEVELOPMENT_TEAM = 275X96KVEB; 483 | FRAMEWORK_SEARCH_PATHS = ( 484 | "$(inherited)", 485 | "$(PROJECT_DIR)/openCV-Practice", 486 | ); 487 | INFOPLIST_FILE = "openCV-Practice/Info.plist"; 488 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 489 | PRODUCT_BUNDLE_IDENTIFIER = "realank.openCV-Practice"; 490 | PRODUCT_NAME = "$(TARGET_NAME)"; 491 | TARGETED_DEVICE_FAMILY = "1,2"; 492 | }; 493 | name = Release; 494 | }; 495 | 8CFF3F3E1CD337450037103C /* Debug */ = { 496 | isa = XCBuildConfiguration; 497 | buildSettings = { 498 | BUNDLE_LOADER = "$(TEST_HOST)"; 499 | INFOPLIST_FILE = "openCV-PracticeTests/Info.plist"; 500 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 501 | PRODUCT_BUNDLE_IDENTIFIER = "realank.openCV-PracticeTests"; 502 | PRODUCT_NAME = "$(TARGET_NAME)"; 503 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/openCV-Practice.app/openCV-Practice"; 
504 | }; 505 | name = Debug; 506 | }; 507 | 8CFF3F3F1CD337450037103C /* Release */ = { 508 | isa = XCBuildConfiguration; 509 | buildSettings = { 510 | BUNDLE_LOADER = "$(TEST_HOST)"; 511 | INFOPLIST_FILE = "openCV-PracticeTests/Info.plist"; 512 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 513 | PRODUCT_BUNDLE_IDENTIFIER = "realank.openCV-PracticeTests"; 514 | PRODUCT_NAME = "$(TARGET_NAME)"; 515 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/openCV-Practice.app/openCV-Practice"; 516 | }; 517 | name = Release; 518 | }; 519 | 8CFF3F411CD337450037103C /* Debug */ = { 520 | isa = XCBuildConfiguration; 521 | buildSettings = { 522 | INFOPLIST_FILE = "openCV-PracticeUITests/Info.plist"; 523 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 524 | PRODUCT_BUNDLE_IDENTIFIER = "realank.openCV-PracticeUITests"; 525 | PRODUCT_NAME = "$(TARGET_NAME)"; 526 | TEST_TARGET_NAME = "openCV-Practice"; 527 | }; 528 | name = Debug; 529 | }; 530 | 8CFF3F421CD337450037103C /* Release */ = { 531 | isa = XCBuildConfiguration; 532 | buildSettings = { 533 | INFOPLIST_FILE = "openCV-PracticeUITests/Info.plist"; 534 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 535 | PRODUCT_BUNDLE_IDENTIFIER = "realank.openCV-PracticeUITests"; 536 | PRODUCT_NAME = "$(TARGET_NAME)"; 537 | TEST_TARGET_NAME = "openCV-Practice"; 538 | }; 539 | name = Release; 540 | }; 541 | /* End XCBuildConfiguration section */ 542 | 543 | /* Begin XCConfigurationList section */ 544 | 8CFF3F081CD337450037103C /* Build configuration list for PBXProject "openCV-Practice" */ = { 545 | isa = XCConfigurationList; 546 | buildConfigurations = ( 547 | 8CFF3F381CD337450037103C /* Debug */, 548 | 8CFF3F391CD337450037103C /* Release */, 549 | ); 550 | defaultConfigurationIsVisible = 0; 551 | defaultConfigurationName = Release; 552 | }; 553 | 8CFF3F3A1CD337450037103C /* Build configuration list for PBXNativeTarget "openCV-Practice" */ = { 554 | isa = XCConfigurationList; 555 | buildConfigurations = ( 556 | 8CFF3F3B1CD337450037103C /* Debug */, 557 | 8CFF3F3C1CD337450037103C /* Release */, 558 | ); 559 | defaultConfigurationIsVisible = 0; 560 | defaultConfigurationName = Release; 561 | }; 562 | 8CFF3F3D1CD337450037103C /* Build configuration list for PBXNativeTarget "openCV-PracticeTests" */ = { 563 | isa = XCConfigurationList; 564 | buildConfigurations = ( 565 | 8CFF3F3E1CD337450037103C /* Debug */, 566 | 8CFF3F3F1CD337450037103C /* Release */, 567 | ); 568 | defaultConfigurationIsVisible = 0; 569 | defaultConfigurationName = Release; 570 | }; 571 | 8CFF3F401CD337450037103C /* Build configuration list for PBXNativeTarget "openCV-PracticeUITests" */ = { 572 | isa = XCConfigurationList; 573 | buildConfigurations = ( 574 | 8CFF3F411CD337450037103C /* Debug */, 575 | 8CFF3F421CD337450037103C /* Release */, 576 | ); 577 | defaultConfigurationIsVisible = 0; 578 | defaultConfigurationName = Release; 579 | }; 580 | /* End XCConfigurationList section */ 581 | }; 582 | rootObject = 8CFF3F051CD337450037103C /* Project object */; 583 | } 584 | -------------------------------------------------------------------------------- /openCV-Practice.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- 
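Two details of the project file above are easy to miss when reproducing this setup. First, opencv2.framework is deliberately git-ignored (see /.gitignore) and FRAMEWORK_SEARCH_PATHS points at $(PROJECT_DIR)/openCV-Practice, so the framework downloaded from http://opencv.org has to be placed inside the openCV-Practice source folder before the project will link. Second, OpenCVUtil.mm is registered as sourcecode.cpp.objcpp, i.e. Objective-C++, which is what lets one file mix cv:: types with Cocoa classes. In such wrappers the include order matters: Apple headers define macros such as NO, YES, MIN, MAX and check that clash with OpenCV's C++ headers, so the OpenCV umbrella header must come first. A minimal sketch of the top of such a file (the real OpenCVUtil.h/.mm bodies are not shown in this listing, so the exact layout here is an assumption):

// Top of a hypothetical OpenCV wrapper .mm: OpenCV headers first, Apple headers after.
#import <opencv2/opencv.hpp>        // C++ umbrella header, must precede UIKit/Foundation
#import <opencv2/imgcodecs/ios.h>   // UIImageToMat / MatToUIImage (OpenCV 3.x header path)
#import <UIKit/UIKit.h>
#import "OpenCVUtil.h"              // plain Objective-C header, safe to import from .m files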
/openCV-Practice.xcodeproj/project.xcworkspace/xcuserdata/Realank.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
 https://raw.githubusercontent.com/Realank/openCV-Practice/6a263a89d3cdbb2dc19cfddda5f029c8936f0273/openCV-Practice.xcodeproj/project.xcworkspace/xcuserdata/Realank.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/openCV-Practice.xcodeproj/xcuserdata/Realank.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/openCV-Practice.xcodeproj/xcuserdata/Realank.xcuserdatad/xcschemes/openCV-Practice.xcscheme:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/openCV-Practice.xcodeproj/xcuserdata/Realank.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>SchemeUserState</key>
6 | 	<dict>
7 | 		<key>openCV-Practice.xcscheme</key>
8 | 		<dict>
9 | 			<key>orderHint</key>
10 | 			<integer>0</integer>
11 | 		</dict>
12 | 	</dict>
13 | 	<key>SuppressBuildableAutocreation</key>
14 | 	<dict>
15 | 		<key>8CFF3F0C1CD337450037103C</key>
16 | 		<dict>
17 | 			<key>primary</key>
18 | 			<true/>
19 | 		</dict>
20 | 		<key>8CFF3F251CD337450037103C</key>
21 | 		<dict>
22 | 			<key>primary</key>
23 | 			<true/>
24 | 		</dict>
25 | 		<key>8CFF3F301CD337450037103C</key>
26 | 		<dict>
27 | 			<key>primary</key>
28 | 			<true/>
29 | 		</dict>
30 | 	</dict>
31 | </dict>
32 | </plist>
33 | 
--------------------------------------------------------------------------------
/openCV-Practice/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | //  AppDelegate.h
3 | //  openCV-Practice
4 | //
5 | //  Created by Realank on 16/4/29.
6 | //  Copyright © 2016 realank. All rights reserved.
7 | //
8 | 
9 | #import <UIKit/UIKit.h>
10 | 
11 | @interface AppDelegate : UIResponder <UIApplicationDelegate>
12 | 
13 | @property (strong, nonatomic) UIWindow *window;
14 | 
15 | 
16 | @end
17 | 
18 | 
--------------------------------------------------------------------------------
/openCV-Practice/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | //  AppDelegate.m
3 | //  openCV-Practice
4 | //
5 | //  Created by Realank on 16/4/29.
6 | //  Copyright © 2016 realank. All rights reserved.
7 | //
8 | 
9 | #import "AppDelegate.h"
10 | 
11 | @interface AppDelegate ()
12 | 
13 | @end
14 | 
15 | @implementation AppDelegate
16 | 
17 | 
18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
19 |     // Override point for customization after application launch.
20 |     return YES;
21 | }
22 | 
23 | - (void)applicationWillResignActive:(UIApplication *)application {
24 |     // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
25 |     // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
26 | } 27 | 28 | - (void)applicationDidEnterBackground:(UIApplication *)application { 29 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 30 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 31 | } 32 | 33 | - (void)applicationWillEnterForeground:(UIApplication *)application { 34 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. 35 | } 36 | 37 | - (void)applicationDidBecomeActive:(UIApplication *)application { 38 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 39 | } 40 | 41 | - (void)applicationWillTerminate:(UIApplication *)application { 42 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 43 | } 44 | 45 | @end 46 | -------------------------------------------------------------------------------- /openCV-Practice/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | } 43 | ], 44 | "info" : { 45 | "version" : 1, 46 | "author" : "xcode" 47 | } 48 | } -------------------------------------------------------------------------------- /openCV-Practice/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /openCV-Practice/Assets.xcassets/face.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "IMG_0165.PNG", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /openCV-Practice/Assets.xcassets/face.imageset/IMG_0165.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Realank/openCV-Practice/6a263a89d3cdbb2dc19cfddda5f029c8936f0273/openCV-Practice/Assets.xcassets/face.imageset/IMG_0165.PNG -------------------------------------------------------------------------------- /openCV-Practice/Assets.xcassets/img.imageset/Contents.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "IMG_3705.PNG", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /openCV-Practice/Assets.xcassets/img.imageset/IMG_3705.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Realank/openCV-Practice/6a263a89d3cdbb2dc19cfddda5f029c8936f0273/openCV-Practice/Assets.xcassets/img.imageset/IMG_3705.PNG -------------------------------------------------------------------------------- /openCV-Practice/Assets.xcassets/lane1.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "lane1.jpeg", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /openCV-Practice/Assets.xcassets/lane1.imageset/lane1.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Realank/openCV-Practice/6a263a89d3cdbb2dc19cfddda5f029c8936f0273/openCV-Practice/Assets.xcassets/lane1.imageset/lane1.jpeg -------------------------------------------------------------------------------- /openCV-Practice/Assets.xcassets/lane2.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "lane2.jpg", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /openCV-Practice/Assets.xcassets/lane2.imageset/lane2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Realank/openCV-Practice/6a263a89d3cdbb2dc19cfddda5f029c8936f0273/openCV-Practice/Assets.xcassets/lane2.imageset/lane2.jpg -------------------------------------------------------------------------------- /openCV-Practice/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /openCV-Practice/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 34 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 
--------------------------------------------------------------------------------
/openCV-Practice/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>CFBundleDevelopmentRegion</key>
6 | 	<string>en</string>
7 | 	<key>NSCameraUsageDescription</key>
8 | 	<string>Allow Camera</string>
9 | 	<key>CFBundleExecutable</key>
10 | 	<string>$(EXECUTABLE_NAME)</string>
11 | 	<key>CFBundleIdentifier</key>
12 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
13 | 	<key>CFBundleInfoDictionaryVersion</key>
14 | 	<string>6.0</string>
15 | 	<key>CFBundleName</key>
16 | 	<string>$(PRODUCT_NAME)</string>
17 | 	<key>CFBundlePackageType</key>
18 | 	<string>APPL</string>
19 | 	<key>CFBundleShortVersionString</key>
20 | 	<string>1.0</string>
21 | 	<key>CFBundleSignature</key>
22 | 	<string>????</string>
23 | 	<key>CFBundleVersion</key>
24 | 	<string>1</string>
25 | 	<key>LSRequiresIPhoneOS</key>
26 | 	<true/>
27 | 	<key>UILaunchStoryboardName</key>
28 | 	<string>LaunchScreen</string>
29 | 	<key>UIMainStoryboardFile</key>
30 | 	<string>Main</string>
31 | 	<key>UIRequiredDeviceCapabilities</key>
32 | 	<array>
33 | 		<string>armv7</string>
34 | 	</array>
35 | 	<key>UISupportedInterfaceOrientations</key>
36 | 	<array>
37 | 		<string>UIInterfaceOrientationPortrait</string>
38 | 		<string>UIInterfaceOrientationLandscapeLeft</string>
39 | 		<string>UIInterfaceOrientationLandscapeRight</string>
40 | 	</array>
41 | </dict>
42 | </plist>
43 | 
--------------------------------------------------------------------------------
/openCV-Practice/LaneDetectViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | //  LaneDetectViewController.h
3 | //  openCV-Practice
4 | //
5 | //  Created by Realank on 2016/10/18.
6 | //  Copyright © 2016 realank. All rights reserved.
7 | //
8 | 
9 | #import <UIKit/UIKit.h>
10 | 
11 | @interface LaneDetectViewController : UIViewController
12 | 
13 | @end
14 | 
--------------------------------------------------------------------------------
/openCV-Practice/LaneDetectViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | //  LaneDetectViewController.m
3 | //  openCV-Practice
4 | //
5 | //  Created by Realank on 2016/10/18.
6 | //  Copyright © 2016 realank. All rights reserved.
7 | //
8 | 
9 | #import "LaneDetectViewController.h"
10 | #import "openCVUtil.h"
11 | @interface LaneDetectViewController ()
12 | @property (weak, nonatomic) IBOutlet UIImageView *laneImageView;
13 | @property (weak, nonatomic) IBOutlet UIImageView *stepImage1View;
14 | @property (weak, nonatomic) IBOutlet UIImageView *stepImage2View;
15 | @property (weak, nonatomic) IBOutlet UIImageView *stepImage3View;
16 | @property (weak, nonatomic) IBOutlet UIImageView *stepImage4View;
17 | @property (weak, nonatomic) IBOutlet UIImageView *stepImage5View;
18 | @property (weak, nonatomic) IBOutlet UIImageView *stepImage6View;
19 | 
20 | @end
21 | 
22 | @implementation LaneDetectViewController
23 | 
24 | - (void)viewDidLoad {
25 |     [super viewDidLoad];
26 |     NSArray* images = [OpenCVUtil laneDetectForImage:[UIImage imageNamed:@"lane1"]];
27 |     self.stepImage1View.image = images[0];
28 |     self.stepImage2View.image = images[1];
29 |     self.stepImage3View.image = images[2];
30 |     self.stepImage4View.image = images[3];
31 |     self.stepImage5View.image = images[4];
32 |     self.stepImage6View.image = images[5];
33 |     self.laneImageView.image = images[6];
34 | }
35 | 
36 | 
37 | 
38 | @end
39 | 
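viewDidLoad above expects seven images from +laneDetectForImage:, six intermediate steps plus the final annotated frame. openCVUtil.mm itself is not included in this listing, so the following Objective-C++ sketch is only a plausible reconstruction: the method name comes from the call site, while every step, threshold, and the ROI polygon are assumptions. It shows how seven step images could fall out of a classic grayscale, blur, Canny, region-of-interest, Hough sequence (OpenCV 3.x API):

// Hypothetical sketch of openCVUtil.mm's lane detector, not the project's actual code.
#import <opencv2/opencv.hpp>
#import <opencv2/imgcodecs/ios.h>
#import "openCVUtil.h"

@implementation OpenCVUtil

+ (NSArray *)laneDetectForImage:(UIImage *)image {
    cv::Mat src;
    UIImageToMat(image, src);                              // UIImage -> BGRA cv::Mat

    cv::Mat gray, blurred, edges;
    cv::cvtColor(src, gray, cv::COLOR_BGRA2GRAY);          // step 1: grayscale
    cv::GaussianBlur(gray, blurred, cv::Size(5, 5), 0);    // step 2: smooth out sensor noise
    cv::Canny(blurred, edges, 50, 150);                    // step 3: edge map

    // step 4: trapezoidal region of interest covering the road ahead (guessed shape)
    cv::Mat mask = cv::Mat::zeros(edges.size(), edges.type());
    std::vector<cv::Point> roi = {
        cv::Point(0, edges.rows),
        cv::Point(edges.cols / 2 - 50, edges.rows / 2),
        cv::Point(edges.cols / 2 + 50, edges.rows / 2),
        cv::Point(edges.cols, edges.rows)
    };
    cv::fillConvexPoly(mask, roi, cv::Scalar(255));

    cv::Mat masked;                                        // step 5: keep edges inside the ROI only
    cv::bitwise_and(edges, mask, masked);

    // step 6: probabilistic Hough transform to extract candidate lane segments
    std::vector<cv::Vec4i> lines;
    cv::HoughLinesP(masked, lines, 1, CV_PI / 180, 50, 40, 20);
    cv::Mat segments = cv::Mat::zeros(src.size(), CV_8UC4);
    cv::Mat overlay  = src.clone();                        // final: segments drawn over the input
    for (const cv::Vec4i &l : lines) {
        cv::line(segments, cv::Point(l[0], l[1]), cv::Point(l[2], l[3]), cv::Scalar(0, 0, 255, 255), 3);
        cv::line(overlay,  cv::Point(l[0], l[1]), cv::Point(l[2], l[3]), cv::Scalar(0, 0, 255, 255), 3);
    }

    return @[MatToUIImage(gray), MatToUIImage(blurred), MatToUIImage(edges),
             MatToUIImage(mask), MatToUIImage(masked),  MatToUIImage(segments),
             MatToUIImage(overlay)];
}

@end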
--------------------------------------------------------------------------------
/openCV-Practice/LaneDetectViewController.xib:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/openCV-Practice/VideoViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | //  VideoViewController.h
3 | //  openCV-Practice
4 | //
5 | //  Created by Realank on 16/5/3.
6 | //  Copyright © 2016 realank. All rights reserved.
7 | //
8 | 
9 | #import <UIKit/UIKit.h>
10 | 
11 | @interface VideoViewController : UIViewController
12 | 
13 | @end
14 | 
--------------------------------------------------------------------------------
/openCV-Practice/VideoViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | //  VideoViewController.m
3 | //  openCV-Practice
4 | //
5 | //  Created by Realank on 16/5/3.
6 | //  Copyright © 2016 realank. All rights reserved.
7 | //
8 | 
9 | #import "VideoViewController.h"
10 | 
11 | #import <AVFoundation/AVFoundation.h>
12 | #import <AssetsLibrary/AssetsLibrary.h>
13 | #import "OpenCVUtil.h"
14 | typedef void(^PropertyChangeBlock)(AVCaptureDevice *captureDevice);
15 | 
16 | @interface VideoViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
17 | 
18 | @property (strong,nonatomic) AVCaptureSession *captureSession; // coordinates data flow between the input and output devices
19 | @property (strong,nonatomic) AVCaptureDeviceInput *captureDeviceInput; // obtains input data from the AVCaptureDevice
20 | @property (strong,nonatomic) AVCaptureStillImageOutput *captureStillImageOutput; // still-image output stream
21 | @property (strong,nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer; // camera preview layer
22 | @property (weak, nonatomic) IBOutlet UIView *viewContainer;
23 | @property (weak, nonatomic) IBOutlet UIImageView *focusCursor; // focus cursor
24 | 
25 | @property (weak, nonatomic) IBOutlet UIImageView *resultImage;
26 | @property (nonatomic, strong) CAShapeLayer *shapeLayer;
27 | @end
28 | 
29 | @implementation VideoViewController
30 | 
31 | #pragma mark - View controller lifecycle
32 | - (void)viewDidLoad {
33 |     [super viewDidLoad];
34 | 
35 |     _shapeLayer = [CAShapeLayer layer];
36 |     _shapeLayer.frame = _resultImage.bounds;
37 | //    _shapeLayer.backgroundColor = [[UIColor grayColor] colorWithAlphaComponent:0.5].CGColor;
38 |     [_resultImage.layer addSublayer:_shapeLayer];
39 |     _shapeLayer.lineWidth = 3;
40 |     _shapeLayer.strokeColor = [UIColor redColor].CGColor;
41 |     _shapeLayer.fillColor = [UIColor clearColor].CGColor;
42 | 
43 | //    UIBezierPath *path = [UIBezierPath bezierPathWithRect:CGRectMake(0, 0, 20, 20)];
44 | //    _shapeLayer.path = path.CGPath;
45 | 
46 | //    _resultImage.contentMode = UIViewContentModeScaleToFill;
47 | }
48 | -(void)viewWillAppear:(BOOL)animated{
49 |     [super viewWillAppear:animated];
50 |     // set up the capture session
51 |     _captureSession=[[AVCaptureSession alloc]init];
52 |     if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetMedium]) { // set the resolution
53 |         _captureSession.sessionPreset=AVCaptureSessionPresetMedium;
54 |     }
55 | 
56 |     // get the input device
57 |     AVCaptureDevice *captureDevice=[self getCameraDeviceWithPosition:AVCaptureDevicePositionBack]; // get the back camera
58 |     if (!captureDevice) {
59 |         NSLog(@"Failed to get the back camera.");
60 |         return;
61 |     }
62 | 
63 |     NSError *error=nil;
64 |     // create the device input object from the capture device, used to obtain input data
65 |     _captureDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:captureDevice error:&error];
66 |     if (error) {
67 |         NSLog(@"Error creating the device input: %@",error.localizedDescription);
68 |         return;
69 |     }
70 |     // create the device output object, used to obtain output data
71 |     _captureStillImageOutput=[[AVCaptureStillImageOutput alloc]init];
72 |     NSDictionary *outputSettings = @{AVVideoCodecKey:AVVideoCodecJPEG};
73 |     [_captureStillImageOutput setOutputSettings:outputSettings]; // output settings
74 | 
75 |     // add the device input to the session
76 |     if ([_captureSession canAddInput:_captureDeviceInput]) {
77 |         [_captureSession addInput:_captureDeviceInput];
78 |     }
79 | 
80 |     // add the device output to the session
81 |     if ([_captureSession canAddOutput:_captureStillImageOutput]) {
82 | //        [_captureSession addOutput:_captureStillImageOutput];
83 |     }
84 | 
85 |     AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc]
86 |                                                init];
87 |     captureOutput.alwaysDiscardsLateVideoFrames = YES;
88 |     dispatch_queue_t queue;
89 |     queue = dispatch_queue_create("cameraQueue", NULL);
90 |     [captureOutput setSampleBufferDelegate:self queue:queue];
91 |     NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
92 |     NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
93 |     NSDictionary* videoSettings = [NSDictionary
94 |                                    dictionaryWithObject:value forKey:key];
95 |     [captureOutput setVideoSettings:videoSettings];
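    // Note: two of the choices above matter later for OpenCV. Frames are requested
    // as kCVPixelFormatType_32BGRA, so each buffer maps directly onto a 4-channel
    // bitmap (and cv::Mat) with no per-pixel conversion, and the sample-buffer
    // delegate runs on a private serial queue so that the per-frame work in
    // captureOutput:didOutputSampleBuffer:fromConnection: never blocks the main thread.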
96 |     if ([_captureSession canAddOutput:captureOutput]) {
97 |         [_captureSession addOutput:captureOutput];
98 |     }
99 | 
100 | 
101 |     // create the video preview layer, used to show the live camera feed
102 |     _captureVideoPreviewLayer=[[AVCaptureVideoPreviewLayer alloc]initWithSession:self.captureSession];
103 | 
104 |     CALayer *layer=self.viewContainer.layer;
105 |     layer.masksToBounds=YES;
106 | 
107 |     _captureVideoPreviewLayer.frame=layer.bounds;
108 |     _captureVideoPreviewLayer.videoGravity=AVLayerVideoGravityResizeAspectFill; // fill mode
109 |     // add the video preview layer to the view hierarchy
110 | //    [layer addSublayer:_captureVideoPreviewLayer];
111 |     [layer insertSublayer:_captureVideoPreviewLayer below:_shapeLayer];
112 | 
113 |     [self addNotificationToCaptureDevice:captureDevice];
114 |     [self addGestureRecognizer];
115 | }
116 | 
117 | -(void)viewDidAppear:(BOOL)animated{
118 |     [super viewDidAppear:animated];
119 |     [self.captureSession startRunning];
120 | }
121 | 
122 | -(void)viewDidDisappear:(BOOL)animated{
123 |     [super viewDidDisappear:animated];
124 |     [self.captureSession stopRunning];
125 | }
126 | 
127 | -(void)dealloc{
128 |     [self removeNotification];
129 | }
130 | #pragma mark - UI actions
131 | #pragma mark Take a photo
132 | - (IBAction)takeButtonClick:(UIButton *)sender {
133 |     // get a connection from the still-image output
134 |     AVCaptureConnection *captureConnection=[self.captureStillImageOutput connectionWithMediaType:AVMediaTypeVideo];
135 |     // capture the output data through that connection
136 |     [self.captureStillImageOutput captureStillImageAsynchronouslyFromConnection:captureConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
137 |         if (imageDataSampleBuffer) {
138 |             NSData *imageData=[AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
139 |             UIImage *image=[UIImage imageWithData:imageData];
140 |             UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
141 | //            ALAssetsLibrary *assetsLibrary=[[ALAssetsLibrary alloc]init];
142 | //            [assetsLibrary writeImageToSavedPhotosAlbum:[image CGImage] orientation:(ALAssetOrientation)[image imageOrientation] completionBlock:nil];
143 |         }
144 | 
145 |     }];
146 | }
147 | #pragma mark Toggle the front/back camera
148 | - (IBAction)toggleButtonClick:(UIButton *)sender {
149 |     AVCaptureDevice *currentDevice=[self.captureDeviceInput device];
150 |     AVCaptureDevicePosition currentPosition=[currentDevice position];
151 |     [self removeNotificationFromCaptureDevice:currentDevice];
152 |     AVCaptureDevice *toChangeDevice;
153 |     AVCaptureDevicePosition toChangePosition=AVCaptureDevicePositionFront;
154 |     if (currentPosition==AVCaptureDevicePositionUnspecified||currentPosition==AVCaptureDevicePositionFront) {
155 |         toChangePosition=AVCaptureDevicePositionBack;
156 |     }
157 |     toChangeDevice=[self getCameraDeviceWithPosition:toChangePosition];
158 |     [self addNotificationToCaptureDevice:toChangeDevice];
159 |     // create the device input object for the camera we are switching to
160 |     AVCaptureDeviceInput *toChangeDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:toChangeDevice error:nil];
161 | 
162 |     // session configuration changes must be wrapped in beginConfiguration/commitConfiguration
163 |     [self.captureSession beginConfiguration];
164 |     // remove the old input
165 |     [self.captureSession removeInput:self.captureDeviceInput];
166 |     // add the new input
167 |     if ([self.captureSession canAddInput:toChangeDeviceInput]) {
168 |         [self.captureSession addInput:toChangeDeviceInput];
169 |         self.captureDeviceInput=toChangeDeviceInput;
170 |     }
171 |     // commit the session configuration
172 |     [self.captureSession commitConfiguration];
173 | 
174 | }
175 | 
176 | 
177 | #pragma mark - Notifications
178 | /**
179 |  *  Add notifications for the input device
180 |  */
181 | -(void)addNotificationToCaptureDevice:(AVCaptureDevice *)captureDevice{
182 |     // note: subject-area-change monitoring must be enabled on the device before observing this notification
183 |     [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
184 |         captureDevice.subjectAreaChangeMonitoringEnabled=YES;
185 |     }];
186 |     NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
187 |     // the subject area changed
188 |     [notificationCenter addObserver:self selector:@selector(areaChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
189 | }
190 | -(void)removeNotificationFromCaptureDevice:(AVCaptureDevice *)captureDevice{
191 |     NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
192 |     [notificationCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
193 | }
194 | /**
195 |  *  Remove all notifications
196 |  */
197 | -(void)removeNotification{
198 |     NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
199 |     [notificationCenter removeObserver:self];
200 | }
201 | 
202 | -(void)addNotificationToCaptureSession:(AVCaptureSession *)captureSession{
203 |     NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
204 |     // the session hit a runtime error
205 |     [notificationCenter addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:captureSession];
206 | }
207 | 
208 | /**
209 |  *  A device was connected
210 |  *
211 |  *  @param notification the notification object
212 |  */
213 | -(void)deviceConnected:(NSNotification *)notification{
214 |     NSLog(@"Device connected...");
215 | }
216 | /**
217 |  *  A device was disconnected
218 |  *
219 |  *  @param notification the notification object
220 |  */
221 | -(void)deviceDisconnected:(NSNotification *)notification{
222 |     NSLog(@"Device disconnected.");
223 | }
224 | /**
225 |  *  The subject area changed
226 |  *
227 |  *  @param notification the notification object
228 |  */
229 | -(void)areaChange:(NSNotification *)notification{
230 |     NSLog(@"Subject area changed...");
231 | }
232 | 
233 | /**
234 |  *  The session hit a runtime error
235 |  *
236 |  *  @param notification the notification object
237 |  */
238 | -(void)sessionRuntimeError:(NSNotification *)notification{
239 |     NSLog(@"The session hit a runtime error.");
240 | }
241 | 
242 | #pragma mark - Private methods
243 | 
244 | /**
245 |  *  Get the camera at the given position
246 |  *
247 |  *  @param position camera position
248 |  *
249 |  *  @return the camera device
250 |  */
251 | -(AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition )position{
252 |     AVCaptureDeviceDiscoverySession* discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:position];
253 |     for (AVCaptureDevice *camera in discoverySession.devices) {
254 |         if ([camera position]==position) {
255 |             return camera;
256 |         }
257 |     }
258 |     return nil;
259 | }
260 | 
261 | /**
262 |  *  Shared helper for changing device properties
263 |  *
264 |  *  @param propertyChange the property-change block
265 |  */
266 | -(void)changeDeviceProperty:(PropertyChangeBlock)propertyChange{
267 |     AVCaptureDevice *captureDevice= [self.captureDeviceInput device];
268 |     NSError *error;
269 |     // note: lockForConfiguration: must be called before changing device properties, and unlockForConfiguration afterwards
270 |     if ([captureDevice lockForConfiguration:&error]) {
271 |         propertyChange(captureDevice);
272 |         [captureDevice unlockForConfiguration];
273 |     }else{
274 |         NSLog(@"Error while setting device properties: %@",error.localizedDescription);
275 |     }
276 | }
277 | 
278 | /**
279 |  *  Set the flash mode
280 |  *
281 |  *  @param flashMode the flash mode
282 |  */
283 | -(void)setFlashMode:(AVCaptureFlashMode )flashMode{
284 |     [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
285 |         if ([captureDevice isFlashModeSupported:flashMode]) {
286 |             [captureDevice setFlashMode:flashMode];
287 |         }
288 |     }];
289 | }
290 | /**
291 |  *  Set the focus mode
292 |  *
293 |  *  @param focusMode the focus mode
294 |  */
295 | -(void)setFocusMode:(AVCaptureFocusMode )focusMode{
296 |     [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
297 |         if ([captureDevice isFocusModeSupported:focusMode]) {
298 |             [captureDevice setFocusMode:focusMode];
299 |         }
300 |     }];
301 | }
302 | /**
303 |  *  Set the exposure mode
304 |  *
305 |  *  @param exposureMode the exposure mode
306 |  */
307 | -(void)setExposureMode:(AVCaptureExposureMode)exposureMode{
308 |     [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
309 |         if ([captureDevice isExposureModeSupported:exposureMode]) {
310 |             [captureDevice setExposureMode:exposureMode];
311 |         }
312 |     }];
313 | }
314 | /**
315 |  *  Set the focus and exposure point of interest
316 |  *
317 |  *  @param point the point of interest
318 |  */
319 | -(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point{
320 |     [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
321 |         if ([captureDevice isFocusModeSupported:focusMode]) {
322 |             [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
323 |         }
324 |         if ([captureDevice isFocusPointOfInterestSupported]) {
325 |             [captureDevice setFocusPointOfInterest:point];
326 |         }
327 |         if ([captureDevice isExposureModeSupported:exposureMode]) {
328 |             [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
329 |         }
330 |         if ([captureDevice isExposurePointOfInterestSupported]) {
331 |             [captureDevice setExposurePointOfInterest:point];
332 |         }
333 |     }];
334 | }
335 | 
336 | /**
337 |  *  Add a tap gesture; tapping focuses at the tapped point
338 |  */
339 | -(void)addGestureRecognizer{
340 |     UITapGestureRecognizer *tapGesture=[[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(tapScreen:)];
341 |     [self.viewContainer addGestureRecognizer:tapGesture];
342 | }
343 | -(void)tapScreen:(UITapGestureRecognizer *)tapGesture{
344 |     CGPoint point= [tapGesture locationInView:self.viewContainer];
345 |     // convert the UI coordinate into a camera coordinate
346 |     CGPoint cameraPoint= [self.captureVideoPreviewLayer captureDevicePointOfInterestForPoint:point];
347 |     [self setFocusCursorWithPoint:point];
348 |     [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint];
349 | }
350 | 
351 | 
352 | /**
353 |  *  Position the focus cursor
354 |  *
355 |  *  @param point the cursor position
356 |  */
357 | -(void)setFocusCursorWithPoint:(CGPoint)point{
358 |     self.focusCursor.center=point;
359 |     self.focusCursor.transform=CGAffineTransformMakeScale(1.5, 1.5);
360 |     self.focusCursor.alpha=1.0;
361 |     [UIView animateWithDuration:1.0 animations:^{
362 |         self.focusCursor.transform=CGAffineTransformIdentity;
363 |     } completion:^(BOOL finished) {
364 |         self.focusCursor.alpha=0;
365 | 
366 |     }];
367 | }
368 | 
369 | 
370 | #pragma mark AVCaptureSession delegate
371 | - (void)captureOutput:(AVCaptureOutput *)captureOutput
372 | didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
373 |        fromConnection:(AVCaptureConnection *)connection
374 | {
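    // Note: the static flag below is a cheap re-entrancy guard. The camera can
    // deliver frames faster than OpenCV processes them, so any frame that arrives
    // while one is still in flight is simply dropped (alwaysDiscardsLateVideoFrames
    // already covers the AVFoundation side of the same problem).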
#pragma mark AVCaptureSession delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Drop incoming frames while a previous frame is still being processed.
    static int processing = 0;
    if (processing) {
        return;
    }
    processing = 1;

    // Wrap the BGRA pixel buffer in a CGImage.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress,
                                                    width, height, 8, bytesPerRow, colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);

    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    // Mirror and rotate so the image matches what the user sees in the preview.
    UIImage *image = [UIImage imageWithCGImage:newImage scale:0.5 orientation:UIImageOrientationLeftMirrored];

    CGImageRelease(newImage);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    if (0) { // toggle: 1 = face detection, 0 = lane detection
        // show faces
        NSArray *rectArray = [OpenCVUtil facePointDetectForImage:[self fixOrientation:image]];
        if (rectArray.count > 0) {
            UIBezierPath *totalPath = [UIBezierPath bezierPath];
            for (NSValue *rectValue in rectArray) {
                CGRect rect = [rectValue CGRectValue];
                rect = [self convertRectFromRect:rect toSize:_resultImage.bounds.size];
                UIBezierPath *subpath = [UIBezierPath bezierPathWithRect:rect];
                [totalPath appendPath:subpath];
            }

            __weak __typeof(self) weakSelf = self;
            dispatch_async(dispatch_get_main_queue(), ^{
                weakSelf.shapeLayer.path = totalPath.CGPath;
                weakSelf.resultImage.image = image;
                processing = 0;
            });
        }else{
            processing = 0;
        }
    }else{
        // show lanes
        NSArray *images = [OpenCVUtil laneDetectForImage:[self fixOrientation:image]];
        if (images.count) {
            __weak __typeof(self) weakSelf = self;
            dispatch_async(dispatch_get_main_queue(), ^{
                weakSelf.resultImage.image = images.lastObject;
                processing = 0;
            });
        }else{
            // Reset the guard even when no result image came back,
            // otherwise frame processing would stop for good.
            processing = 0;
        }
    }
}
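// Aside (not original code): the static `processing` flag above is a minimal
// frame-dropping guard. AVFoundation can also drop late frames at the output
// itself, which is worth enabling wherever the video data output is configured:
//
//     output.alwaysDiscardsLateVideoFrames = YES;
//
// With that set, frames arriving while the delegate queue is busy are discarded
// instead of being queued up behind slow OpenCV processing.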
- (UIImage *)fixOrientation:(UIImage *)aImage {

    // No-op if the orientation is already correct.
    if (aImage.imageOrientation == UIImageOrientationUp)
        return aImage;

    // We need to calculate the proper transformation to make the image upright.
    // We do it in 2 steps: rotate if Left/Right/Down, then flip if Mirrored.
    CGAffineTransform transform = CGAffineTransformIdentity;

    switch (aImage.imageOrientation) {
        case UIImageOrientationDown:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, aImage.size.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;

        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
            transform = CGAffineTransformRotate(transform, M_PI_2);
            break;

        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, 0, aImage.size.height);
            transform = CGAffineTransformRotate(transform, -M_PI_2);
            break;
        default:
            break;
    }

    switch (aImage.imageOrientation) {
        case UIImageOrientationUpMirrored:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;

        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.height, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            transform = CGAffineTransformTranslate(transform, 0, aImage.size.width);
            transform = CGAffineTransformScale(transform, 1, -1);
            break;
        default:
            break;
    }

    // Now draw the underlying CGImage into a new context, applying the transform
    // calculated above.
    CGContextRef ctx = CGBitmapContextCreate(NULL, aImage.size.width, aImage.size.height,
                                             CGImageGetBitsPerComponent(aImage.CGImage), 0,
                                             CGImageGetColorSpace(aImage.CGImage),
                                             CGImageGetBitmapInfo(aImage.CGImage));
    CGContextConcatCTM(ctx, transform);
    switch (aImage.imageOrientation) {
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            // Width and height are swapped for 90-degree-rotated sources.
            CGContextDrawImage(ctx, CGRectMake(0, 0, aImage.size.height, aImage.size.width), aImage.CGImage);
            break;

        default:
            CGContextDrawImage(ctx, CGRectMake(0, 0, aImage.size.width, aImage.size.height), aImage.CGImage);
            break;
    }

    // Create a new UIImage from the drawing context.
    CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
    UIImage *img = [UIImage imageWithCGImage:cgimg];
    CGContextRelease(ctx);
    CGImageRelease(cgimg);
    return img;
}

/**
 *  Scale a normalized rect (components in 0..1) up to the given size.
 */
- (CGRect)convertRectFromRect:(CGRect)fromRect toSize:(CGSize)size{

    return CGRectMake(size.width * fromRect.origin.x, size.height * fromRect.origin.y, size.width * fromRect.size.width, size.height * fromRect.size.height);
}

@end
--------------------------------------------------------------------------------
/openCV-Practice/VideoViewController.xib:
--------------------------------------------------------------------------------
(Interface Builder XML omitted.)
--------------------------------------------------------------------------------
/openCV-Practice/ViewController.h:
--------------------------------------------------------------------------------
//
//  ViewController.h
//  openCV-Practice
//
//  Created by Realank on 16/4/29.
//  Copyright © 2016 realank. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface ViewController : UIViewController


@end

--------------------------------------------------------------------------------
/openCV-Practice/ViewController.m:
--------------------------------------------------------------------------------
//
//  ViewController.m
//  openCV-Practice
//
//  Created by Realank on 16/4/29.
//  Copyright © 2016 realank. All rights reserved.
//

#import "ViewController.h"
#import "VideoViewController.h"
#import "LaneDetectViewController.h"
#import "OpenCVUtil.h"

@interface ViewController ()

@property (weak, nonatomic) IBOutlet UIImageView *imgView;
@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
25 | NSLog(@"very before"); 26 | NSLog(@"before"); 27 | self.imgView.image = [OpenCVUtil faceDetectForImage:[UIImage imageNamed:@"face"]]; 28 | NSLog(@"after"); 29 | } 30 | - (IBAction)faceDetect:(id)sender { 31 | VideoViewController *vc = [[VideoViewController alloc]init]; 32 | [self.navigationController pushViewController:vc animated:YES]; 33 | } 34 | - (IBAction)laneDetect:(id)sender { 35 | LaneDetectViewController *vc = [[LaneDetectViewController alloc]init]; 36 | [self.navigationController pushViewController:vc animated:YES]; 37 | } 38 | 39 | 40 | @end 41 | -------------------------------------------------------------------------------- /openCV-Practice/linefinder.h: -------------------------------------------------------------------------------- 1 | /*------------------------------------------------------------------------------------------*\ 2 | Lane Detection 3 | 4 | General idea and some code modified from: 5 | chapter 7 of Computer Vision Programming using the OpenCV Library. 6 | by Robert Laganiere, Packt Publishing, 2011. 7 | 8 | This program is free software; permission is hereby granted to use, copy, modify, 9 | and distribute this source code, or portions thereof, for any purpose, without fee, 10 | subject to the restriction that the copyright notice may not be removed 11 | or altered from any source or altered source distribution. 12 | The software is released on an as-is basis and without any warranties of any kind. 13 | In particular, the software is not guaranteed to be fault-tolerant or free from failure. 14 | The author disclaims all warranties with regard to this software, any use, 15 | and any consequent failure, is purely the responsibility of the user. 16 | 17 | Copyright (C) 2013 Jason Dorweiler, www.transistor.io 18 | \*------------------------------------------------------------------------------------------*/ 19 | 20 | #if !defined LINEF 21 | #define LINEF 22 | 23 | #include 24 | #include 25 | #define PI 3.1415926 26 | 27 | class LineFinder { 28 | 29 | private: 30 | 31 | // original image 32 | cv::Mat img; 33 | 34 | // vector containing the end points 35 | // of the detected lines 36 | std::vector lines; 37 | 38 | // accumulator resolution parameters 39 | double deltaRho; 40 | double deltaTheta; 41 | 42 | // minimum number of votes that a line 43 | // must receive before being considered 44 | int minVote; 45 | 46 | // min length for a line 47 | double minLength; 48 | 49 | // max allowed gap along the line 50 | double maxGap; 51 | 52 | // distance to shift the drawn lines down when using a ROI 53 | int shift; 54 | 55 | public: 56 | 57 | // Default accumulator resolution is 1 pixel by 1 degree 58 | // no gap, no mimimum length 59 | LineFinder() : deltaRho(1), deltaTheta(PI/180), minVote(10), minLength(0.), maxGap(0.) 
--------------------------------------------------------------------------------
/openCV-Practice/linefinder.h:
--------------------------------------------------------------------------------
/*------------------------------------------------------------------------------------------*\
   Lane Detection

   General idea and some code modified from:
   chapter 7 of Computer Vision Programming using the OpenCV Library.
   by Robert Laganiere, Packt Publishing, 2011.

   This program is free software; permission is hereby granted to use, copy, modify,
   and distribute this source code, or portions thereof, for any purpose, without fee,
   subject to the restriction that the copyright notice may not be removed
   or altered from any source or altered source distribution.
   The software is released on an as-is basis and without any warranties of any kind.
   In particular, the software is not guaranteed to be fault-tolerant or free from failure.
   The author disclaims all warranties with regard to this software, any use,
   and any consequent failure, is purely the responsibility of the user.

   Copyright (C) 2013 Jason Dorweiler, www.transistor.io
\*------------------------------------------------------------------------------------------*/

#if !defined LINEF
#define LINEF

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <vector>
#include <iostream>

#define PI 3.1415926

class LineFinder {

private:

    // original image
    cv::Mat img;

    // vector containing the end points
    // of the detected lines
    std::vector<cv::Vec4i> lines;

    // accumulator resolution parameters
    double deltaRho;
    double deltaTheta;

    // minimum number of votes that a line
    // must receive before being considered
    int minVote;

    // min length for a line
    double minLength;

    // max allowed gap along the line
    double maxGap;

    // distance to shift the drawn lines down when using a ROI
    int shift;

public:

    // Default accumulator resolution is 1 pixel by 1 degree,
    // no gap, no minimum length
    LineFinder() : deltaRho(1), deltaTheta(PI/180), minVote(10), minLength(0.), maxGap(0.), shift(0) {}

    // Set the resolution of the accumulator
    void setAccResolution(double dRho, double dTheta) {

        deltaRho = dRho;
        deltaTheta = dTheta;
    }

    // Set the minimum number of votes
    void setMinVote(int minv) {

        minVote = minv;
    }

    // Set line length and gap
    void setLineLengthAndGap(double length, double gap) {

        minLength = length;
        maxGap = gap;
    }

    // Set image shift
    void setShift(int imgShift) {

        shift = imgShift;
    }

    // Apply the probabilistic Hough transform
    std::vector<cv::Vec4i> findLines(cv::Mat& binary) {

        lines.clear();
        cv::HoughLinesP(binary, lines, deltaRho, deltaTheta, minVote, minLength, maxGap);

        return lines;
    }

    // Draw the detected lines on an image
    void drawDetectedLines(cv::Mat &image, cv::Scalar color = cv::Scalar(255)) {

        // Draw the lines
        std::vector<cv::Vec4i>::const_iterator it2 = lines.begin();

        while (it2 != lines.end()) {

            cv::Point pt1((*it2)[0], (*it2)[1] + shift);
            cv::Point pt2((*it2)[2], (*it2)[3] + shift);

            cv::line(image, pt1, pt2, color, 6);
            std::cout << " HoughP line: (" << pt1 << "," << pt2 << ")\n";
            ++it2;
        }
    }

    // Eliminates lines that do not have an orientation equal to
    // the ones specified in the input matrix of orientations.
    // At least the given percentage of pixels on the line must
    // be within plus or minus delta of the corresponding orientation.
    std::vector<cv::Vec4i> removeLinesOfInconsistentOrientations(
        const cv::Mat &orientations, double percentage, double delta) {

        std::vector<cv::Vec4i>::iterator it = lines.begin();

        // check all lines
        while (it != lines.end()) {

            // end points
            int x1 = (*it)[0];
            int y1 = (*it)[1];
            int x2 = (*it)[2];
            int y2 = (*it)[3];

            // line orientation + 90 degrees to get the parallel line
            double ori1 = atan2(static_cast<double>(y1 - y2), static_cast<double>(x1 - x2)) + PI/2;
            if (ori1 > PI) ori1 = ori1 - 2*PI;

            double ori2 = atan2(static_cast<double>(y2 - y1), static_cast<double>(x2 - x1)) + PI/2;
            if (ori2 > PI) ori2 = ori2 - 2*PI;

            // for all points on the line
            cv::LineIterator lit(orientations, cv::Point(x1, y1), cv::Point(x2, y2));
            int i, count = 0;
            for (i = 0, count = 0; i < lit.count; i++, ++lit) {

                float ori = *(reinterpret_cast<float *>(*lit));

                // is the line orientation similar to the gradient orientation?
                if (std::min(fabs(ori - ori1), fabs(ori - ori2)) < delta) count++;
            }

            double consistency = count / static_cast<double>(i);

            // zero out lines of inconsistent orientation
            if (consistency < percentage) {

                (*it)[0] = (*it)[1] = (*it)[2] = (*it)[3] = 0;

            }

            ++it;
        }

        return lines;
    }
};


#endif
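removeLinesOfInconsistentOrientations() expects a per-pixel orientation map: one CV_32F angle per pixel, which it samples along each candidate segment. A sketch of how such a map could be built (an assumption, not code from this repo; `frame`, `finder`, and the thresholds are illustrative):

    cv::Mat gray, gx, gy, orientations;
    cv::cvtColor(frame, gray, CV_RGB2GRAY);
    cv::Sobel(gray, gx, CV_32F, 1, 0);           // horizontal gradient
    cv::Sobel(gray, gy, CV_32F, 0, 1);           // vertical gradient
    cv::phase(gx, gy, orientations);             // gradient angle per pixel, radians in [0, 2*PI)
    finder.removeLinesOfInconsistentOrientations(orientations, 0.7, 0.3);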
--------------------------------------------------------------------------------
/openCV-Practice/main.m:
--------------------------------------------------------------------------------
//
//  main.m
//  openCV-Practice
//
//  Created by Realank on 16/4/29.
//  Copyright © 2016 realank. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "AppDelegate.h"

int main(int argc, char * argv[]) {
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}

--------------------------------------------------------------------------------
/openCV-Practice/openCVUtil.h:
--------------------------------------------------------------------------------
//
//  OpenCVUtil.h
//  openCV-Practice
//
//  Created by Realank on 16/4/29.
//  Copyright © 2016 realank. All rights reserved.
//

#import <Foundation/Foundation.h>
@class UIImage;

@interface OpenCVUtil : NSObject

+ (UIImage*)convertImage:(UIImage*)image;
+ (UIImage*)faceDetectForImage:(UIImage*)image;
+ (UIImage*)circleDetectForImage:(UIImage*)image;
+ (NSArray*)facePointDetectForImage:(UIImage*)image;
+ (NSArray*)laneDetectForImage:(UIImage*)image;
@end

--------------------------------------------------------------------------------
/openCV-Practice/openCVUtil.mm:
--------------------------------------------------------------------------------
//
//  OpenCVUtil.mm
//  openCV-Practice
//
//  Created by Realank on 16/4/29.
//  Copyright © 2016 realank. All rights reserved.
//

#import "OpenCVUtil.h"
#import <UIKit/UIKit.h>
#import <opencv2/opencv.hpp>
#import <opencv2/highgui/ios.h>   // UIImageToMat / MatToUIImage (OpenCV 2.4.x framework)
#include "opencv2/highgui/highgui.hpp"
#include <vector>
#import "linefinder.h"

#define PI 3.1415926

@interface OpenCVUtil (){

}

@end

@implementation OpenCVUtil

+ (UIImage*)convertImage:(UIImage*)image {
    cv::Mat cvImage;
    UIImageToMat(image, cvImage);

    if(!cvImage.empty()){
        cv::Mat gray;
        // convert the image to grayscale
        cv::cvtColor(cvImage, gray, CV_RGB2GRAY);
        // apply a Gaussian blur to suppress small edges
        cv::GaussianBlur(gray, gray, cv::Size(5,5), 1.2, 1.2);
        // compute the edge map
        cv::Mat edges;
        cv::Canny(gray, edges, 0, 50);
        // fill the canvas with white
        cvImage.setTo(cv::Scalar::all(255));
        // paint the edge pixels
        cvImage.setTo(cv::Scalar(0,128,255,255), edges);
        // convert the Mat back to a UIImage for display
        return MatToUIImage(cvImage);
    }
    return nil;
}

+ (NSArray*)facePointDetectForImage:(UIImage*)image{

    static cv::CascadeClassifier faceDetector;

    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        // load the Haar cascade from the bundled xml file
        NSString* cascadePath = [[NSBundle mainBundle]
                                 pathForResource:@"haarcascade_frontalface_alt2"
                                 ofType:@"xml"];
        faceDetector.load([cascadePath UTF8String]);
    });


    cv::Mat faceImage;
    UIImageToMat(image, faceImage);

    // convert to grayscale
    cv::Mat gray;
    cvtColor(faceImage, gray, CV_BGR2GRAY);

    // detect faces
    std::vector<cv::Rect> faces;
    faceDetector.detectMultiScale(gray, faces, 1.1, 2, CV_HAAR_FIND_BIGGEST_OBJECT, cv::Size(30,30));

    // return the face rects normalized to [0, 1] in both dimensions
    NSMutableArray *array = [NSMutableArray array];

    for(unsigned int i = 0; i < faces.size(); i++)
    {
        const cv::Rect& face = faces[i];
        float height = (float)faceImage.rows;
        float width = (float)faceImage.cols;
        CGRect rect = CGRectMake(face.x/width, face.y/height, face.width/width, face.height/height);
        [array addObject:[NSValue valueWithCGRect:rect]];
    }


    return [array copy];
}
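// Usage sketch (comment only; `photo` and `imageView` are illustrative): the rects
// returned above are normalized to [0, 1], so scale them into the target view before
// drawing, as VideoViewController's convertRectFromRect:toSize: does:
//
//     NSArray *rects = [OpenCVUtil facePointDetectForImage:photo];
//     for (NSValue *value in rects) {
//         CGRect r = [value CGRectValue];
//         CGRect viewRect = CGRectMake(r.origin.x * imageView.bounds.size.width,
//                                      r.origin.y * imageView.bounds.size.height,
//                                      r.size.width  * imageView.bounds.size.width,
//                                      r.size.height * imageView.bounds.size.height);
//         // draw viewRect, e.g. through a CAShapeLayer
//     }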
+ (UIImage*)faceDetectForImage:(UIImage*)image {
    static cv::CascadeClassifier faceDetector;

    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        // load the Haar cascade from the bundled xml file
        NSString* cascadePath = [[NSBundle mainBundle]
                                 pathForResource:@"haarcascade_frontalface_alt"
                                 ofType:@"xml"];
        faceDetector.load([cascadePath UTF8String]);
    });


    cv::Mat faceImage;
    UIImageToMat(image, faceImage);

    // convert to grayscale
    cv::Mat gray;
    cvtColor(faceImage, gray, CV_BGR2GRAY);

    NSLog(@"%d", faceImage.channels());

    // detect faces
    std::vector<cv::Rect> faces;
    faceDetector.detectMultiScale(gray, faces, 1.1, 2, 0, cv::Size(30,30));

    // draw a red rectangle around every detected face
    for(unsigned int i = 0; i < faces.size(); i++)
    {
        const cv::Rect& face = faces[i];
        cv::Point tl(face.x, face.y);
        cv::Point br = tl + cv::Point(face.width, face.height);
        cv::Scalar red = cv::Scalar(255, 0, 0, 255); // RGBA
        cv::rectangle(faceImage, tl, br, red, 11, 8, 0);
    }

    return MatToUIImage(faceImage);
}

+ (UIImage*)circleDetectForImage:(UIImage*)image{
    cv::Mat circleImage, src_gray;
    UIImageToMat(image, circleImage);
    /// Convert it to gray
    cvtColor(circleImage, src_gray, CV_BGR2GRAY);

    /// Reduce the noise so we avoid false circle detection
    GaussianBlur(src_gray, src_gray, cv::Size(9, 9), 2, 2);

    std::vector<cv::Vec3f> circles;

    /// Apply the Hough transform to find the circles
    HoughCircles(src_gray, circles, CV_HOUGH_GRADIENT, 1, src_gray.rows/8, 200, 100, 0, 0);

    /// Draw the circles detected
    for(size_t i = 0; i < circles.size(); i++)
    {
        cv::Point center(cvRound(circles[i][0]), cvRound(circles[i][1]));
        int radius = cvRound(circles[i][2]);
        // circle center
        circle(circleImage, center, 3, cv::Scalar(0,255,0,255), -1, 8, 0);
        // circle outline
        circle(circleImage, center, radius, cv::Scalar(0,0,255,255), 3, 8, 0);
    }

    /// Return the annotated image
    return MatToUIImage(circleImage);
}
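// Usage sketch (comment only; `photo` and `imageView` are illustrative): the cascade and
// Hough calls in this class are CPU-heavy, so in a real app run them off the main thread:
//
//     dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
//         UIImage *marked = [OpenCVUtil faceDetectForImage:photo];
//         dispatch_async(dispatch_get_main_queue(), ^{
//             imageView.image = marked;
//         });
//     });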
+ (NSArray*)laneDetectForImage:(UIImage*)rawImage{

    // Collects the intermediate image of every stage so callers can inspect the
    // pipeline; the last object is the final annotated frame.
    NSMutableArray *imageArray = [NSMutableArray array];

    cv::Mat image;
    UIImageToMat(rawImage, image);

    // stage 1: bright-pixel mask in HSV space
    cv::Mat whiteOnly;
    cvtColor(image, whiteOnly, CV_RGB2HSV);
    cv::inRange(whiteOnly, cv::Scalar(20, 20, 20), cv::Scalar(255, 255, 255), whiteOnly);
    [imageArray addObject:MatToUIImage(whiteOnly)];

    cv::Mat gray;
    cv::cvtColor(image, gray, CV_RGB2GRAY);
    std::vector<std::string> codes;
    cv::Mat corners;
    findDataMatrix(gray, codes, corners);
    drawDataMatrixCodes(image, codes, corners);

    // set the ROI for the image: skip the top of the frame
    cv::Rect roi(0, image.cols/3, image.cols - 1, image.rows - image.cols/3);
    cv::Mat imgROI = image(roi);

    // stage 2: Canny edge map (an inverted copy is saved for inspection)
    cv::Mat contours;
    Canny(imgROI, contours, 50, 250);
    cv::Mat contoursInv;
    cv::threshold(contours, contoursInv, 128, 255, cv::THRESH_BINARY_INV);
    [imageArray addObject:MatToUIImage(contoursInv)];

    /*
       Standard Hough transform for line detection: start with a high vote threshold
       and lower it until at least 5 candidate lines are found, or the threshold
       bottoms out. The algorithm this is adapted from also feeds the threshold back
       across frames (raising it by 25 when the previous frame found lines); here the
       value is re-initialized on every call, so the search simply restarts each frame.
    */
    int houghVote = 225;
    std::vector<cv::Vec2f> lines;
    while (lines.size() < 5 && houghVote > 0) {
        HoughLines(contours, lines, 1, PI/180, houghVote);
        houghVote -= 5;
    }
    cv::Mat result(imgROI.size(), CV_8U, cv::Scalar(255));
    imgROI.copyTo(result);


    // stage 3: draw the accepted standard-Hough lines
    std::vector<cv::Vec2f>::const_iterator it = lines.begin();
    cv::Mat hough(imgROI.size(), CV_8U, cv::Scalar(0));
    while (it != lines.end()) {

        float rho = (*it)[0];   // first element is distance rho
        float theta = (*it)[1]; // second element is angle theta

        // filter out near-vertical and near-horizontal lines
        if ((theta > 0.1 && theta < 1.47) || (theta < 3.04 && theta > 1.67)) {

            // point of intersection of the line with the first row
            cv::Point pt1(rho/cos(theta), 0);
            // point of intersection of the line with the last row
            cv::Point pt2((rho - result.rows*sin(theta))/cos(theta), result.rows);
            // draw a white line
            line(result, pt1, pt2, cv::Scalar(255), 8);
            line(hough, pt1, pt2, cv::Scalar(255), 8);
        }

        ++it;
    }
    [imageArray addObject:MatToUIImage(result)];

    // stage 4: probabilistic Hough (LineFinder) on the same edge map
    LineFinder ld;

    // Set probabilistic Hough parameters
    ld.setLineLengthAndGap(60, 10);
    ld.setMinVote(4);

    // Detect lines
    std::vector<cv::Vec4i> li = ld.findLines(contours);
    cv::Mat houghP(imgROI.size(), CV_8U, cv::Scalar(0));
    ld.setShift(0);
    ld.drawDetectedLines(houghP);

    [imageArray addObject:MatToUIImage(houghP)];

    // stage 5: keep only pixels confirmed by both Hough variants, then invert
    bitwise_and(houghP, hough, houghP);
    cv::Mat houghPinv(imgROI.size(), CV_8U, cv::Scalar(0));
    threshold(houghP, houghPinv, 150, 255, cv::THRESH_BINARY_INV); // threshold and invert to black lines

    [imageArray addObject:MatToUIImage(houghPinv)];

    // stage 6: re-detect on the cleaned-up edges
    Canny(houghPinv, contours, 100, 350);
    li = ld.findLines(contours);

    [imageArray addObject:MatToUIImage(contours)];

    // final stage: draw the surviving segments back onto the full frame,
    // shifted down to compensate for the ROI crop
    ld.setLineLengthAndGap(5, 2);
    ld.setMinVote(1);
    ld.setShift(image.cols/3);
    ld.drawDetectedLines(image);

    NSLog(@"line segments: %lu", (unsigned long)li.size());
    [imageArray addObject:MatToUIImage(image)];

    return [imageArray copy];
}

@end
--------------------------------------------------------------------------------
/openCV-PracticeTests/Info.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>NSCameraUsageDescription</key>
	<string>Please give me camera permission</string>
	<key>CFBundleDevelopmentRegion</key>
	<string>en</string>
	<key>CFBundleExecutable</key>
	<string>$(EXECUTABLE_NAME)</string>
	<key>CFBundleIdentifier</key>
	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
	<key>CFBundleInfoDictionaryVersion</key>
	<string>6.0</string>
	<key>CFBundleName</key>
	<string>$(PRODUCT_NAME)</string>
	<key>CFBundlePackageType</key>
	<string>BNDL</string>
	<key>CFBundleShortVersionString</key>
	<string>1.0</string>
	<key>CFBundleSignature</key>
	<string>????</string>
	<key>CFBundleVersion</key>
	<string>1</string>
</dict>
</plist>
--------------------------------------------------------------------------------
/openCV-PracticeTests/openCV_PracticeTests.m:
--------------------------------------------------------------------------------
//
//  openCV_PracticeTests.m
//  openCV-PracticeTests
//
//  Created by Realank on 16/4/29.
//  Copyright © 2016 realank. All rights reserved.
//

#import <XCTest/XCTest.h>

@interface openCV_PracticeTests : XCTestCase

@end

@implementation openCV_PracticeTests

- (void)setUp {
    [super setUp];
    // Put setup code here. This method is called before the invocation of each test method in the class.
}

- (void)tearDown {
    // Put teardown code here. This method is called after the invocation of each test method in the class.
    [super tearDown];
}

- (void)testExample {
    // This is an example of a functional test case.
    // Use XCTAssert and related functions to verify your tests produce the correct results.
}

- (void)testPerformanceExample {
    // This is an example of a performance test case.
    [self measureBlock:^{
        // Put the code you want to measure the time of here.
    }];
}

@end

--------------------------------------------------------------------------------
/openCV-PracticeUITests/Info.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>CFBundleDevelopmentRegion</key>
	<string>en</string>
	<key>CFBundleExecutable</key>
	<string>$(EXECUTABLE_NAME)</string>
	<key>CFBundleIdentifier</key>
	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
	<key>CFBundleInfoDictionaryVersion</key>
	<string>6.0</string>
	<key>CFBundleName</key>
	<string>$(PRODUCT_NAME)</string>
	<key>CFBundlePackageType</key>
	<string>BNDL</string>
	<key>CFBundleShortVersionString</key>
	<string>1.0</string>
	<key>CFBundleSignature</key>
	<string>????</string>
	<key>CFBundleVersion</key>
	<string>1</string>
</dict>
</plist>
--------------------------------------------------------------------------------
/openCV-PracticeUITests/openCV_PracticeUITests.m:
--------------------------------------------------------------------------------
//
//  openCV_PracticeUITests.m
//  openCV-PracticeUITests
//
//  Created by Realank on 16/4/29.
//  Copyright © 2016 realank. All rights reserved.
//

#import <XCTest/XCTest.h>

@interface openCV_PracticeUITests : XCTestCase

@end

@implementation openCV_PracticeUITests

- (void)setUp {
    [super setUp];

    // Put setup code here. This method is called before the invocation of each test method in the class.

    // In UI tests it is usually best to stop immediately when a failure occurs.
    self.continueAfterFailure = NO;
    // UI tests must launch the application that they test. Doing this in setUp ensures it happens for each test method.
    [[[XCUIApplication alloc] init] launch];

    // In UI tests it's important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.
}

- (void)tearDown {
    // Put teardown code here. This method is called after the invocation of each test method in the class.
    [super tearDown];
}

- (void)testExample {
    // Use recording to get started writing UI tests.
    // Use XCTAssert and related functions to verify your tests produce the correct results.
}

@end
--------------------------------------------------------------------------------