├── .hgignore
├── CameraDetect.xcodeproj
│   └── project.pbxproj
├── CameraExampleAppDelegate.h
├── CameraExampleAppDelegate.m
├── CameraExampleViewController.h
├── CameraExampleViewController.mm
├── Info.plist
├── README.md
├── data
│   ├── grace_hopper.jpg
│   └── yolo_labels.txt
├── en.lproj
│   └── MainStoryboard_iPhone.storyboard
├── ios_image_load.h
├── ios_image_load.mm
├── main.mm
├── squarePNG.png
├── tensorflow_utils.h
└── tensorflow_utils.mm
--------------------------------------------------------------------------------
/.hgignore:
--------------------------------------------------------------------------------
1 | syntax:glob
2 | data/*.pb
3 | CameraDetect.xcodeproj/project.xcworkspace
4 | CameraDetect.xcodeproj/xcuserdata
5 |
6 |
--------------------------------------------------------------------------------
/CameraDetect.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | 	archiveVersion = 1;
4 | 	classes = {
5 | 	};
6 | 	objectVersion = 46;
7 | 	objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 		314F0E161D4A4D9300069AAE /* frozen_process_no_filter_tiny.pb in Resources */ = {isa = PBXBuildFile; fileRef = 314F0E151D4A4D9300069AAE /* frozen_process_no_filter_tiny.pb */; };
11 | 		591D3EC51CFF7F130059011C /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 591D3EC41CFF7F120059011C /* AVFoundation.framework */; };
12 | 		591D3ECB1CFF7F5F0059011C /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 591D3ECA1CFF7F5F0059011C /* CoreMedia.framework */; };
13 | 		591D3ECD1CFF7F9F0059011C /* AssetsLibrary.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 591D3ECC1CFF7F9F0059011C /* AssetsLibrary.framework */; };
14 | 		591D3ECF1CFF7FCE0059011C /* ImageIO.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 591D3ECE1CFF7FCE0059011C /* ImageIO.framework */; };
15 | 		591D3ED21CFF85C30059011C /* ios_image_load.mm in Sources */ = {isa = PBXBuildFile; fileRef = 591D3ED11CFF85C30059011C /* ios_image_load.mm */; };
16 | 		591D3ED51CFF85FD0059011C /* tensorflow_utils.mm in Sources */ = {isa = PBXBuildFile; fileRef = 591D3ED31CFF85FD0059011C /* tensorflow_utils.mm */; };
17 | 		591D3EDA1CFFA83A0059011C /* grace_hopper.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 591D3ED71CFFA83A0059011C /* grace_hopper.jpg */; };
18 | 		591D3EDB1CFFA83A0059011C /* yolo_labels.txt in Resources */ = {isa = PBXBuildFile; fileRef = 591D3ED81CFFA83A0059011C /* yolo_labels.txt */; };
19 | 		591D3EDF1CFFAD230059011C /* libprotobuf-lite.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 591D3EDD1CFFAD230059011C /* libprotobuf-lite.a */; };
20 | 		591D3EE01CFFAD230059011C /* libprotobuf.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 591D3EDE1CFFAD230059011C /* libprotobuf.a */; };
21 | 		592FF8B918ECBD7600C164F8 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 592FF8B818ECBD7600C164F8 /* Foundation.framework */; };
22 | 		592FF8BB18ECBD7600C164F8 /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 592FF8BA18ECBD7600C164F8 /* CoreGraphics.framework */; };
23 | 		592FF90218ECC66200C164F8 /* main.mm in Sources */ = {isa = PBXBuildFile; fileRef = 592FF90118ECC66200C164F8 /* main.mm */; };
24 | 		592FF90D18EDD0DA00C164F8 /* MainStoryboard_iPhone.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 592FF90A18EDD0DA00C164F8 /* MainStoryboard_iPhone.storyboard */; };
25 | 		592FF92518EE240200C164F8 /* CameraExampleAppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 592FF92218EE240200C164F8 /* CameraExampleAppDelegate.m */; };
26
| 592FF92618EE240200C164F8 /* CameraExampleViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 592FF92418EE240200C164F8 /* CameraExampleViewController.mm */; }; 27 | /* End PBXBuildFile section */ 28 | 29 | /* Begin PBXFileReference section */ 30 | 314F0E151D4A4D9300069AAE /* frozen_process_no_filter_tiny.pb */ = {isa = PBXFileReference; lastKnownFileType = file; path = frozen_process_no_filter_tiny.pb; sourceTree = ""; }; 31 | 591D3EC41CFF7F120059011C /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/System/Library/Frameworks/AVFoundation.framework; sourceTree = DEVELOPER_DIR; }; 32 | 591D3EC61CFF7F370059011C /* CoreFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreFoundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/System/Library/Frameworks/CoreFoundation.framework; sourceTree = DEVELOPER_DIR; }; 33 | 591D3EC81CFF7F500059011C /* CoreImage.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreImage.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/System/Library/Frameworks/CoreImage.framework; sourceTree = DEVELOPER_DIR; }; 34 | 591D3ECA1CFF7F5F0059011C /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/System/Library/Frameworks/CoreMedia.framework; sourceTree = DEVELOPER_DIR; }; 35 | 591D3ECC1CFF7F9F0059011C /* AssetsLibrary.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AssetsLibrary.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/System/Library/Frameworks/AssetsLibrary.framework; sourceTree = DEVELOPER_DIR; }; 36 | 591D3ECE1CFF7FCE0059011C /* ImageIO.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ImageIO.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/System/Library/Frameworks/ImageIO.framework; sourceTree = DEVELOPER_DIR; }; 37 | 591D3ED01CFF85C30059011C /* ios_image_load.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ios_image_load.h; sourceTree = SOURCE_ROOT; }; 38 | 591D3ED11CFF85C30059011C /* ios_image_load.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = ios_image_load.mm; sourceTree = SOURCE_ROOT; }; 39 | 591D3ED31CFF85FD0059011C /* tensorflow_utils.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = tensorflow_utils.mm; sourceTree = SOURCE_ROOT; }; 40 | 591D3ED41CFF85FD0059011C /* tensorflow_utils.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = tensorflow_utils.h; sourceTree = SOURCE_ROOT; }; 41 | 591D3ED71CFFA83A0059011C /* grace_hopper.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = grace_hopper.jpg; sourceTree = ""; }; 42 | 591D3ED81CFFA83A0059011C /* yolo_labels.txt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = yolo_labels.txt; sourceTree = ""; }; 43 | 591D3EDD1CFFAD230059011C /* libprotobuf-lite.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = "libprotobuf-lite.a"; path = "../../makefile/gen/protobuf_ios/lib/libprotobuf-lite.a"; sourceTree 
= ""; }; 44 | 591D3EDE1CFFAD230059011C /* libprotobuf.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libprotobuf.a; path = ../../makefile/gen/protobuf_ios/lib/libprotobuf.a; sourceTree = ""; }; 45 | 592FF8B518ECBD7600C164F8 /* CameraDetect.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = CameraDetect.app; sourceTree = BUILT_PRODUCTS_DIR; }; 46 | 592FF8B818ECBD7600C164F8 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; }; 47 | 592FF8BA18ECBD7600C164F8 /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; }; 48 | 592FF90118ECC66200C164F8 /* main.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = main.mm; sourceTree = SOURCE_ROOT; }; 49 | 592FF90318ECCB8300C164F8 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = SOURCE_ROOT; }; 50 | 592FF90B18EDD0DA00C164F8 /* en */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = en; path = MainStoryboard_iPhone.storyboard; sourceTree = ""; }; 51 | 592FF92118EE240200C164F8 /* CameraExampleAppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraExampleAppDelegate.h; sourceTree = SOURCE_ROOT; }; 52 | 592FF92218EE240200C164F8 /* CameraExampleAppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraExampleAppDelegate.m; sourceTree = SOURCE_ROOT; }; 53 | 592FF92318EE240200C164F8 /* CameraExampleViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraExampleViewController.h; sourceTree = SOURCE_ROOT; }; 54 | 592FF92418EE240200C164F8 /* CameraExampleViewController.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = CameraExampleViewController.mm; sourceTree = SOURCE_ROOT; }; 55 | /* End PBXFileReference section */ 56 | 57 | /* Begin PBXFrameworksBuildPhase section */ 58 | 592FF8B218ECBD7600C164F8 /* Frameworks */ = { 59 | isa = PBXFrameworksBuildPhase; 60 | buildActionMask = 2147483647; 61 | files = ( 62 | 591D3EDF1CFFAD230059011C /* libprotobuf-lite.a in Frameworks */, 63 | 591D3EE01CFFAD230059011C /* libprotobuf.a in Frameworks */, 64 | 591D3ECF1CFF7FCE0059011C /* ImageIO.framework in Frameworks */, 65 | 591D3ECD1CFF7F9F0059011C /* AssetsLibrary.framework in Frameworks */, 66 | 591D3ECB1CFF7F5F0059011C /* CoreMedia.framework in Frameworks */, 67 | 591D3EC51CFF7F130059011C /* AVFoundation.framework in Frameworks */, 68 | 592FF8BB18ECBD7600C164F8 /* CoreGraphics.framework in Frameworks */, 69 | 592FF8B918ECBD7600C164F8 /* Foundation.framework in Frameworks */, 70 | ); 71 | runOnlyForDeploymentPostprocessing = 0; 72 | }; 73 | /* End PBXFrameworksBuildPhase section */ 74 | 75 | /* Begin PBXGroup section */ 76 | 591D3ED61CFFA83A0059011C /* data */ = { 77 | isa = PBXGroup; 78 | children = ( 79 | 591D3ED71CFFA83A0059011C /* grace_hopper.jpg */, 80 | 314F0E151D4A4D9300069AAE /* frozen_process_no_filter_tiny.pb */, 81 | 591D3ED81CFFA83A0059011C /* yolo_labels.txt */, 82 | ); 83 | path = data; 84 | sourceTree = SOURCE_ROOT; 85 | }; 86 | 
592FF8AA18ECBD3600C164F8 = { 87 | isa = PBXGroup; 88 | children = ( 89 | 592FF8BE18ECBD7600C164F8 /* CameraDetect */, 90 | 592FF8B718ECBD7600C164F8 /* Frameworks */, 91 | 592FF8B618ECBD7600C164F8 /* Products */, 92 | ); 93 | sourceTree = ""; 94 | }; 95 | 592FF8B618ECBD7600C164F8 /* Products */ = { 96 | isa = PBXGroup; 97 | children = ( 98 | 592FF8B518ECBD7600C164F8 /* CameraDetect.app */, 99 | ); 100 | name = Products; 101 | sourceTree = ""; 102 | }; 103 | 592FF8B718ECBD7600C164F8 /* Frameworks */ = { 104 | isa = PBXGroup; 105 | children = ( 106 | 591D3EDD1CFFAD230059011C /* libprotobuf-lite.a */, 107 | 591D3EDE1CFFAD230059011C /* libprotobuf.a */, 108 | 591D3ECE1CFF7FCE0059011C /* ImageIO.framework */, 109 | 591D3ECC1CFF7F9F0059011C /* AssetsLibrary.framework */, 110 | 591D3ECA1CFF7F5F0059011C /* CoreMedia.framework */, 111 | 591D3EC81CFF7F500059011C /* CoreImage.framework */, 112 | 591D3EC61CFF7F370059011C /* CoreFoundation.framework */, 113 | 591D3EC41CFF7F120059011C /* AVFoundation.framework */, 114 | 592FF8B818ECBD7600C164F8 /* Foundation.framework */, 115 | 592FF8BA18ECBD7600C164F8 /* CoreGraphics.framework */, 116 | ); 117 | name = Frameworks; 118 | sourceTree = ""; 119 | }; 120 | 592FF8BE18ECBD7600C164F8 /* CameraDetect */ = { 121 | isa = PBXGroup; 122 | children = ( 123 | 591D3ED61CFFA83A0059011C /* data */, 124 | 592FF90718EDD0DA00C164F8 /* en.lproj */, 125 | 592FF92118EE240200C164F8 /* CameraExampleAppDelegate.h */, 126 | 592FF92218EE240200C164F8 /* CameraExampleAppDelegate.m */, 127 | 592FF92318EE240200C164F8 /* CameraExampleViewController.h */, 128 | 592FF92418EE240200C164F8 /* CameraExampleViewController.mm */, 129 | 592FF90318ECCB8300C164F8 /* Info.plist */, 130 | 591D3ED01CFF85C30059011C /* ios_image_load.h */, 131 | 591D3ED11CFF85C30059011C /* ios_image_load.mm */, 132 | 592FF90118ECC66200C164F8 /* main.mm */, 133 | 591D3ED31CFF85FD0059011C /* tensorflow_utils.mm */, 134 | 591D3ED41CFF85FD0059011C /* tensorflow_utils.h */, 135 | ); 136 | name = CameraDetect; 137 | path = SimpleExample; 138 | sourceTree = ""; 139 | }; 140 | 592FF90718EDD0DA00C164F8 /* en.lproj */ = { 141 | isa = PBXGroup; 142 | children = ( 143 | 592FF90A18EDD0DA00C164F8 /* MainStoryboard_iPhone.storyboard */, 144 | ); 145 | path = en.lproj; 146 | sourceTree = SOURCE_ROOT; 147 | }; 148 | /* End PBXGroup section */ 149 | 150 | /* Begin PBXNativeTarget section */ 151 | 592FF8B418ECBD7600C164F8 /* CameraDetect */ = { 152 | isa = PBXNativeTarget; 153 | buildConfigurationList = 592FF8E318ECBD7600C164F8 /* Build configuration list for PBXNativeTarget "CameraDetect" */; 154 | buildPhases = ( 155 | 592FF8B118ECBD7600C164F8 /* Sources */, 156 | 592FF8B218ECBD7600C164F8 /* Frameworks */, 157 | 592FF8B318ECBD7600C164F8 /* Resources */, 158 | ); 159 | buildRules = ( 160 | ); 161 | dependencies = ( 162 | ); 163 | name = CameraDetect; 164 | productName = SimpleExample; 165 | productReference = 592FF8B518ECBD7600C164F8 /* CameraDetect.app */; 166 | productType = "com.apple.product-type.application"; 167 | }; 168 | /* End PBXNativeTarget section */ 169 | 170 | /* Begin PBXProject section */ 171 | 592FF8AB18ECBD3600C164F8 /* Project object */ = { 172 | isa = PBXProject; 173 | attributes = { 174 | LastUpgradeCheck = 0800; 175 | TargetAttributes = { 176 | 592FF8B418ECBD7600C164F8 = { 177 | DevelopmentTeam = 76HL236Y54; 178 | DevelopmentTeamName = "Yang Jian (Personal Team)"; 179 | }; 180 | }; 181 | }; 182 | buildConfigurationList = 592FF8AE18ECBD3600C164F8 /* Build configuration list for PBXProject "CameraDetect" */; 183 | 
compatibilityVersion = "Xcode 3.2"; 184 | developmentRegion = English; 185 | hasScannedForEncodings = 0; 186 | knownRegions = ( 187 | en, 188 | ); 189 | mainGroup = 592FF8AA18ECBD3600C164F8; 190 | productRefGroup = 592FF8B618ECBD7600C164F8 /* Products */; 191 | projectDirPath = ""; 192 | projectRoot = ""; 193 | targets = ( 194 | 592FF8B418ECBD7600C164F8 /* CameraDetect */, 195 | ); 196 | }; 197 | /* End PBXProject section */ 198 | 199 | /* Begin PBXResourcesBuildPhase section */ 200 | 592FF8B318ECBD7600C164F8 /* Resources */ = { 201 | isa = PBXResourcesBuildPhase; 202 | buildActionMask = 2147483647; 203 | files = ( 204 | 314F0E161D4A4D9300069AAE /* frozen_process_no_filter_tiny.pb in Resources */, 205 | 591D3EDA1CFFA83A0059011C /* grace_hopper.jpg in Resources */, 206 | 592FF90D18EDD0DA00C164F8 /* MainStoryboard_iPhone.storyboard in Resources */, 207 | 591D3EDB1CFFA83A0059011C /* yolo_labels.txt in Resources */, 208 | ); 209 | runOnlyForDeploymentPostprocessing = 0; 210 | }; 211 | /* End PBXResourcesBuildPhase section */ 212 | 213 | /* Begin PBXSourcesBuildPhase section */ 214 | 592FF8B118ECBD7600C164F8 /* Sources */ = { 215 | isa = PBXSourcesBuildPhase; 216 | buildActionMask = 2147483647; 217 | files = ( 218 | 592FF90218ECC66200C164F8 /* main.mm in Sources */, 219 | 591D3ED21CFF85C30059011C /* ios_image_load.mm in Sources */, 220 | 592FF92618EE240200C164F8 /* CameraExampleViewController.mm in Sources */, 221 | 592FF92518EE240200C164F8 /* CameraExampleAppDelegate.m in Sources */, 222 | 591D3ED51CFF85FD0059011C /* tensorflow_utils.mm in Sources */, 223 | ); 224 | runOnlyForDeploymentPostprocessing = 0; 225 | }; 226 | /* End PBXSourcesBuildPhase section */ 227 | 228 | /* Begin PBXVariantGroup section */ 229 | 592FF90A18EDD0DA00C164F8 /* MainStoryboard_iPhone.storyboard */ = { 230 | isa = PBXVariantGroup; 231 | children = ( 232 | 592FF90B18EDD0DA00C164F8 /* en */, 233 | ); 234 | name = MainStoryboard_iPhone.storyboard; 235 | sourceTree = ""; 236 | }; 237 | /* End PBXVariantGroup section */ 238 | 239 | /* Begin XCBuildConfiguration section */ 240 | 592FF8AF18ECBD3600C164F8 /* Debug */ = { 241 | isa = XCBuildConfiguration; 242 | buildSettings = { 243 | CLANG_WARN_BOOL_CONVERSION = YES; 244 | CLANG_WARN_CONSTANT_CONVERSION = YES; 245 | CLANG_WARN_EMPTY_BODY = YES; 246 | CLANG_WARN_ENUM_CONVERSION = YES; 247 | CLANG_WARN_INT_CONVERSION = YES; 248 | CLANG_WARN_UNREACHABLE_CODE = YES; 249 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 250 | ENABLE_STRICT_OBJC_MSGSEND = YES; 251 | ENABLE_TESTABILITY = YES; 252 | GCC_NO_COMMON_BLOCKS = YES; 253 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 254 | GCC_WARN_ABOUT_RETURN_TYPE = YES; 255 | GCC_WARN_UNDECLARED_SELECTOR = YES; 256 | GCC_WARN_UNINITIALIZED_AUTOS = YES; 257 | GCC_WARN_UNUSED_FUNCTION = YES; 258 | GCC_WARN_UNUSED_VARIABLE = YES; 259 | ONLY_ACTIVE_ARCH = YES; 260 | }; 261 | name = Debug; 262 | }; 263 | 592FF8B018ECBD3600C164F8 /* Release */ = { 264 | isa = XCBuildConfiguration; 265 | buildSettings = { 266 | CLANG_WARN_BOOL_CONVERSION = YES; 267 | CLANG_WARN_CONSTANT_CONVERSION = YES; 268 | CLANG_WARN_EMPTY_BODY = YES; 269 | CLANG_WARN_ENUM_CONVERSION = YES; 270 | CLANG_WARN_INT_CONVERSION = YES; 271 | CLANG_WARN_UNREACHABLE_CODE = YES; 272 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 273 | ENABLE_STRICT_OBJC_MSGSEND = YES; 274 | GCC_NO_COMMON_BLOCKS = YES; 275 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 276 | GCC_WARN_ABOUT_RETURN_TYPE = YES; 277 | GCC_WARN_UNDECLARED_SELECTOR = YES; 278 | GCC_WARN_UNINITIALIZED_AUTOS = YES; 279 | 
GCC_WARN_UNUSED_FUNCTION = YES; 280 | GCC_WARN_UNUSED_VARIABLE = YES; 281 | }; 282 | name = Release; 283 | }; 284 | 592FF8DF18ECBD7600C164F8 /* Debug */ = { 285 | isa = XCBuildConfiguration; 286 | buildSettings = { 287 | ALWAYS_SEARCH_USER_PATHS = NO; 288 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 289 | ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage; 290 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 291 | CLANG_CXX_LIBRARY = "compiler-default"; 292 | CLANG_ENABLE_MODULES = YES; 293 | CLANG_ENABLE_OBJC_ARC = NO; 294 | CLANG_WARN_BOOL_CONVERSION = YES; 295 | CLANG_WARN_CONSTANT_CONVERSION = YES; 296 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 297 | CLANG_WARN_EMPTY_BODY = YES; 298 | CLANG_WARN_ENUM_CONVERSION = YES; 299 | CLANG_WARN_INT_CONVERSION = YES; 300 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 301 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 302 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 303 | COPY_PHASE_STRIP = NO; 304 | ENABLE_BITCODE = NO; 305 | FRAMEWORK_SEARCH_PATHS = "$(inherited)"; 306 | GCC_C_LANGUAGE_STANDARD = gnu99; 307 | GCC_DYNAMIC_NO_PIC = NO; 308 | GCC_OPTIMIZATION_LEVEL = 0; 309 | GCC_PRECOMPILE_PREFIX_HEADER = YES; 310 | GCC_PREFIX_HEADER = ""; 311 | GCC_PREPROCESSOR_DEFINITIONS = ( 312 | "DEBUG=1", 313 | "$(inherited)", 314 | ); 315 | GCC_SYMBOLS_PRIVATE_EXTERN = NO; 316 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 317 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 318 | GCC_WARN_UNDECLARED_SELECTOR = YES; 319 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 320 | GCC_WARN_UNUSED_FUNCTION = YES; 321 | GCC_WARN_UNUSED_VARIABLE = YES; 322 | HEADER_SEARCH_PATHS = ( 323 | "$(SRCROOT)/../../makefile/gen/proto", 324 | "$(SRCROOT)/../../makefile/downloads/eigen-latest", 325 | "$(SRCROOT)/../../makefile/downloads", 326 | "$(SRCROOT)/../../makefile/downloads/protobuf/src/", 327 | "$(SRCROOT)/../../../..", 328 | ); 329 | INFOPLIST_FILE = "$(SRCROOT)/Info.plist"; 330 | IPHONEOS_DEPLOYMENT_TARGET = 9.2; 331 | LIBRARY_SEARCH_PATHS = ( 332 | "$(SRCROOT)/../../makefile/gen/lib", 333 | "$(SRCROOT)/../../makefile/gen/protobuf_ios/lib", 334 | ); 335 | ONLY_ACTIVE_ARCH = NO; 336 | OTHER_LDFLAGS = ( 337 | "-force_load", 338 | "$(SRCROOT)/../../makefile/gen/lib/libtensorflow-core.a", 339 | "-Xlinker", 340 | "-S", 341 | "-Xlinker", 342 | "-x", 343 | "-Xlinker", 344 | "-dead_strip", 345 | ); 346 | PRODUCT_BUNDLE_IDENTIFIER = net.yjmade.CameraDetect; 347 | PRODUCT_NAME = "$(TARGET_NAME)"; 348 | SDKROOT = iphoneos; 349 | TARGETED_DEVICE_FAMILY = "1,2"; 350 | VALID_ARCHS = "arm64 armv7 armv7s"; 351 | WRAPPER_EXTENSION = app; 352 | }; 353 | name = Debug; 354 | }; 355 | 592FF8E018ECBD7600C164F8 /* Release */ = { 356 | isa = XCBuildConfiguration; 357 | buildSettings = { 358 | ALWAYS_SEARCH_USER_PATHS = NO; 359 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 360 | ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage; 361 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 362 | CLANG_CXX_LIBRARY = "compiler-default"; 363 | CLANG_ENABLE_MODULES = YES; 364 | CLANG_ENABLE_OBJC_ARC = NO; 365 | CLANG_WARN_BOOL_CONVERSION = YES; 366 | CLANG_WARN_CONSTANT_CONVERSION = YES; 367 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 368 | CLANG_WARN_EMPTY_BODY = YES; 369 | CLANG_WARN_ENUM_CONVERSION = YES; 370 | CLANG_WARN_INT_CONVERSION = YES; 371 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 372 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 373 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 374 | COPY_PHASE_STRIP = YES; 375 | ENABLE_BITCODE = NO; 376 | ENABLE_NS_ASSERTIONS = NO; 377 | 
FRAMEWORK_SEARCH_PATHS = "$(inherited)"; 378 | GCC_C_LANGUAGE_STANDARD = gnu99; 379 | GCC_PRECOMPILE_PREFIX_HEADER = YES; 380 | GCC_PREFIX_HEADER = ""; 381 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 382 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 383 | GCC_WARN_UNDECLARED_SELECTOR = YES; 384 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 385 | GCC_WARN_UNUSED_FUNCTION = YES; 386 | GCC_WARN_UNUSED_VARIABLE = YES; 387 | HEADER_SEARCH_PATHS = ( 388 | "$(SRCROOT)/../../makefile/gen/proto", 389 | "$(SRCROOT)/../../makefile/downloads/eigen-latest", 390 | "$(SRCROOT)/../../makefile/downloads", 391 | "$(SRCROOT)/../../makefile/downloads/protobuf/src/", 392 | "$(SRCROOT)/../../../..", 393 | ); 394 | INFOPLIST_FILE = "$(SRCROOT)/Info.plist"; 395 | IPHONEOS_DEPLOYMENT_TARGET = 9.2; 396 | LIBRARY_SEARCH_PATHS = ( 397 | "$(SRCROOT)/../../makefile/gen/lib", 398 | "$(SRCROOT)/../../makefile/gen/protobuf_ios/lib", 399 | ); 400 | ONLY_ACTIVE_ARCH = NO; 401 | OTHER_LDFLAGS = ( 402 | "-force_load", 403 | "$(SRCROOT)/../../makefile/gen/lib/libtensorflow-core.a", 404 | "-Xlinker", 405 | "-S", 406 | "-Xlinker", 407 | "-x", 408 | "-Xlinker", 409 | "-dead_strip", 410 | ); 411 | PRODUCT_BUNDLE_IDENTIFIER = net.yjmade.CameraDetect; 412 | PRODUCT_NAME = "$(TARGET_NAME)"; 413 | SDKROOT = iphoneos; 414 | TARGETED_DEVICE_FAMILY = "1,2"; 415 | VALIDATE_PRODUCT = YES; 416 | VALID_ARCHS = "arm64 armv7 armv7s"; 417 | WRAPPER_EXTENSION = app; 418 | }; 419 | name = Release; 420 | }; 421 | /* End XCBuildConfiguration section */ 422 | 423 | /* Begin XCConfigurationList section */ 424 | 592FF8AE18ECBD3600C164F8 /* Build configuration list for PBXProject "CameraDetect" */ = { 425 | isa = XCConfigurationList; 426 | buildConfigurations = ( 427 | 592FF8AF18ECBD3600C164F8 /* Debug */, 428 | 592FF8B018ECBD3600C164F8 /* Release */, 429 | ); 430 | defaultConfigurationIsVisible = 0; 431 | defaultConfigurationName = Release; 432 | }; 433 | 592FF8E318ECBD7600C164F8 /* Build configuration list for PBXNativeTarget "CameraDetect" */ = { 434 | isa = XCConfigurationList; 435 | buildConfigurations = ( 436 | 592FF8DF18ECBD7600C164F8 /* Debug */, 437 | 592FF8E018ECBD7600C164F8 /* Release */, 438 | ); 439 | defaultConfigurationIsVisible = 0; 440 | defaultConfigurationName = Release; 441 | }; 442 | /* End XCConfigurationList section */ 443 | }; 444 | rootObject = 592FF8AB18ECBD3600C164F8 /* Project object */; 445 | } 446 | -------------------------------------------------------------------------------- /CameraExampleAppDelegate.h: -------------------------------------------------------------------------------- 1 | // Copyright 2015 Google Inc. All rights reserved. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 |
15 | #import <UIKit/UIKit.h>
16 |
17 | @interface CameraExampleAppDelegate : UIResponder <UIApplicationDelegate>
18 |
19 | @property(strong, nonatomic) UIWindow *window;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
/CameraExampleAppDelegate.m:
--------------------------------------------------------------------------------
1 | // Copyright 2015 Google Inc. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | //     http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | #import "CameraExampleAppDelegate.h"
16 |
17 | @implementation CameraExampleAppDelegate
18 |
19 | @synthesize window = _window;
20 |
21 | - (BOOL)application:(UIApplication *)application
22 |     didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
23 |   [self.window makeKeyAndVisible];
24 |   return YES;
25 | }
26 |
27 | - (void)applicationWillResignActive:(UIApplication *)application {
28 |   [[UIApplication sharedApplication] setIdleTimerDisabled:NO];
29 | }
30 |
31 | - (void)applicationDidEnterBackground:(UIApplication *)application {
32 | }
33 |
34 | - (void)applicationWillEnterForeground:(UIApplication *)application {
35 | }
36 |
37 | - (void)applicationDidBecomeActive:(UIApplication *)application {
38 |   [[UIApplication sharedApplication] setIdleTimerDisabled:YES];
39 | }
40 |
41 | - (void)applicationWillTerminate:(UIApplication *)application {
42 | }
43 |
44 | @end
45 |
--------------------------------------------------------------------------------
/CameraExampleViewController.h:
--------------------------------------------------------------------------------
1 | // Copyright 2015 Google Inc. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | //     http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | #import <AVFoundation/AVFoundation.h>
16 | #import <UIKit/UIKit.h>
17 |
18 | #include <memory>
19 | #include "tensorflow/core/public/session.h"
20 |
21 | @interface CameraExampleViewController
22 |     : UIViewController<UIGestureRecognizerDelegate,
23 |                        AVCaptureVideoDataOutputSampleBufferDelegate> {
24 |   IBOutlet UIView *previewView;
25 |   IBOutlet UISegmentedControl *camerasControl;
26 |   AVCaptureVideoPreviewLayer *previewLayer;
27 |   AVCaptureVideoDataOutput *videoDataOutput;
28 |   dispatch_queue_t videoDataOutputQueue;
29 |   AVCaptureStillImageOutput *stillImageOutput;
30 |   UIView *flashView;
31 |   UIImage *square;
32 |   BOOL isUsingFrontFacingCamera;
33 |   AVSpeechSynthesizer *synth;
34 |   NSMutableDictionary *oldPredictionValues;
35 |   NSMutableArray *labelLayers;
36 |   AVCaptureSession *session;
37 |   std::unique_ptr<tensorflow::Session> tf_session;
38 |   std::vector<std::string> labels;
39 | }
40 | @property(retain, nonatomic) CATextLayer *predictionTextLayer;
41 |
42 | - (IBAction)takePicture:(id)sender;
43 | - (IBAction)switchCameras:(id)sender;
44 |
45 | @end
46 |
--------------------------------------------------------------------------------
/CameraExampleViewController.mm:
--------------------------------------------------------------------------------
1 | // Copyright 2015 Google Inc. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | //     http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | #import <AssetsLibrary/AssetsLibrary.h>
16 | #import <AVFoundation/AVFoundation.h>
17 | #import <CoreImage/CoreImage.h>
18 | #import <ImageIO/ImageIO.h>
19 | #import "CameraExampleViewController.h"
20 |
21 | #include <sys/time.h>
22 |
23 | #include "tensorflow_utils.h"
24 |
25 | static const NSString *AVCaptureStillImageIsCapturingStillImageContext =
26 |     @"AVCaptureStillImageIsCapturingStillImageContext";
27 |
28 | @interface CameraExampleViewController (InternalMethods)
29 | - (void)setupAVCapture;
30 | - (void)teardownAVCapture;
31 | @end
32 |
33 | @implementation CameraExampleViewController
34 | - (void)setupAVCapture {
35 |   NSError *error = nil;
36 |
37 |   [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted)
38 |   {
39 |     if (granted == true)
40 |     {
41 |       //[self presentViewController : picker animated:YES completion:NULL];
42 |       //Do your stuff
43 |       NSLog(@"granted");
44 |     }
45 |     else
46 |     {
47 |       UIAlertView *cameraAlert = [[UIAlertView alloc]
48 |               initWithTitle:@"Warning"
49 |                     message:@"No Permission"
50 |                    delegate:self
51 |           cancelButtonTitle:@"OK"
52 |           otherButtonTitles:nil,nil];
53 |       [cameraAlert show];
54 |
55 |       NSLog(@"denied");
56 |     }
57 |
58 |   }];
59 |   session = [AVCaptureSession new];
60 |   if ([[UIDevice currentDevice] userInterfaceIdiom] ==
61 |       UIUserInterfaceIdiomPhone)
62 |     [session setSessionPreset:AVCaptureSessionPreset640x480];
63 |   else
64 |     [session setSessionPreset:AVCaptureSessionPresetPhoto];
65 |   AVCaptureDevice *device =
66 |       [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
67 |   AVCaptureDeviceInput *deviceInput =
68 |       [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
69 |   assert(error == nil);
70 |   isUsingFrontFacingCamera = NO;
71 |   if ([session canAddInput:deviceInput]) [session addInput:deviceInput];
72 |
73 |   stillImageOutput = [AVCaptureStillImageOutput new];
74 | [stillImageOutput 75 | addObserver:self 76 | forKeyPath:@"capturingStillImage" 77 | options:NSKeyValueObservingOptionNew 78 | context:(void *)(AVCaptureStillImageIsCapturingStillImageContext)]; 79 | if ([session canAddOutput:stillImageOutput]) 80 | [session addOutput:stillImageOutput]; 81 | 82 | videoDataOutput = [AVCaptureVideoDataOutput new]; 83 | NSDictionary *rgbOutputSettings = [NSDictionary 84 | dictionaryWithObject:[NSNumber numberWithInt:kCMPixelFormat_32BGRA] 85 | forKey:(id)kCVPixelBufferPixelFormatTypeKey]; 86 | [videoDataOutput setVideoSettings:rgbOutputSettings]; 87 | [videoDataOutput setAlwaysDiscardsLateVideoFrames:YES]; 88 | videoDataOutputQueue = 89 | dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL); 90 | [videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue]; 91 | if ([session canAddOutput:videoDataOutput]) 92 | [session addOutput:videoDataOutput]; 93 | AVCaptureConnection *connection=[videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; 94 | connection.videoOrientation = AVCaptureVideoOrientationPortrait; 95 | [connection setEnabled:YES]; 96 | 97 | previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session]; 98 | [previewLayer setBackgroundColor:[[UIColor blackColor] CGColor]]; 99 | [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect]; 100 | CALayer *rootLayer = [previewView layer]; 101 | [rootLayer setMasksToBounds:YES]; 102 | [previewLayer setFrame:[rootLayer bounds]]; 103 | [rootLayer addSublayer:previewLayer]; 104 | [session startRunning]; 105 | [session release]; 106 | if (error) { 107 | UIAlertView *alertView = [[UIAlertView alloc] 108 | initWithTitle:[NSString stringWithFormat:@"Failed with error %d", 109 | (int)[error code]] 110 | message:[error localizedDescription] 111 | delegate:nil 112 | cancelButtonTitle:@"Dismiss" 113 | otherButtonTitles:nil]; 114 | [alertView show]; 115 | [alertView release]; 116 | [self teardownAVCapture]; 117 | } 118 | } 119 | 120 | - (void)teardownAVCapture { 121 | [videoDataOutput release]; 122 | if (videoDataOutputQueue) dispatch_release(videoDataOutputQueue); 123 | [stillImageOutput removeObserver:self forKeyPath:@"isCapturingStillImage"]; 124 | [stillImageOutput release]; 125 | [previewLayer removeFromSuperlayer]; 126 | [previewLayer release]; 127 | } 128 | 129 | - (void)observeValueForKeyPath:(NSString *)keyPath 130 | ofObject:(id)object 131 | change:(NSDictionary *)change 132 | context:(void *)context { 133 | if (context == AVCaptureStillImageIsCapturingStillImageContext) { 134 | BOOL isCapturingStillImage = 135 | [[change objectForKey:NSKeyValueChangeNewKey] boolValue]; 136 | 137 | if (isCapturingStillImage) { 138 | // do flash bulb like animation 139 | flashView = [[UIView alloc] initWithFrame:[previewView frame]]; 140 | [flashView setBackgroundColor:[UIColor whiteColor]]; 141 | [flashView setAlpha:0.f]; 142 | [[[self view] window] addSubview:flashView]; 143 | 144 | [UIView animateWithDuration:.4f 145 | animations:^{ 146 | [flashView setAlpha:1.f]; 147 | }]; 148 | } else { 149 | [UIView animateWithDuration:.4f 150 | animations:^{ 151 | [flashView setAlpha:0.f]; 152 | } 153 | completion:^(BOOL finished) { 154 | [flashView removeFromSuperview]; 155 | [flashView release]; 156 | flashView = nil; 157 | }]; 158 | } 159 | } 160 | } 161 | 162 | - (AVCaptureVideoOrientation)avOrientationForDeviceOrientation: 163 | (UIDeviceOrientation)deviceOrientation { 164 | AVCaptureVideoOrientation result = 165 | (AVCaptureVideoOrientation)(deviceOrientation); 166 | if 
(deviceOrientation == UIDeviceOrientationLandscapeLeft) 167 | result = AVCaptureVideoOrientationLandscapeRight; 168 | else if (deviceOrientation == UIDeviceOrientationLandscapeRight) 169 | result = AVCaptureVideoOrientationLandscapeLeft; 170 | // NSLog(@"orientation, %ld,%ld",(long)deviceOrientation,(long)result); 171 | return result; 172 | } 173 | 174 | - (IBAction)takePicture:(id)sender { 175 | if ([session isRunning]) { 176 | [session stopRunning]; 177 | [sender setTitle:@"Continue" forState:UIControlStateNormal]; 178 | 179 | flashView = [[UIView alloc] initWithFrame:[previewView frame]]; 180 | [flashView setBackgroundColor:[UIColor whiteColor]]; 181 | [flashView setAlpha:0.f]; 182 | [[[self view] window] addSubview:flashView]; 183 | 184 | [UIView animateWithDuration:.2f 185 | animations:^{ 186 | [flashView setAlpha:1.f]; 187 | } 188 | completion:^(BOOL finished) { 189 | [UIView animateWithDuration:.2f 190 | animations:^{ 191 | [flashView setAlpha:0.f]; 192 | } 193 | completion:^(BOOL finished) { 194 | [flashView removeFromSuperview]; 195 | [flashView release]; 196 | flashView = nil; 197 | }]; 198 | }]; 199 | 200 | } else { 201 | [session startRunning]; 202 | [sender setTitle:@"Freeze Frame" forState:UIControlStateNormal]; 203 | } 204 | } 205 | 206 | + (CGRect)videoPreviewBoxForGravity:(NSString *)gravity 207 | frameSize:(CGSize)frameSize 208 | apertureSize:(CGSize)apertureSize { 209 | CGFloat apertureRatio = apertureSize.height / apertureSize.width; 210 | CGFloat viewRatio = frameSize.width / frameSize.height; 211 | 212 | CGSize size = CGSizeZero; 213 | if ([gravity isEqualToString:AVLayerVideoGravityResizeAspectFill]) { 214 | if (viewRatio > apertureRatio) { 215 | size.width = frameSize.width; 216 | size.height = 217 | apertureSize.width * (frameSize.width / apertureSize.height); 218 | } else { 219 | size.width = 220 | apertureSize.height * (frameSize.height / apertureSize.width); 221 | size.height = frameSize.height; 222 | } 223 | } else if ([gravity isEqualToString:AVLayerVideoGravityResizeAspect]) { 224 | if (viewRatio > apertureRatio) { 225 | size.width = 226 | apertureSize.height * (frameSize.height / apertureSize.width); 227 | size.height = frameSize.height; 228 | } else { 229 | size.width = frameSize.width; 230 | size.height = 231 | apertureSize.width * (frameSize.width / apertureSize.height); 232 | } 233 | } else if ([gravity isEqualToString:AVLayerVideoGravityResize]) { 234 | size.width = frameSize.width; 235 | size.height = frameSize.height; 236 | } 237 | 238 | CGRect videoBox; 239 | videoBox.size = size; 240 | if (size.width < frameSize.width) 241 | videoBox.origin.x = (frameSize.width - size.width) / 2; 242 | else 243 | videoBox.origin.x = (size.width - frameSize.width) / 2; 244 | 245 | if (size.height < frameSize.height) 246 | videoBox.origin.y = (frameSize.height - size.height) / 2; 247 | else 248 | videoBox.origin.y = (size.height - frameSize.height) / 2; 249 | 250 | return videoBox; 251 | } 252 | 253 | - (void)captureOutput:(AVCaptureOutput *)captureOutput 254 | didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 255 | fromConnection:(AVCaptureConnection *)connection { 256 | CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 257 | [self runCNNOnFrame:pixelBuffer]; 258 | } 259 | 260 | - (void)dealloc { 261 | [self teardownAVCapture]; 262 | [square release]; 263 | [super dealloc]; 264 | } 265 | 266 | // use front/back camera 267 | - (IBAction)switchCameras:(id)sender { 268 | AVCaptureDevicePosition desiredPosition; 269 | if 
(isUsingFrontFacingCamera)
270 |     desiredPosition = AVCaptureDevicePositionBack;
271 |   else
272 |     desiredPosition = AVCaptureDevicePositionFront;
273 |
274 |   for (AVCaptureDevice *d in
275 |        [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
276 |     if ([d position] == desiredPosition) {
277 |       [[previewLayer session] beginConfiguration];
278 |       AVCaptureDeviceInput *input =
279 |           [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
280 |       for (AVCaptureInput *oldInput in [[previewLayer session] inputs]) {
281 |         [[previewLayer session] removeInput:oldInput];
282 |       }
283 |       [[previewLayer session] addInput:input];
284 |       [[previewLayer session] commitConfiguration];
285 |       break;
286 |     }
287 |   }
288 |   isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
289 | }
290 |
291 | - (void)didReceiveMemoryWarning {
292 |   [super didReceiveMemoryWarning];
293 | }
294 |
295 | - (void)viewDidUnload {
296 |   [super viewDidUnload];
297 |   [oldPredictionValues release];
298 | }
299 |
300 | - (void)viewWillAppear:(BOOL)animated {
301 |   [super viewWillAppear:animated];
302 | }
303 |
304 | - (void)viewDidAppear:(BOOL)animated {
305 |   [super viewDidAppear:animated];
306 | }
307 |
308 | - (void)viewWillDisappear:(BOOL)animated {
309 |   [super viewWillDisappear:animated];
310 | }
311 |
312 | - (void)viewDidDisappear:(BOOL)animated {
313 |   [super viewDidDisappear:animated];
314 | }
315 |
316 | - (BOOL)shouldAutorotateToInterfaceOrientation:
317 |     (UIInterfaceOrientation)interfaceOrientation {
318 |   return (interfaceOrientation == UIInterfaceOrientationPortrait);
319 | }
320 |
321 | - (BOOL)prefersStatusBarHidden {
322 |   return YES;
323 | }
324 |
325 | // ===================================================
326 | - (void)runCNNOnFrame:(CVPixelBufferRef)pixelBuffer {
327 |   assert(pixelBuffer != NULL);
328 |
329 |   OSType sourcePixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
330 |
331 |   int doReverseChannels;
332 |   if (kCVPixelFormatType_32ARGB == sourcePixelFormat) {
333 |     doReverseChannels = 0;
334 |   } else if (kCVPixelFormatType_32BGRA == sourcePixelFormat) {
335 |     doReverseChannels = 1;
336 |   } else {
337 |     assert(false);  // Unknown source format
338 |   }
339 |
340 |
341 |   const int sourceRowBytes = (int)CVPixelBufferGetBytesPerRow(pixelBuffer);
342 |   const int image_width = (int)CVPixelBufferGetWidth(pixelBuffer);
343 |   const int fullHeight = (int)CVPixelBufferGetHeight(pixelBuffer);
344 |   int image_channels = 4;
345 |   CVPixelBufferLockBaseAddress(pixelBuffer, 0);
346 |   unsigned char *sourceBaseAddr =
347 |       (unsigned char *)(CVPixelBufferGetBaseAddress(pixelBuffer));
348 |   int image_height;
349 |   unsigned char *sourceStartAddr;
350 |   if (fullHeight <= image_width) {
351 |     image_height = fullHeight;
352 |     sourceStartAddr = sourceBaseAddr;
353 |   } else {
354 |     image_height = image_width;
355 |     const int marginY = ((fullHeight - image_width) / 2);
356 |     sourceStartAddr = (sourceBaseAddr + (marginY * sourceRowBytes));
357 |   }
358 |   // NSLog(@"load image %dx%d",fullHeight,image_width);
359 |   const int wanted_channels = 3;
360 |
361 |   tensorflow::Tensor image_tensor(
362 |       tensorflow::DT_FLOAT,
363 |       tensorflow::TensorShape(
364 |           {image_height, image_width, wanted_channels}));
365 |   auto image_tensor_mapped = image_tensor.tensor<float, 3>();
366 |   tensorflow::uint8 *in = sourceStartAddr;
367 |   float *out = image_tensor_mapped.data();
368 |   for (int y = 0; y < image_height; ++y) {
369 |     float *out_row = out + (y * image_width * wanted_channels);
370 |     for (int x = 0; x < image_width; ++x) {
371 |       tensorflow::uint8 *in_pixel =
372 |           in + (y * image_width * image_channels) + (x * image_channels);
373 |       float *out_pixel = out_row + (x * wanted_channels);
374 |       for (int c = 0; c < wanted_channels; ++c) {
375 |         out_pixel[c] = in_pixel[wanted_channels-c-1];
376 |       }
377 |     }
378 |   }
379 |
380 |   if (tf_session.get()) {
381 |     std::vector<tensorflow::Tensor> outputs;
382 |     NSLog(@"start run");
383 |     tensorflow::Status run_status = tf_session->Run(
384 |         {{"input", image_tensor}}, {"boxes","classes_prob","classes_arg"}, {}, &outputs);
385 |     NSLog(@"stop run");
386 |     if (!run_status.ok()) {
387 |       LOG(ERROR) << "Running model failed:" << run_status;
388 |     } else {
389 |       tensorflow::Tensor *boxes = &outputs[0];
390 |       tensorflow::Tensor *probs = &outputs[1];
391 |       tensorflow::Tensor *args = &outputs[2];
392 |       auto probs_vec=probs->vec<float>();
393 |       auto args_vec=args->vec<float>();
394 |       auto boxes_matrix=boxes->matrix<float>();
395 |
396 |       NSMutableArray *probs_filtered = [NSMutableArray array];
397 |       NSMutableArray *labels_filtered = [NSMutableArray array];
398 |       NSMutableArray *boxes_filtered = [NSMutableArray array];
399 |       for (int index=0;index<probs_vec.size();index++){
400 |         const float probsValue=probs_vec(index);
401 |
402 |         if (probsValue>0.2f){
403 |           [probs_filtered addObject:[NSNumber numberWithFloat:probsValue]];
404 |           std::string label=labels[(tensorflow::StringPiece::size_type)args_vec(index)];
405 |           [labels_filtered addObject:[NSString stringWithUTF8String:label.c_str()]];
406 |           [boxes_filtered addObject:[NSArray arrayWithObjects:
407 |               [NSNumber numberWithFloat:boxes_matrix(index,0)],
408 |               [NSNumber numberWithFloat:boxes_matrix(index,1)],
409 |               [NSNumber numberWithFloat:boxes_matrix(index,2)],
410 |               [NSNumber numberWithFloat:boxes_matrix(index,3)], nil
411 |           ]];
412 |         }
413 |       }
414 |       dispatch_async(dispatch_get_main_queue(), ^(void){
415 |         [self setPredictionWithLabels:labels_filtered
416 |                                 probs:probs_filtered
417 |                                 boxes:boxes_filtered
418 |         ];
419 |       });
420 |       NSLog(@"labels %@ %@",labels_filtered,boxes_filtered);
421 |     }
422 |   }
423 | }
424 |
425 | - (void)viewDidLoad {
426 |   [super viewDidLoad];
427 |   [self setupAVCapture];
428 |   square = [[UIImage imageNamed:@"squarePNG"] retain];
429 |   synth = [[AVSpeechSynthesizer alloc] init];
430 |   labelLayers = [[NSMutableArray alloc] init];
431 |   oldPredictionValues = [[NSMutableDictionary alloc] init];
432 |   NSLog(@"Load Model");
433 |   tensorflow::Status load_status =
434 |       LoadModel(@"frozen_process_no_filter_tiny", @"pb", &tf_session);
435 |   if (!load_status.ok()) {
436 |     LOG(FATAL) << "Couldn't load model: " << load_status;
437 |   }
438 |
439 |   tensorflow::Status labels_status =
440 |       LoadLabels(@"yolo_labels", @"txt", &labels);
441 |   if (!labels_status.ok()) {
442 |     LOG(FATAL) << "Couldn't load labels: " << labels_status;
443 |   }
444 |
445 |
446 | }
447 |
448 | -(void)setPredictionWithLabels:(NSArray *)labels_filtered
449 |                          probs:(NSArray *)probs_filtered
450 |                          boxes:(NSArray *)boxes_filtered{
451 |
452 |   [self removeAllLabelLayers];
453 |   CGRect mainScreenBounds = [[UIScreen mainScreen] bounds];
454 |
455 |   for (int i=0;i<[labels_filtered count];i++){
456 |     NSString *label=(NSString *)labels_filtered[i];
457 |     [self addLabelLayerWithText:[NSString stringWithFormat:@"%@ %.2f",label,[probs_filtered[i] floatValue]]
458 |                         originX:[boxes_filtered[i][0] floatValue]*mainScreenBounds.size.width+mainScreenBounds.origin.x
459 |                         originY:[boxes_filtered[i][1] floatValue]*mainScreenBounds.size.height+mainScreenBounds.origin.y
460 |                           width:[boxes_filtered[i][2] floatValue]*mainScreenBounds.size.width
461 |                          height:[boxes_filtered[i][3] floatValue]*mainScreenBounds.size.height
462 |                       alignment:kCAAlignmentLeft];
463 |   }
464 | }
465 |
466 | - (void)removeAllLabelLayers {
467 |   for (CATextLayer *layer in labelLayers) {
468 |     [layer removeFromSuperlayer];
469 |   }
470 |   [labelLayers removeAllObjects];
471 | }
472 |
473 | - (void)addLabelLayerWithText:(NSString *)text
474 |                       originX:(float)originX
475 |                       originY:(float)originY
476 |                         width:(float)width
477 |                        height:(float)height
478 |                     alignment:(NSString *)alignment {
479 |
480 |   // NSLog(@"x = %.f,y = %.f, width = %.f, height = %.f",mainScreenBounds.origin.x,mainScreenBounds.origin.y,mainScreenBounds.size.width,mainScreenBounds.size.height);
481 |   NSString *const font = @"Menlo-Regular";
482 |   const float fontSize = 8.0f;
483 |
484 |   const float marginSizeX = 5.0f;
485 |   const float marginSizeY = 2.0f;
486 |
487 |   const float realOriginX=originX-(width/2);
488 |   const float realOriginY=originY-(height/2);
489 |
490 |
491 |   const CGRect backgroundBounds = CGRectMake(
492 |       ceilf(realOriginX),
493 |       ceilf(realOriginY),
494 |       ceilf(width),
495 |       ceilf(height)
496 |   );
497 |   NSLog(@"box x:%f y:%f width:%f height:%f",realOriginX,realOriginY,width,height);
498 |
499 |   const CGRect textBounds =
500 |       CGRectMake((realOriginX + marginSizeX), (realOriginY + marginSizeY),
501 |                  (width - (marginSizeX * 2)), (height - (marginSizeY * 2)));
502 |
503 |   CATextLayer *background = [CATextLayer layer];
504 |   [background setBackgroundColor:[UIColor blackColor].CGColor];
505 |   [background setOpacity:0.1f];
506 |   [background setFrame:backgroundBounds];
507 |   background.cornerRadius = 5.0f;
508 |
509 |   [[self.view layer] addSublayer:background];
510 |   [labelLayers addObject:background];
511 |
512 |   CATextLayer *layer = [CATextLayer layer];
513 |   [layer setForegroundColor:[UIColor whiteColor].CGColor];
514 |   [layer setFrame:textBounds];
515 |   [layer setAlignmentMode:alignment];
516 |   [layer setWrapped:YES];
517 |   [layer setFont:font];
518 |   [layer setFontSize:fontSize];
519 |   layer.contentsScale = [[UIScreen mainScreen] scale];
520 |   [layer setString:text];
521 |
522 |   [[self.view layer] addSublayer:layer];
523 |   [labelLayers addObject:layer];
524 | }
525 |
526 | @end
527 |
--------------------------------------------------------------------------------
/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>CFBundleDevelopmentRegion</key>
6 | 	<string>en</string>
7 | 	<key>CFBundleDisplayName</key>
8 | 	<string>${PRODUCT_NAME}</string>
9 | 	<key>CFBundleExecutable</key>
10 | 	<string>${EXECUTABLE_NAME}</string>
11 | 	<key>CFBundleIdentifier</key>
12 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
13 | 	<key>CFBundleInfoDictionaryVersion</key>
14 | 	<string>6.0</string>
15 | 	<key>CFBundleName</key>
16 | 	<string>${PRODUCT_NAME}</string>
17 | 	<key>CFBundlePackageType</key>
18 | 	<string>APPL</string>
19 | 	<key>CFBundleShortVersionString</key>
20 | 	<string>1.0</string>
21 | 	<key>CFBundleSignature</key>
22 | 	<string>????</string>
23 | 	<key>CFBundleVersion</key>
24 | 	<string>1.0</string>
25 | 	<key>LSRequiresIPhoneOS</key>
26 | 	<true/>
27 | 	<key>UIMainStoryboardFile</key>
28 | 	<string>MainStoryboard_iPhone</string>
29 | 	<key>UIStatusBarHidden</key>
30 | 	<true/>
31 | 	<key>NSCameraUsageDescription</key>
32 | 	<string></string>
33 | 	<key>UISupportedInterfaceOrientations</key>
34 | 	<array>
35 | 		<string>UIInterfaceOrientationPortrait</string>
36 | 	</array>
37 | 	<key>UISupportedInterfaceOrientations~ipad</key>
38 | 	<array>
39 | 		<string>UIInterfaceOrientationPortrait</string>
40 | 	</array>
41 | </dict>
42 | </plist>
43 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Realtime iOS Object Detection with TensorFlow
2 |
3 | This repository contains all the files needed to build a YOLO-based object detection app, except for the frozen TensorFlow model file, which you can download [here](https://drive.google.com/file/d/0B0wuoauR_vfzdVhFVkpoZklUWTg/view?usp=sharing).
4 |
5 | This app is derived from [Google's TensorFlow iOS Camera Example](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/contrib/ios_examples/camera). Thanks to the [YOLO_tensorflow](https://github.com/gliese581gg/YOLO_tensorflow) project by gliese581gg, I took the tiny model implementation and made some modifications, mainly merging as many operations as possible into the TensorFlow graph, including the preprocessing (resizing the image and normalizing each pixel) and the interpretation of the results. I then froze gliese581gg's checkpoint data together with the GraphDef into the .pb file, which the app loads.
6 |
7 | ## Build
8 | - Follow the [instructions](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/contrib/makefile) for the TensorFlow built-in [ios_example](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/examples/ios) to compile the protobuf and TensorFlow core static libraries.
9 |
10 | - Clone this repository under `tensorflow/contrib/ios_example`, at the same level as the official camera project.
11 |
12 | - Download the [graph file](https://drive.google.com/file/d/0B0wuoauR_vfzdVhFVkpoZklUWTg/view?usp=sharing) and decompress it into the data folder.
13 |
14 | - Now you can open the Xcode project file, compile it, and run it on a real device.
15 |
16 | ## Disclaimer
17 |
18 | Even though this already uses the YOLO tiny model, it still requires around 850 MB of memory at runtime, so only devices with no less than 2 GB of RAM (iPhone 6s or later) can run it; on anything smaller the app is killed immediately while loading the model.
19 |
20 |
21 | ## Freeze the model yourself
22 | - Clone my fork of [YOLO_tensorflow](https://github.com/yjmade/YOLO_tensorflow), download the [weights checkpoint file provided by gliese581gg](https://drive.google.com/file/d/0B2JbaJSrWLpza0FtQlc3ejhMTTA/view?usp=sharing) and put it into the weights folder.
23 |
24 | - In IPython, serialize the GraphDef:
25 |
26 | ```python
27 | from YOLO_tiny_tf import YOLO_TF
28 |
29 | yolo=YOLO_TF()
30 | with open("weights/tiny_model.pb","wb") as f:
31 |     f.write(yolo.sess.graph_def.SerializeToString())
32 | ```
33 |
34 | - Follow this [tutorial](https://www.tensorflow.org/versions/r0.9/how_tos/tool_developers/index.html#freezing) to build the TensorFlow freeze tools, then run:
35 |
36 | ```bash
37 | python -m tensorflow.python.tools.freeze_graph \
38 |   --input_graph=tiny_model.pb \
39 |   --input_checkpoint=YOLO_tiny.ckpt \
40 |   --output_graph=frozen_tiny.pb \
41 |   --output_node_names=classes_prob,classes_arg,boxes --input_binary=1
42 | ```
43 |
44 | The resulting frozen_tiny.pb is the file you use in the app; a quick way to sanity-check it is sketched below.
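Before bundling the graph into the app, you can sanity-check the frozen file from Python. This is a minimal sketch, not part of the original build steps: it assumes the node names the app uses (`input` for the image feed, plus the `boxes`, `classes_prob`, `classes_arg` outputs listed above), that the graph accepts an arbitrary HxWx3 float image because resizing and normalization were merged into it, and the TensorFlow 0.x/1.x API of that era.

```python
import numpy as np
import tensorflow as tf

# Load the frozen GraphDef produced by freeze_graph.
graph_def = tf.GraphDef()
with open("frozen_tiny.pb", "rb") as f:
    graph_def.ParseFromString(f.read())

with tf.Graph().as_default() as graph:
    tf.import_graph_def(graph_def, name="")

with tf.Session(graph=graph) as sess:
    # Stand-in camera frame; preprocessing lives inside the graph,
    # so no manual resize/normalize is needed here.
    image = np.random.rand(480, 640, 3).astype(np.float32)
    boxes, probs, args = sess.run(
        ["boxes:0", "classes_prob:0", "classes_arg:0"],
        feed_dict={"input:0": image})
    print(boxes.shape, probs.shape, args.shape)
```

If the three fetches succeed, the frozen file contains everything the app expects.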
45 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/data/grace_hopper.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yjmade/ios_camera_object_detection/133f3233238d43c29c86f0e10ab3e92f1c3e7c9c/data/grace_hopper.jpg
--------------------------------------------------------------------------------
/data/yolo_labels.txt:
--------------------------------------------------------------------------------
1 | aeroplane
2 | bicycle
3 | bird
4 | boat
5 | bottle
6 | bus
7 | car
8 | cat
9 | chair
10 | cow
11 | diningtable
12 | dog
13 | horse
14 | motorbike
15 | person
16 | pottedplant
17 | sheep
18 | sofa
19 | train
20 | tvmonitor
--------------------------------------------------------------------------------
/en.lproj/MainStoryboard_iPhone.storyboard:
--------------------------------------------------------------------------------
[47 lines of storyboard XML; the markup was stripped by this export and is not recoverable]
--------------------------------------------------------------------------------
/ios_image_load.h:
--------------------------------------------------------------------------------
1 | // Copyright 2015 Google Inc. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | //     http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | #ifndef TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_IMAGE_LOAD_H_
16 | #define TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_IMAGE_LOAD_H_
17 |
18 | #include <vector>
19 |
20 | #include "tensorflow/core/framework/types.h"
21 |
22 | std::vector<tensorflow::uint8> LoadImageFromFile(const char* file_name,
23 |                                                  int* out_width,
24 |                                                  int* out_height,
25 |                                                  int* out_channels);
26 |
27 | #endif  // TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_IMAGE_LOAD_H_
28 |
--------------------------------------------------------------------------------
/ios_image_load.mm:
--------------------------------------------------------------------------------
1 | // Copyright 2015 Google Inc. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | //     http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | #include "ios_image_load.h"
16 |
17 | #include <stdlib.h>
18 | #include <string.h>
19 | #include <assert.h>
20 | #include <stdio.h>
21 |
22 | #import <CoreImage/CoreImage.h>
23 | #import <ImageIO/ImageIO.h>
24 |
25 | using tensorflow::uint8;
26 |
27 | std::vector<uint8> LoadImageFromFile(const char* file_name,
28 |                                      int* out_width, int* out_height,
29 |                                      int* out_channels) {
30 |   FILE* file_handle = fopen(file_name, "rb");
31 |   fseek(file_handle, 0, SEEK_END);
32 |   const size_t bytes_in_file = ftell(file_handle);
33 |   fseek(file_handle, 0, SEEK_SET);
34 |   std::vector<uint8> file_data(bytes_in_file);
35 |   fread(file_data.data(), 1, bytes_in_file, file_handle);
36 |   fclose(file_handle);
37 |   CFDataRef file_data_ref = CFDataCreateWithBytesNoCopy(NULL, file_data.data(),
38 |                                                         bytes_in_file,
39 |                                                         kCFAllocatorNull);
40 |   CGDataProviderRef image_provider =
41 |       CGDataProviderCreateWithCFData(file_data_ref);
42 |
43 |   const char* suffix = strrchr(file_name, '.');
44 |   if (!suffix || suffix == file_name) {
45 |     suffix = "";
46 |   }
47 |   CGImageRef image;
48 |   if (strcasecmp(suffix, ".png") == 0) {
49 |     image = CGImageCreateWithPNGDataProvider(image_provider, NULL, true,
50 |                                              kCGRenderingIntentDefault);
51 |   } else if ((strcasecmp(suffix, ".jpg") == 0) ||
52 |              (strcasecmp(suffix, ".jpeg") == 0)) {
53 |     image = CGImageCreateWithJPEGDataProvider(image_provider, NULL, true,
54 |                                               kCGRenderingIntentDefault);
55 |   } else {
56 |     CFRelease(image_provider);
57 |     CFRelease(file_data_ref);
58 |     fprintf(stderr, "Unknown suffix for file '%s'\n", file_name);
59 |     *out_width = 0;
60 |     *out_height = 0;
61 |     *out_channels = 0;
62 |     return std::vector<uint8>();
63 |   }
64 |
65 |   const int width = (int)CGImageGetWidth(image);
66 |   const int height = (int)CGImageGetHeight(image);
67 |   const int channels = 4;
68 |   CGColorSpaceRef color_space = CGColorSpaceCreateDeviceRGB();
69 |   const int bytes_per_row = (width * channels);
70 |   const int bytes_in_image = (bytes_per_row * height);
71 |   std::vector<uint8> result(bytes_in_image);
72 |   const int bits_per_component = 8;
73 |   CGContextRef context = CGBitmapContextCreate(result.data(), width, height,
74 |       bits_per_component, bytes_per_row, color_space,
75 |       kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
76 |   CGColorSpaceRelease(color_space);
77 |   CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
78 |   CGContextRelease(context);
79 |   CFRelease(image);
80 |   CFRelease(image_provider);
81 |   CFRelease(file_data_ref);
82 |
83 |   *out_width = width;
84 |   *out_height = height;
85 |   *out_channels = channels;
86 |   return result;
87 | }
88 |
--------------------------------------------------------------------------------
/main.mm:
--------------------------------------------------------------------------------
1 | // Copyright 2015 Google Inc. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | //     http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
--------------------------------------------------------------------------------
/main.mm:
--------------------------------------------------------------------------------
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#import <UIKit/UIKit.h>

#import "CameraExampleAppDelegate.h"

int main(int argc, char* argv[]) {
  int retVal = 0;

  @autoreleasepool {
    retVal = UIApplicationMain(
        argc, argv, nil, NSStringFromClass([CameraExampleAppDelegate class]));
  }
  return retVal;
}
--------------------------------------------------------------------------------
/squarePNG.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yjmade/ios_camera_object_detection/133f3233238d43c29c86f0e10ab3e92f1c3e7c9c/squarePNG.png
--------------------------------------------------------------------------------
/tensorflow_utils.h:
--------------------------------------------------------------------------------
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_TENSORFLOW_UTILS_H_
#define TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_TENSORFLOW_UTILS_H_

#include <memory>
#include <string>
#include <utility>
#include <vector>

#include "tensorflow/core/public/session.h"
#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor"

// Loads a frozen GraphDef bundled as file_name.file_type into a freshly
// created TensorFlow session.
tensorflow::Status LoadModel(NSString* file_name, NSString* file_type,
                             std::unique_ptr<tensorflow::Session>* session);
// Reads a newline-delimited label file from the app bundle into
// label_strings.
tensorflow::Status LoadLabels(NSString* file_name, NSString* file_type,
                              std::vector<std::string>* label_strings);
// Writes the top num_results (score, index) pairs whose score is at least
// threshold into top_results, sorted by descending score.
void GetTopN(const Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>,
                                    Eigen::Aligned>& prediction,
             const int num_results, const float threshold,
             std::vector<std::pair<float, int> >* top_results);

#endif  // TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_TENSORFLOW_UTILS_H_
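The implementation follows in tensorflow_utils.mm; first, a sketch of how GetTopN is typically consumed. ReportTopN is a hypothetical helper: it assumes the session output is a float tensor whose flattened values can be ranked as scores, which is what Tensor::flat<float>() hands to GetTopN's Eigen::TensorMap parameter.

#include <string>
#include <utility>
#include <vector>

#include "tensorflow/core/platform/logging.h"
#include "tensorflow_utils.h"

// Hypothetical helper: log the best-scoring labels from a session output.
void ReportTopN(tensorflow::Tensor* output,
                const std::vector<std::string>& labels) {
  std::vector<std::pair<float, int> > top_results;
  // Keep at most 5 results scoring at least 0.1.
  GetTopN(output->flat<float>(), 5, 0.1f, &top_results);
  for (const auto& result : top_results) {
    if (result.second < static_cast<int>(labels.size())) {
      LOG(INFO) << labels[result.second] << " = " << result.first;
    }
  }
}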
--------------------------------------------------------------------------------
/tensorflow_utils.mm:
--------------------------------------------------------------------------------
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#import <Foundation/Foundation.h>

#include "tensorflow_utils.h"

#include <pthread.h>
#include <unistd.h>
#include <algorithm>
#include <fstream>
#include <queue>
#include <sstream>
#include <string>

#include "google/protobuf/io/coded_stream.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/io/zero_copy_stream_impl_lite.h"
#include "google/protobuf/message_lite.h"
#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/framework/types.pb.h"
#include "tensorflow/core/platform/env.h"
#include "tensorflow/core/platform/logging.h"
#include "tensorflow/core/platform/mutex.h"
#include "tensorflow/core/platform/types.h"
#include "tensorflow/core/public/session.h"

namespace {
// Adapts a std::ifstream to protobuf's CopyingInputStream interface so a
// large binary graph can be parsed straight off disk.
class IfstreamInputStream : public ::google::protobuf::io::CopyingInputStream {
 public:
  explicit IfstreamInputStream(const std::string& file_name)
      : ifs_(file_name.c_str(), std::ios::in | std::ios::binary) {}
  ~IfstreamInputStream() { ifs_.close(); }

  int Read(void* buffer, int size) {
    if (!ifs_) {
      return -1;
    }
    ifs_.read(static_cast<char*>(buffer), size);
    return static_cast<int>(ifs_.gcount());
  }

 private:
  std::ifstream ifs_;
};
}  // namespace

// Returns the top N confidence values over threshold in the provided vector,
// sorted by confidence in descending order.
void GetTopN(const Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>,
                                    Eigen::Aligned>& prediction,
             const int num_results, const float threshold,
             std::vector<std::pair<float, int> >* top_results) {
  // Will contain top N results in ascending order.
  std::priority_queue<std::pair<float, int>,
                      std::vector<std::pair<float, int> >,
                      std::greater<std::pair<float, int> > > top_result_pq;

  const int count = prediction.size();
  for (int i = 0; i < count; ++i) {
    const float value = prediction(i);

    // Only add it if it beats the threshold and has a chance at being in
    // the top N.
    if (value < threshold) {
      continue;
    }

    top_result_pq.push(std::pair<float, int>(value, i));

    // If at capacity, kick the smallest value out.
    if (top_result_pq.size() > static_cast<size_t>(num_results)) {
      top_result_pq.pop();
    }
  }

  // Copy to output vector and reverse into descending order.
  while (!top_result_pq.empty()) {
    top_results->push_back(top_result_pq.top());
    top_result_pq.pop();
  }
  std::reverse(top_results->begin(), top_results->end());
}

bool PortableReadFileToProto(const std::string& file_name,
                             ::google::protobuf::MessageLite* proto) {
  ::google::protobuf::io::CopyingInputStreamAdaptor stream(
      new IfstreamInputStream(file_name));
  stream.SetOwnsCopyingStream(true);
  ::google::protobuf::io::CodedInputStream coded_stream(&stream);
  // Total bytes hard limit / warning limit are set to 1GB and 512MB
  // respectively.
  coded_stream.SetTotalBytesLimit(1024LL << 20, 512LL << 20);
  return proto->ParseFromCodedStream(&coded_stream);
}
NSString* FilePathForResourceName(NSString* name, NSString* extension) {
  NSString* file_path =
      [[NSBundle mainBundle] pathForResource:name ofType:extension];
  if (file_path == NULL) {
    LOG(FATAL) << "Couldn't find '" << [name UTF8String] << "."
               << [extension UTF8String] << "' in bundle.";
    return nullptr;
  }
  return file_path;
}

tensorflow::Status LoadModel(NSString* file_name, NSString* file_type,
                             std::unique_ptr<tensorflow::Session>* session) {
  tensorflow::SessionOptions options;

  tensorflow::Session* session_pointer = nullptr;
  tensorflow::Status session_status =
      tensorflow::NewSession(options, &session_pointer);
  if (!session_status.ok()) {
    LOG(ERROR) << "Could not create TensorFlow session: " << session_status;
    return session_status;
  }
  session->reset(session_pointer);
  LOG(INFO) << "Session created.";

  tensorflow::GraphDef tensorflow_graph;
  LOG(INFO) << "Graph created.";

  NSString* model_path = FilePathForResourceName(file_name, file_type);
  if (!model_path) {
    LOG(ERROR) << "Failed to find model proto at " << [file_name UTF8String]
               << "." << [file_type UTF8String];
    return tensorflow::errors::NotFound([file_name UTF8String],
                                        [file_type UTF8String]);
  }
  const bool read_proto_succeeded =
      PortableReadFileToProto([model_path UTF8String], &tensorflow_graph);
  if (!read_proto_succeeded) {
    LOG(ERROR) << "Failed to load model proto from "
               << [model_path UTF8String];
    return tensorflow::errors::NotFound([model_path UTF8String]);
  }

  LOG(INFO) << "Creating session.";
  tensorflow::Status create_status = (*session)->Create(tensorflow_graph);
  if (!create_status.ok()) {
    LOG(ERROR) << "Could not create TensorFlow graph: " << create_status;
    return create_status;
  }

  return tensorflow::Status::OK();
}

tensorflow::Status LoadLabels(NSString* file_name, NSString* file_type,
                              std::vector<std::string>* label_strings) {
  // Read the label list.
  NSString* labels_path = FilePathForResourceName(file_name, file_type);
  if (!labels_path) {
    LOG(ERROR) << "Failed to find label file at " << [file_name UTF8String]
               << "." << [file_type UTF8String];
    return tensorflow::errors::NotFound([file_name UTF8String],
                                        [file_type UTF8String]);
  }
  std::ifstream t;
  t.open([labels_path UTF8String]);
  std::string line;
  // Loop on getline itself so a spurious empty entry isn't appended at EOF.
  while (std::getline(t, line)) {
    label_strings->push_back(line);
  }
  t.close();
  return tensorflow::Status::OK();
}
--------------------------------------------------------------------------------
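Tying the utilities together, a hedged end-to-end sketch. RunDetectionOnce is hypothetical: the @"frozen_model" resource name and the "input"/"output" node names are placeholders that must match the frozen graph actually bundled with the app, while @"yolo_labels"/@"txt" matches the label file under data/. image_tensor stands in for a preprocessed frame, e.g. from the ImageToTensor sketch above. Note that a YOLO graph's raw output encodes boxes as well as class scores, so ranking the flat output with GetTopN is only a smoke test, not a box decoder.

#import <Foundation/Foundation.h>

#include <memory>
#include <string>
#include <utility>
#include <vector>

#include "tensorflow/core/platform/logging.h"
#include "tensorflow_utils.h"

// Hypothetical wiring; all node and resource names are placeholders.
void RunDetectionOnce(const tensorflow::Tensor& image_tensor) {
  std::unique_ptr<tensorflow::Session> session;
  std::vector<std::string> labels;

  tensorflow::Status load_status = LoadModel(@"frozen_model", @"pb", &session);
  if (!load_status.ok()) {
    LOG(ERROR) << "Couldn't load model: " << load_status;
    return;
  }
  tensorflow::Status labels_status =
      LoadLabels(@"yolo_labels", @"txt", &labels);
  if (!labels_status.ok()) {
    LOG(ERROR) << "Couldn't load labels: " << labels_status;
    return;
  }

  // "input" and "output" must be replaced with the frozen graph's real
  // node names.
  std::vector<tensorflow::Tensor> outputs;
  tensorflow::Status run_status =
      session->Run({{"input", image_tensor}}, {"output"}, {}, &outputs);
  if (!run_status.ok() || outputs.empty()) {
    LOG(ERROR) << "Running model failed: " << run_status;
    return;
  }

  std::vector<std::pair<float, int> > top_results;
  GetTopN(outputs[0].flat<float>(), 5, 0.1f, &top_results);
  for (const auto& result : top_results) {
    if (result.second < static_cast<int>(labels.size())) {
      LOG(INFO) << labels[result.second] << " = " << result.first;
    }
  }
}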